Compare commits
7 Commits
5000ab768a
...
27200abc58
| Author | SHA1 | Date | |
|---|---|---|---|
| 27200abc58 | |||
| 6c260e654d | |||
| 86ea8ceaf9 | |||
| d50857c528 | |||
| da1feee2a2 | |||
| 707f84ec43 | |||
| dec8e85064 |
|
|
@ -157,22 +157,24 @@ function showSearchResultsExtended(element, template, searchTool, query, input,
|
|||
})
|
||||
}
|
||||
|
||||
function formatLineResult(hit) {
|
||||
function formatLineResult(hits, key) {
|
||||
return [
|
||||
{
|
||||
text: "[[" + hit.title + "]]",
|
||||
text: "[[" + key + "]]",
|
||||
indented: 0,
|
||||
fold: 'open',
|
||||
hidden: false,
|
||||
fleeting: true
|
||||
},
|
||||
{
|
||||
text: hit.line,
|
||||
indented: 1,
|
||||
fold: 'open',
|
||||
hidden: false,
|
||||
fleeting: true
|
||||
}
|
||||
..._.map(hits, (hit) => {
|
||||
return {
|
||||
text: hit.line,
|
||||
indented: 1,
|
||||
fold: 'open',
|
||||
hidden: false,
|
||||
fleeting: true
|
||||
}
|
||||
})
|
||||
]
|
||||
}
|
||||
|
||||
|
|
@ -272,11 +274,15 @@ function Editor(holder, input) {
|
|||
el("td", [renderInline(rowText)]),
|
||||
..._.map(header.children, col => {
|
||||
let td = el("td")
|
||||
let value = rowData[_.snakeCase(_.trim(col.text))];
|
||||
let key = _.trim(col.text);
|
||||
let value = _.get(rowData, key)
|
||||
if (_.isObject(value) && value.line) {
|
||||
value = value.line
|
||||
}
|
||||
if (col.children && col.children.length > 0) {
|
||||
value = col.children[0].text
|
||||
}
|
||||
transform(value ? value.replace(/^:/, '=') : '', $(td), id, editor, rowData)
|
||||
transform(typeof value === 'string' ? value.replace(/^:/, '=') : '', $(td), id, editor, rowData)
|
||||
return td
|
||||
})
|
||||
])
|
||||
|
|
@ -355,10 +361,13 @@ function Editor(holder, input) {
|
|||
} else if (converted.startsWith("=", 0)) {
|
||||
converted = transformMathExpression(converted, scope);
|
||||
} else {
|
||||
let re = /^([A-Z0-9 ]+)::\s*(.+)$/i;
|
||||
let re = /^([A-Z0-9 ]+)::\s*(.*)$/i;
|
||||
let res = text.match(re)
|
||||
if (res) {
|
||||
converted = '**[[' + res[1] + ']]**: ' + res[2]
|
||||
converted = '<span class="metadata-key">[[' + res[1] + ']]</span>'
|
||||
if (res[2]) {
|
||||
converted += ': ' + res[2]
|
||||
}
|
||||
} else if (text.match(/#\[\[TODO]]/)) {
|
||||
converted = converted.replace('#[[TODO]]', '<input class="checkbox" type="checkbox" />')
|
||||
todo = true;
|
||||
|
|
@ -629,6 +638,7 @@ function Editor(holder, input) {
|
|||
.finally(() => editor.render())
|
||||
} else {
|
||||
return search.startQuery(res[2])
|
||||
.then(hits => _.groupBy(hits, (it) => it.title))
|
||||
.then(hits => _.flatMap(hits, formatLineResult))
|
||||
.then(results => editor.replaceChildren(id, results))
|
||||
.finally(() => editor.render())
|
||||
|
|
|
|||
|
|
@ -48,14 +48,20 @@ body {
|
|||
font-family: 'Inter', sans-serif;
|
||||
}
|
||||
|
||||
.content a.wiki-link::before {
|
||||
content: "[[";
|
||||
color: #ccc;
|
||||
.highlight-links {
|
||||
.content a.wiki-link::before {
|
||||
content: "[[";
|
||||
color: #ccc;
|
||||
}
|
||||
.content a.wiki-link::after {
|
||||
content: "]]";
|
||||
color: #ccc;
|
||||
}
|
||||
}
|
||||
|
||||
.content a.wiki-link::after {
|
||||
content: "]]";
|
||||
color: #ccc;
|
||||
.metadata-key a {
|
||||
font-weight: bold;
|
||||
color: black;
|
||||
}
|
||||
|
||||
@supports (font-variation-settings: normal) {
|
||||
|
|
|
|||
129
search.go
129
search.go
|
|
@ -48,11 +48,11 @@ type nameLine struct {
|
|||
}
|
||||
|
||||
type searchObject struct {
|
||||
Title string `json:"title"`
|
||||
Blocks []string `json:"blocks"`
|
||||
Refs []nameLine `json:"refs"`
|
||||
Meta map[string]string `json:"meta"`
|
||||
Links []ParsedLink `json:"links"`
|
||||
Title string `json:"title"`
|
||||
Blocks []string `json:"blocks"`
|
||||
Refs []nameLine `json:"refs"`
|
||||
Meta map[string]interface{} `json:"meta"`
|
||||
Links []ParsedLink `json:"links"`
|
||||
}
|
||||
|
||||
func NewSearchHandler(searchIndex bleve.Index) (http.Handler, error) {
|
||||
|
|
@ -117,7 +117,7 @@ func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
|||
for _, page := range pages {
|
||||
err = saveBlocksFromPage("data", page)
|
||||
if err != nil {
|
||||
log.Printf("error while processing blocks from page %s: %w", page.Name, err)
|
||||
log.Printf("error while processing blocks from page %s: %v", page.Name, err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
|
@ -141,7 +141,7 @@ func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
|||
log.Println("saveLinks")
|
||||
err = saveLinks(mp)
|
||||
if err != nil {
|
||||
log.Printf("error while saving links %w", err)
|
||||
log.Printf("error while saving links %v", err)
|
||||
http.Error(w, err.Error(), 500)
|
||||
return
|
||||
}
|
||||
|
|
@ -149,14 +149,14 @@ func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
|||
log.Println("saveBackrefs")
|
||||
err = saveBackrefs("data/backrefs.json", refs)
|
||||
if err != nil {
|
||||
log.Printf("error while saving backrefs %w", err)
|
||||
log.Printf("error while saving backrefs %v", err)
|
||||
http.Error(w, err.Error(), 500)
|
||||
return
|
||||
}
|
||||
|
||||
err = os.RemoveAll("data/_tmp_index")
|
||||
if err != nil {
|
||||
log.Printf("error while removing old index %w", err)
|
||||
log.Printf("error while removing old index %v", err)
|
||||
http.Error(w, err.Error(), 500)
|
||||
return
|
||||
}
|
||||
|
|
@ -170,14 +170,14 @@ func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
|||
for _, page := range pages {
|
||||
searchObjects, err := createSearchObjects(page.Name)
|
||||
if err != nil {
|
||||
log.Printf("error while creating search object %s: %w", page.Title, err)
|
||||
log.Printf("error while creating search object %s: %v", page.Title, err)
|
||||
continue
|
||||
}
|
||||
|
||||
for _, so := range searchObjects {
|
||||
err = index.Index(so.ID, so)
|
||||
if err != nil {
|
||||
log.Printf("error while indexing %s: %w", page.Title, err)
|
||||
log.Printf("error while indexing %s: %v", page.Title, err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
|
@ -185,19 +185,19 @@ func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
|||
|
||||
err = os.Rename("data/_page-index", "data/_page-index-old")
|
||||
if err != nil {
|
||||
log.Printf("error while resetting index: %w", err)
|
||||
log.Printf("error while resetting index: %v", err)
|
||||
http.Error(w, err.Error(), 500)
|
||||
return
|
||||
}
|
||||
err = os.Rename("data/_tmp_index", "data/_page-index")
|
||||
if err != nil {
|
||||
log.Printf("error while putting new index in place: %w", err)
|
||||
log.Printf("error while putting new index in place: %v", err)
|
||||
http.Error(w, err.Error(), 500)
|
||||
return
|
||||
}
|
||||
err = os.RemoveAll("data/_page-index-old")
|
||||
if err != nil {
|
||||
log.Printf("error while removing old index %w", err)
|
||||
log.Printf("error while removing old index %v", err)
|
||||
http.Error(w, err.Error(), 500)
|
||||
return
|
||||
}
|
||||
|
|
@ -294,26 +294,98 @@ func createSearchObjects(rootBlockID string) ([]pageBlock, error) {
|
|||
func createStructuredFormat(page Page) (searchObject, error) {
|
||||
so := searchObject{}
|
||||
so.Title = page.Title
|
||||
so.Meta = make(map[string]string)
|
||||
so.Meta = make(map[string]interface{})
|
||||
|
||||
type simpleListItem struct {
|
||||
Text string
|
||||
ID string
|
||||
Text string
|
||||
ID string
|
||||
Indented int
|
||||
}
|
||||
|
||||
type parent struct {
|
||||
key string
|
||||
indent int
|
||||
items []interface{}
|
||||
values map[string]interface{}
|
||||
}
|
||||
|
||||
var parents []parent
|
||||
parents = append(parents, parent{
|
||||
values: make(map[string]interface{}),
|
||||
})
|
||||
|
||||
var listItems []simpleListItem
|
||||
if err := json.NewDecoder(strings.NewReader(page.Content)).Decode(&listItems); err != nil {
|
||||
so.Blocks = append(so.Blocks, page.Content)
|
||||
} else {
|
||||
for _, li := range listItems {
|
||||
meta := strings.SplitN(li.Text, "::", 2)
|
||||
par := parents[len(parents)-1]
|
||||
// merge up
|
||||
for len(parents) > 1 && li.Indented <= par.indent {
|
||||
parents = parents[:len(parents)-1]
|
||||
nextTop := parents[len(parents)-1]
|
||||
if len(par.values) > 0 {
|
||||
if vals, e := nextTop.values[par.key]; e {
|
||||
if vals2, ok := vals.(map[string]interface{}); ok {
|
||||
for k, v := range par.values {
|
||||
vals2[k] = v
|
||||
}
|
||||
nextTop.values[par.key] = vals2
|
||||
}
|
||||
} else {
|
||||
nextTop.values[par.key] = par.values
|
||||
}
|
||||
} else if len(par.items) > 0 {
|
||||
nextTop.values[par.key] = par.items
|
||||
} else {
|
||||
nextTop.values[par.key] = ""
|
||||
}
|
||||
parents[len(parents)-1] = nextTop
|
||||
par = parents[len(parents)-1]
|
||||
}
|
||||
if len(meta) == 2 {
|
||||
key := strcase.ToSnake(strings.TrimSpace(meta[0]))
|
||||
value := strings.TrimSpace(meta[1])
|
||||
if key == "title" {
|
||||
so.Title = value
|
||||
if value == "" {
|
||||
parents = append(parents, parent{
|
||||
key: key,
|
||||
indent: li.Indented,
|
||||
values: make(map[string]interface{}),
|
||||
})
|
||||
} else {
|
||||
if len(parents) > 0 {
|
||||
par = parents[len(parents)-1]
|
||||
// save new value
|
||||
if li.Indented > par.indent {
|
||||
links, err := ParseLinks(li.ID, value)
|
||||
if err != nil {
|
||||
par.values[key] = value
|
||||
} else {
|
||||
if len(links) > 0 {
|
||||
links[0].Href = fmt.Sprintf("%s%s", *baseurl, links[0].PageName)
|
||||
links[0].ID = ""
|
||||
par.values[key] = links[0]
|
||||
} else {
|
||||
par.values[key] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
parents[len(parents)-1] = par
|
||||
}
|
||||
}
|
||||
so.Meta[key] = value
|
||||
} else {
|
||||
links, err := ParseLinks(li.ID, li.Text)
|
||||
if err != nil {
|
||||
par.items = append(par.items, li.Text)
|
||||
} else if len(links) > 0 {
|
||||
links[0].Href = fmt.Sprintf("%s%s", *baseurl, links[0].PageName)
|
||||
links[0].ID = ""
|
||||
par.items = append(par.items, links[0])
|
||||
} else {
|
||||
par.items = append(par.items, li.Text)
|
||||
}
|
||||
parents[len(parents)-1] = par
|
||||
}
|
||||
|
||||
so.Blocks = append(so.Blocks, li.Text)
|
||||
|
|
@ -331,6 +403,23 @@ func createStructuredFormat(page Page) (searchObject, error) {
|
|||
}
|
||||
}
|
||||
|
||||
// merge up
|
||||
for len(parents) > 1 {
|
||||
par := parents[len(parents)-1]
|
||||
parents = parents[:len(parents)-1]
|
||||
nextTop := parents[len(parents)-1]
|
||||
if len(par.values) > 0 {
|
||||
nextTop.values[par.key] = par.values
|
||||
} else if len(par.items) > 0 {
|
||||
nextTop.values[par.key] = par.items
|
||||
} else {
|
||||
nextTop.values[par.key] = ""
|
||||
}
|
||||
parents[len(parents)-1] = nextTop
|
||||
}
|
||||
|
||||
so.Meta = parents[0].values
|
||||
|
||||
for _, refs := range page.Refs {
|
||||
for _, ref := range refs {
|
||||
so.Refs = append(so.Refs, nameLine{
|
||||
|
|
|
|||
24
util.go
24
util.go
|
|
@ -53,11 +53,11 @@ var (
|
|||
)
|
||||
|
||||
type ParsedLink struct {
|
||||
ID string `json:"ID"`
|
||||
Name string `json:"title"`
|
||||
PageName string `json:"name"`
|
||||
Line string `json:"line"`
|
||||
Href string `json:"href"`
|
||||
ID string `json:"ID,omitempty"`
|
||||
Name string `json:"title,omitempty"`
|
||||
PageName string `json:"name,omitempty"`
|
||||
Line string `json:"line,omitempty"`
|
||||
Href string `json:"href,omitempty"`
|
||||
}
|
||||
|
||||
const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
|
||||
|
|
@ -82,6 +82,13 @@ func ParseLinks(blockId string, content string) ([]ParsedLink, error) {
|
|||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
|
||||
keywords := keywordsRE.FindAllStringSubmatch(line, -1)
|
||||
for _, matches := range keywords {
|
||||
link := matches[1]
|
||||
l := cleanNameURL(link)
|
||||
result = append(result, ParsedLink{blockId, link, l, line, ""})
|
||||
}
|
||||
|
||||
links := hrefRE.FindAllStringSubmatch(line, -1)
|
||||
|
||||
for _, matches := range links {
|
||||
|
|
@ -98,13 +105,6 @@ func ParseLinks(blockId string, content string) ([]ParsedLink, error) {
|
|||
l := cleanNameURL(link)
|
||||
result = append(result, ParsedLink{blockId, link, l, line, ""})
|
||||
}
|
||||
|
||||
keywords := keywordsRE.FindAllStringSubmatch(line, -1)
|
||||
for _, matches := range keywords {
|
||||
link := matches[1]
|
||||
l := cleanNameURL(link)
|
||||
result = append(result, ParsedLink{blockId, link, l, line, ""})
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
|
|
|
|||
Loading…
Reference in New Issue
Block a user