diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..fe21a7f
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,10 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+## Unreleased
+
+### Added
+
+* Support for tags.
+    * Many tag flavors are supported: `#hashtags`, `:colon:separated:tags:`, and even Bear's [`#multi-word tags#`](https://blog.bear.app/2017/11/bear-tips-how-to-create-multi-word-tags/). If you prefer YAML frontmatter, list your tags under the `tags` or `keywords` keys.
diff --git a/adapter/markdown/extensions/tag.go b/adapter/markdown/extensions/tag.go
new file mode 100644
index 0000000..366eb09
--- /dev/null
+++ b/adapter/markdown/extensions/tag.go
@@ -0,0 +1,256 @@
+package extensions
+
+import (
+    "strings"
+    "unicode"
+
+    "github.com/yuin/goldmark"
+    "github.com/yuin/goldmark/ast"
+    gast "github.com/yuin/goldmark/ast"
+    "github.com/yuin/goldmark/parser"
+    "github.com/yuin/goldmark/text"
+    "github.com/yuin/goldmark/util"
+)
+
+// Tags represents a list of inline tags in a Markdown document.
+type Tags struct {
+    gast.BaseInline
+    // Tags in this list.
+    Tags []string
+}
+
+func (n *Tags) Dump(source []byte, level int) {
+    m := map[string]string{}
+    m["Tags"] = strings.Join(n.Tags, ", ")
+    gast.DumpHelper(n, source, level, m, nil)
+}
+
+// KindTags is a NodeKind of the Tags node.
+var KindTags = gast.NewNodeKind("Tags")
+
+func (n *Tags) Kind() gast.NodeKind {
+    return KindTags
+}
+
+// TagExt is an extension parsing various flavors of tags.
+//
+// * #hashtags, including Bear's #multi words# tags
+// * :colon:separated:tags:, e.g. vimwiki and Org mode
+//
+// The following characters are authorized in a tag:
+// * unicode categories [L]etter and [N]umber
+// * / @ ' ~ - _ $ % & + = and, when possible, # and :
+// * any character escaped with \, including whitespace
+type TagExt struct {
+    // Indicates whether #hashtags are parsed.
+    HashtagEnabled bool
+    // Indicates whether Bear's multi-word tags are parsed. Hashtags must be enabled as well.
+    MultiWordTagEnabled bool
+    // Indicates whether :colon:tags: are parsed.
+    ColontagEnabled bool
+}
+
+func (t *TagExt) Extend(m goldmark.Markdown) {
+    parsers := []util.PrioritizedValue{}
+
+    if t.HashtagEnabled {
+        parsers = append(parsers, util.Prioritized(&hashtagParser{
+            multiWordTagEnabled: t.MultiWordTagEnabled,
+        }, 2000))
+    }
+
+    if t.ColontagEnabled {
+        parsers = append(parsers, util.Prioritized(&colontagParser{}, 2000))
+    }
+
+    if len(parsers) > 0 {
+        m.Parser().AddOptions(parser.WithInlineParsers(parsers...))
+    }
+}
+
+// hashtagParser parses #hashtags, including Bear's #multi words# tags.
+type hashtagParser struct {
+    multiWordTagEnabled bool
+}
+
+func (p *hashtagParser) Trigger() []byte {
+    return []byte{'#'}
+}
+
+func (p *hashtagParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node {
+    previousChar := block.PrecendingCharacter()
+    line, _ := block.PeekLine()
+
+    // A hashtag can't be directly preceded by a # or any other valid tag character.
+    if isValidTagChar(previousChar, '\x00') {
+        return nil
+    }
+
+    var (
+        tag                   string // Accumulator for the hashtag
+        multiWordTagCandidate string // Accumulator for a potential Bear multi-word tag
+    )
+
+    var (
+        escaping            = false // Found a backslash, next character will be literal
+        parsingMultiWordTag = false // Finished parsing a hashtag, now attempt parsing a Bear multi-word tag
+        endPos              = 0     // Last position of the tag in the line
+        multiWordTagEndPos  = 0     // Last position of the multi-word tag in the line
+    )
+
+    appendChar := func(c rune) {
+        if parsingMultiWordTag {
+            multiWordTagCandidate += string(c)
+        } else {
+            tag += string(c)
+        }
+    }
+
+    for i, char := range string(line[1:]) {
+        if parsingMultiWordTag {
+            multiWordTagEndPos = i
+        } else {
+            endPos = i
+        }
+
+        if escaping {
+            // Currently escaping? The character will be appended literally.
+            appendChar(char)
+            escaping = false
+
+        } else if char == '\\' {
+            // Found a backslash, next character will be escaped.
+            escaping = true
+
+        } else if parsingMultiWordTag {
+            // Parsing a multi-word tag candidate.
+            if isValidTagChar(char, '#') || unicode.IsSpace(char) {
+                appendChar(char)
+            } else if char == '#' {
+                // A valid multi-word tag must not have a space before the closing #.
+                if !unicode.IsSpace(previousChar) {
+                    tag = multiWordTagCandidate
+                    endPos = multiWordTagEndPos
+                }
+                break
+            }
+            previousChar = char
+
+        } else if !p.multiWordTagEnabled && char == '#' {
+            // A tag terminated with a # is invalid when not in a multi-word tag.
+            return nil
+
+        } else if p.multiWordTagEnabled && unicode.IsSpace(char) {
+            // Found a space, let's try to parse a multi-word tag.
+            previousChar = char
+            multiWordTagCandidate = tag
+            parsingMultiWordTag = true
+            appendChar(char)
+
+        } else if !isValidTagChar(char, '#') {
+            // Found an invalid character, the hashtag is complete.
+            break
+
+        } else {
+            appendChar(char)
+        }
+    }
+
+    if len(tag) == 0 || !isValidHashTag(tag) {
+        return nil
+    }
+
+    block.Advance(endPos)
+
+    return &Tags{
+        BaseInline: gast.BaseInline{},
+        Tags:       []string{tag},
+    }
+}
+
+// isValidHashTag returns whether the hashtag is valid, i.e. whether it contains
+// at least one character which is not a number.
+func isValidHashTag(tag string) bool {
+    for _, char := range tag {
+        if !unicode.IsNumber(char) {
+            return true
+        }
+    }
+    return false
+}
+
+// colontagParser parses :colon:separated:tags:.
+type colontagParser struct{}
+
+func (p *colontagParser) Trigger() []byte {
+    return []byte{':'}
+}
+
+func (p *colontagParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node {
+    previousChar := block.PrecendingCharacter()
+    line, _ := block.PeekLine()
+
+    // A colontag can't be directly preceded by a : or any other valid tag character.
+    if isValidTagChar(previousChar, '\x00') {
+        return nil
+    }
+
+    var (
+        tag  string       // Accumulator for the current colontag
+        tags = []string{} // All colontags found
+    )
+
+    var (
+        escaping = false // Found a backslash, next character will be literal
+        endPos   = 0     // Last position of the colontags in the line
+    )
+
+    appendChar := func(c rune) {
+        tag += string(c)
+    }
+
+    for i, char := range string(line[1:]) {
+        endPos = i
+
+        if escaping {
+            // Currently escaping? The character will be appended literally.
+            appendChar(char)
+            escaping = false
+
+        } else if char == '\\' {
+            // Found a backslash, next character will be escaped.
+            escaping = true
+
+        } else if char == ':' {
+            if len(tag) == 0 {
+                break
+            }
+            tags = append(tags, tag)
+            tag = ""
+
+        } else if !isValidTagChar(char, ':') {
+            // Found an invalid character, the colontag is complete.
+            break
+
+        } else {
+            appendChar(char)
+        }
+    }
+
+    if len(tags) == 0 {
+        return nil
+    }
+
+    block.Advance(endPos)
+
+    return &Tags{
+        BaseInline: gast.BaseInline{},
+        Tags:       tags,
+    }
+}
+
+func isValidTagChar(r rune, excluded rune) bool {
+    return r != excluded && (unicode.IsLetter(r) || unicode.IsNumber(r) ||
+        r == '/' || r == '@' || r == '\'' || r == '~' ||
+        r == '-' || r == '_' || r == '$' || r == '%' ||
+        r == '&' || r == '+' || r == '=' || r == ':' ||
+        r == '#')
+}
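// Illustrative sketch (not part of the patch): how the TagExt extension above is
// meant to be plugged into goldmark and how the resulting Tags nodes can be
// collected. It mirrors the wiring done in adapter/markdown further down in this
// diff; the variable names and the sample note text are made up.
//
//	md := goldmark.New(goldmark.WithExtensions(&extensions.TagExt{
//		HashtagEnabled:      true,
//		MultiWordTagEnabled: true,
//		ColontagEnabled:     true,
//	}))
//
//	source := []byte("A note about #single-hashtag and :multiple:colontags:")
//	root := md.Parser().Parse(text.NewReader(source))
//
//	tags := []string{}
//	_ = ast.Walk(root, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
//		if t, ok := n.(*extensions.Tags); ok && entering {
//			tags = append(tags, t.Tags...)
//		}
//		return ast.WalkContinue, nil
//	})
//
//	// Per the tests below, tags should contain "single-hashtag", "multiple"
//	// and "colontags".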
diff --git a/adapter/markdown/extensions/wikilink.go b/adapter/markdown/extensions/wikilink.go
index 4762f9f..8db0738 100644
--- a/adapter/markdown/extensions/wikilink.go
+++ b/adapter/markdown/extensions/wikilink.go
@@ -11,10 +11,10 @@ import (
     "github.com/yuin/goldmark/util"
 )
 
-// WikiLink is an extension parsing wiki links and Neuron's Folgezettel.
+// WikiLinkExt is an extension parsing wiki links and Neuron's Folgezettel.
 //
 // For example, [[wiki link]], [[[legacy downlink]]], #[[uplink]], [[downlink]]#.
-var WikiLink = &wikiLink{}
+var WikiLinkExt = &wikiLink{}
 
 type wikiLink struct{}
diff --git a/adapter/markdown/markdown.go b/adapter/markdown/markdown.go
index c22380b..0369dc7 100644
--- a/adapter/markdown/markdown.go
+++ b/adapter/markdown/markdown.go
@@ -2,6 +2,7 @@ package markdown
 
 import (
     "bufio"
+    "fmt"
     "regexp"
     "strings"
 
@@ -23,8 +24,17 @@ type Parser struct {
     md goldmark.Markdown
 }
 
+type ParserOpts struct {
+    // Indicates whether #hashtags are parsed.
+    HashtagEnabled bool
+    // Indicates whether Bear's multi-word tags are parsed. Hashtags must be enabled as well.
+    MultiWordTagEnabled bool
+    // Indicates whether :colon:tags: are parsed.
+    ColontagEnabled bool
+}
+
 // NewParser creates a new Markdown Parser.
-func NewParser() *Parser {
+func NewParser(options ParserOpts) *Parser {
     return &Parser{
         md: goldmark.New(
             goldmark.WithExtensions(
@@ -38,7 +48,12 @@ func NewParser() *Parser {
                     xurls.Strict,
                 ),
             ),
-            extensions.WikiLink,
+            extensions.WikiLinkExt,
+            &extensions.TagExt{
+                HashtagEnabled:      options.HashtagEnabled,
+                MultiWordTagEnabled: options.MultiWordTagEnabled,
+                ColontagEnabled:     options.ColontagEnabled,
+            },
         ),
     ),
 }
@@ -70,11 +85,17 @@ func (p *Parser) Parse(source string) (*note.Content, error) {
     }
     body := parseBody(bodyStart, bytes)
 
+    tags, err := parseTags(frontmatter, root, bytes)
+    if err != nil {
+        return nil, err
+    }
+
     return &note.Content{
         Title: title,
         Body:  body,
         Lead:  parseLead(body),
         Links: links,
+        Tags:  tags,
     }, nil
 }
@@ -135,6 +156,45 @@ func parseLead(body opt.String) opt.String {
     return opt.NewNotEmptyString(strings.TrimSpace(lead))
 }
 
+// parseTags extracts tags as #hashtags, :colon:tags: or from the YAML frontmatter.
+func parseTags(frontmatter frontmatter, root ast.Node, source []byte) ([]string, error) {
+    tags := make([]string, 0)
+
+    // Parse from YAML frontmatter, either:
+    // * a list of strings
+    // * a single space-separated string
+    findFMTags := func(key string) []string {
+        if tags, ok := frontmatter.getStrings(key); ok {
+            return tags
+        } else if tags := frontmatter.getString(key); !tags.IsNull() {
+            return strings.Fields(tags.Unwrap())
+        } else {
+            return []string{}
+        }
+    }
+
+    for _, key := range []string{"tag", "tags", "keyword", "keywords"} {
+        for _, t := range findFMTags(key) {
+            // Trims any # prefix to support hashtags embedded in YAML
+            // frontmatter, as in Simple Markdown Zettelkasten:
+            // http://evantravers.com/articles/2020/11/23/zettelkasten-updates/
+            tags = append(tags, strings.TrimPrefix(t, "#"))
+        }
+    }
+
+    // Parse #hashtags and :colon:tags:
+    err := ast.Walk(root, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
+        if tagsNode, ok := n.(*extensions.Tags); ok && entering {
+            for _, tag := range tagsNode.Tags {
+                tags = append(tags, tag)
+            }
+        }
+        return ast.WalkContinue, nil
+    })
+
+    return strutil.RemoveDuplicates(tags), err
+}
+
 // parseLinks extracts outbound links from the note.
 func parseLinks(root ast.Node, source []byte) ([]note.Link, error) {
     links := make([]note.Link, 0)
@@ -193,14 +253,28 @@ type frontmatter struct {
 
 var frontmatterRegex = regexp.MustCompile(`(?ms)^\s*-+\s*$.*?^\s*-+\s*$`)
 
-func parseFrontmatter(context parser.Context, source []byte) (front frontmatter, err error) {
+func parseFrontmatter(context parser.Context, source []byte) (frontmatter, error) {
+    var front frontmatter
+
     index := frontmatterRegex.FindIndex(source)
-    if index != nil {
-        front.start = index[0]
-        front.end = index[1]
-        front.values, err = meta.TryGet(context)
+    if index == nil {
+        return front, nil
     }
-    return
+
+    front.start = index[0]
+    front.end = index[1]
+    front.values = map[string]interface{}{}
+
+    values, err := meta.TryGet(context)
+    if err != nil {
+        return front, err
+    }
+    // Convert keys to lowercase so that frontmatter lookups are case-insensitive.
+    for k, v := range values {
+        front.values[strings.ToLower(k)] = v
+    }
+
+    return front, nil
 }
 
 // getString returns the first string value found for any of the given keys.
@@ -210,6 +284,7 @@ func (m frontmatter) getString(keys ...string) opt.String {
     }
 
     for _, key := range keys {
+        key = strings.ToLower(key)
         if val, ok := m.values[key]; ok {
             if val, ok := val.(string); ok {
                 return opt.NewNotEmptyString(val)
@@ -218,3 +293,24 @@
     return opt.NullString
 }
+
+// getStrings returns the first string list found for any of the given keys.
+func (m frontmatter) getStrings(keys ...string) ([]string, bool) {
+    if m.values == nil {
+        return nil, false
+    }
+
+    for _, key := range keys {
+        key = strings.ToLower(key)
+        if val, ok := m.values[key]; ok {
+            if val, ok := val.([]interface{}); ok {
+                strings := []string{}
+                for _, v := range val {
+                    strings = append(strings, fmt.Sprint(v))
+                }
+                return strings, true
+            }
+        }
+    }
+    return nil, false
+}
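// Illustrative sketch (not part of the patch): whatever their origin, tags end up
// in note.Content.Tags. Assumes the ParserOpts and Parse implementation above;
// the note body is a made-up example.
//
//	parser := markdown.NewParser(markdown.ParserOpts{
//		HashtagEnabled:  true,
//		ColontagEnabled: true,
//	})
//
//	content, _ := parser.Parse("---\ntags: [history, \"#science\"]\n---\nA note about #physics and :biology:.")
//
//	// content.Tags should contain "history", "science", "physics" and "biology",
//	// deduplicated by strutil.RemoveDuplicates.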
diff --git a/adapter/markdown/markdown_test.go b/adapter/markdown/markdown_test.go
index aecf5f4..5d022cc 100644
--- a/adapter/markdown/markdown_test.go
+++ b/adapter/markdown/markdown_test.go
@@ -153,6 +153,194 @@ Paragraph`,
     )
 }
 
+func TestParseHashtags(t *testing.T) {
+    test := func(source string, tags []string) {
+        content := parseWithOptions(t, source, ParserOpts{
+            HashtagEnabled:      true,
+            MultiWordTagEnabled: false,
+        })
+        assert.Equal(t, content.Tags, tags)
+    }
+
+    test("", []string{})
+    test("#", []string{})
+    test("##", []string{})
+    test("# No tags around here", []string{})
+    test("#single-hashtag", []string{"single-hashtag"})
+    test("a #tag in the middle", []string{"tag"})
+    test("#multiple #hashtags", []string{"multiple", "hashtags"})
+    test("#multiple#hashtags", []string{})
+    // Unicode hashtags
+    test("#libellé-français, #日本語ハッシュタグ", []string{"libellé-français", "日本語ハッシュタグ"})
+    // Punctuation breaking tags
+    test(
+        "#a #b, #c; #d. #e! #f? #g* #h\", #i(, #j), #k[, #l], #m{, #n}",
+        []string{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n"},
+    )
+    // Authorized special characters
+    test("#a/@'~-_$%&+=: end", []string{"a/@'~-_$%&+=:"})
+    // Escape punctuation and space
+    test(`#an\ \\espaced\ tag\!`, []string{`an \espaced tag!`})
+    // Hashtags containing only numbers and dots are invalid
+    test("#123, #1.2.3", []string{})
+    // Must not be preceded by a hash or any other valid hashtag character
+    test("##invalid also#invalid", []string{})
+    // Bear's multi-word tags are disabled
+    test("#multi word# end", []string{"multi"})
+}
+
+func TestParseWordtags(t *testing.T) {
+    test := func(source string, tags []string) {
+        content := parseWithOptions(t, source, ParserOpts{
+            HashtagEnabled:      true,
+            MultiWordTagEnabled: true,
+        })
+        assert.Equal(t, content.Tags, tags)
+    }
+
+    test("", []string{})
+    test("#", []string{})
+    test("##", []string{})
+    test("# No tags around here", []string{})
+    test("#single-hashtag", []string{"single-hashtag"})
+    test("a #tag in the middle", []string{"tag"})
+    test("#multiple #hashtags", []string{"multiple", "hashtags"})
+    test("#multiple#hashtags", []string{"multiple"})
+    // Unicode hashtags
+    test("#libellé-français, #日本語ハッシュタグ", []string{"libellé-français", "日本語ハッシュタグ"})
+    // Punctuation breaking tags
+    test(
+        "#a #b, #c; #d. #e! #f? #g* #h\", #i(, #j), #k[, #l], #m{, #n}",
+        []string{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n"},
+    )
+    // Authorized special characters
+    test("#a/@'~-_$%&+=: end", []string{"a/@'~-_$%&+=:"})
+    // Escape punctuation and space
+    test(`#an\ \\espaced\ tag\!`, []string{`an \espaced tag!`})
+    // Hashtags containing only numbers and dots are invalid
+    test("#123, #1.2.3", []string{})
+    // Must not be preceded by a hash or any other valid hashtag character
+    test("##invalid also#invalid", []string{})
+    // Bear's multi-word tags
+    test("#multi word#", []string{"multi word"})
+    test("#surrounded# end", []string{"surrounded"})
+    test("#multi word#end", []string{"multi word"})
+    test("#multi word #other", []string{"multi", "other"})
+    test("#multi word# #other", []string{"multi word", "other"})
+    test("#multi word##other", []string{"multi word"})
+    test("a #multi word# in the middle", []string{"multi word"})
+    test("a #multi word#, and a #tag", []string{"multi word", "tag"})
+    test("#multi, word#", []string{"multi"})
+}
+
+func TestParseColontags(t *testing.T) {
+    test := func(source string, tags []string) {
+        content := parseWithOptions(t, source, ParserOpts{
+            ColontagEnabled: true,
+        })
+        assert.Equal(t, content.Tags, tags)
+    }
+
+    test("", []string{})
+    test(":", []string{})
+    test("::", []string{})
+    test("not:valid:", []string{})
+    test(":no tags:", []string{})
+    test(": no-tags:", []string{})
+    test(":no-tags :", []string{})
+    test(":single-colontag:", []string{"single-colontag"})
+    test("a :tag: in the middle", []string{"tag"})
+    test(":multiple:colontags:", []string{"multiple", "colontags"})
+    test(":multiple::colontags:", []string{"multiple"})
+    test(":multiple: :colontags:", []string{"multiple", "colontags"})
+    test(":multiple:,:colontags:", []string{"multiple", "colontags"})
+    test(":more:than:two:colontags:", []string{"more", "than", "two", "colontags"})
+    test(":multiple :colontags", []string{})
+    test(":multiple :colontags:", []string{"colontags"})
+    // Unicode colontags
+    test(":libellé-français:日本語ハッシュタグ:", []string{"libellé-français", "日本語ハッシュタグ"})
+    // Punctuation is not allowed
+    test(":a : :b,: :c;: :d.: :e!: :f?: :g*: :h\": :i(: :j): :k[: :l]: :m{: :n}:", []string{})
+    // Authorized special characters
+    test(":#a/@'~-_$%&+=: end", []string{"#a/@'~-_$%&+="})
+    // Escape punctuation and space
+    test(`:an\ \\espaced\ tag\!:`, []string{`an \espaced tag!`})
+    // A colontag containing only numbers is valid
+    test(":123:1.2.3:", []string{"123"})
+    // Must not be preceded by a : or any other valid colontag character
+    test("::invalid also:invalid:", []string{})
+}
+
+func TestParseMixedTags(t *testing.T) {
+    test := func(source string, tags []string) {
+        content := parseWithOptions(t, source, ParserOpts{
+            HashtagEnabled:      true,
+            MultiWordTagEnabled: true,
+            ColontagEnabled:     true,
+        })
+        assert.Equal(t, content.Tags, tags)
+    }
+
+    test(":colontag: #tag #word tag#", []string{"colontag", "tag", "word tag"})
+    test(":#colontag: #:tag: #:word:tag:#", []string{"#colontag", ":tag:", ":word:tag:"})
+}
+
+func TestParseTagsFromFrontmatter(t *testing.T) {
+    test := func(source string, tags []string) {
+        content := parse(t, source)
+        assert.Equal(t, content.Tags, tags)
+    }
+
+    test(`---
+Tags:
+    - "#tag1"
+    - tag 2
+---
+
+Body
+`, []string{"tag1", "tag 2"})
+
+    test(`---
+Keywords: [keyword1, "#keyword 2"]
+---
+
+Body
+`, []string{"keyword1", "keyword 2"})
+
+    test(`---
+tags: [tag1, tag 2]
+keywords:
+    - keyword1
+    - keyword 2
+---
+
+Body
+`, []string{"tag1", "tag 2", "keyword1", "keyword 2"})
+
+    // When the value is a string, tags are split on whitespace.
+    test(`---
+Tags: "tag1 #tag-2"
+Keywords: kw1 kw2 kw3
+---
+
+Body
+`, []string{"tag1", "tag-2", "kw1", "kw2", "kw3"})
+}
+
+func TestParseTagsIgnoresDuplicates(t *testing.T) {
+    test := func(source string, tags []string) {
+        content := parse(t, source)
+        assert.Equal(t, content.Tags, tags)
+    }
+
+    test(`---
+Tags: [tag1, "#tag1", tag2]
+---
+
+#tag1 #tag2 #tag3 #tag3 :tag2:
+`, []string{"tag1", "tag2", "tag3"})
+}
+
 func TestParseLinks(t *testing.T) {
     test := func(source string, links []note.Link) {
         content := parse(t, source)
@@ -311,7 +499,15 @@ A link can have [one relation](one "rel-1") or [several relations](several "rel-
 }
 
 func parse(t *testing.T, source string) note.Content {
-    content, err := NewParser().Parse(source)
+    return parseWithOptions(t, source, ParserOpts{
+        HashtagEnabled:      true,
+        MultiWordTagEnabled: true,
+        ColontagEnabled:     true,
+    })
+}
+
+func parseWithOptions(t *testing.T, source string, options ParserOpts) note.Content {
+    content, err := NewParser(options).Parse(source)
     assert.Nil(t, err)
     return *content
 }
diff --git a/adapter/sqlite/collection_dao.go b/adapter/sqlite/collection_dao.go
new file mode 100644
index 0000000..71a8d41
--- /dev/null
+++ b/adapter/sqlite/collection_dao.go
@@ -0,0 +1,188 @@
+package sqlite
+
+import (
+    "database/sql"
+    "fmt"
+
+    "github.com/mickael-menu/zk/core"
+    "github.com/mickael-menu/zk/core/note"
+    "github.com/mickael-menu/zk/util"
+    "github.com/mickael-menu/zk/util/errors"
+)
+
+// CollectionDAO persists collections (e.g. tags) in the SQLite database.
+type CollectionDAO struct {
+    tx     Transaction
+    logger util.Logger
+
+    // Prepared SQL statements
+    createCollectionStmt   *LazyStmt
+    findCollectionStmt     *LazyStmt
+    findAssociationStmt    *LazyStmt
+    createAssociationStmt  *LazyStmt
+    removeAssociationsStmt *LazyStmt
+}
+
+// NewCollectionDAO creates a new instance of a DAO working on the given
+// database transaction.
+func NewCollectionDAO(tx Transaction, logger util.Logger) *CollectionDAO {
+    return &CollectionDAO{
+        tx:     tx,
+        logger: logger,
+
+        // Creates a new collection.
+        createCollectionStmt: tx.PrepareLazy(`
+            INSERT INTO collections (kind, name)
+            VALUES (?, ?)
+        `),
+
+        // Finds a collection's ID from its kind and name.
+        findCollectionStmt: tx.PrepareLazy(`
+            SELECT id FROM collections
+            WHERE kind = ? AND name = ?
+        `),
+
+        // Returns whether a note and a collection are associated.
+        findAssociationStmt: tx.PrepareLazy(`
+            SELECT id FROM notes_collections
+            WHERE note_id = ? AND collection_id = ?
+        `),
+
+        // Creates a new association between a note and a collection.
+        createAssociationStmt: tx.PrepareLazy(`
+            INSERT INTO notes_collections (note_id, collection_id)
+            VALUES (?, ?)
+        `),
+
+        // Removes all associations for the given note.
+        removeAssociationsStmt: tx.PrepareLazy(`
+            DELETE FROM notes_collections
+            WHERE note_id = ?
+        `),
+    }
+}
+
+// FindOrCreate returns the ID of the collection with the given kind and name.
+// It creates the collection if it does not already exist.
+func (d *CollectionDAO) FindOrCreate(kind note.CollectionKind, name string) (core.CollectionId, error) {
+    id, err := d.findCollection(kind, name)
+
+    switch {
+    case err != nil:
+        return id, err
+    case id.IsValid():
+        return id, nil
+    default:
+        return d.create(kind, name)
+    }
+}
+
+func (d *CollectionDAO) findCollection(kind note.CollectionKind, name string) (core.CollectionId, error) {
+    wrap := errors.Wrapperf("failed to get %s named %s", kind, name)
+
+    row, err := d.findCollectionStmt.QueryRow(kind, name)
+    if err != nil {
+        return core.CollectionId(0), wrap(err)
+    }
+
+    var id sql.NullInt64
+    err = row.Scan(&id)
+
+    switch {
+    case err == sql.ErrNoRows:
+        return core.CollectionId(0), nil
+    case err != nil:
+        return core.CollectionId(0), wrap(err)
+    default:
+        return core.CollectionId(id.Int64), nil
+    }
+}
+
+func (d *CollectionDAO) create(kind note.CollectionKind, name string) (core.CollectionId, error) {
+    wrap := errors.Wrapperf("failed to create new %s named %s", kind, name)
+
+    res, err := d.createCollectionStmt.Exec(kind, name)
+    if err != nil {
+        return 0, wrap(err)
+    }
+
+    id, err := res.LastInsertId()
+    if err != nil {
+        return 0, wrap(err)
+    }
+
+    return core.CollectionId(id), nil
+}
+
+// Associate creates a new association between a note and a collection, if it
+// does not already exist.
+func (d *CollectionDAO) Associate(noteId core.NoteId, collectionId core.CollectionId) (core.NoteCollectionId, error) {
+    wrap := errors.Wrapperf("failed to associate note %d to collection %d", noteId, collectionId)
+
+    id, err := d.findAssociation(noteId, collectionId)
+
+    switch {
+    case err != nil:
+        return id, wrap(err)
+    case id.IsValid():
+        return id, nil
+    default:
+        id, err = d.createAssociation(noteId, collectionId)
+        return id, wrap(err)
+    }
+}
+
+func (d *CollectionDAO) findAssociation(noteId core.NoteId, collectionId core.CollectionId) (core.NoteCollectionId, error) {
+    if !noteId.IsValid() || !collectionId.IsValid() {
+        return 0, fmt.Errorf("note ID (%d) or collection ID (%d) not valid", noteId, collectionId)
+    }
+
+    row, err := d.findAssociationStmt.QueryRow(noteId, collectionId)
+    if err != nil {
+        return 0, err
+    }
+
+    var id sql.NullInt64
+    err = row.Scan(&id)
+
+    switch {
+    case err == sql.ErrNoRows:
+        return 0, nil
+    case err != nil:
+        return 0, err
+    default:
+        return core.NoteCollectionId(id.Int64), nil
+    }
+}
+
+func (d *CollectionDAO) createAssociation(noteId core.NoteId, collectionId core.CollectionId) (core.NoteCollectionId, error) {
+    if !noteId.IsValid() || !collectionId.IsValid() {
+        return 0, fmt.Errorf("note ID (%d) or collection ID (%d) not valid", noteId, collectionId)
+    }
+
+    res, err := d.createAssociationStmt.Exec(noteId, collectionId)
+    if err != nil {
+        return 0, err
+    }
+
+    id, err := res.LastInsertId()
+    if err != nil {
+        return 0, err
+    }
+
+    return core.NoteCollectionId(id), nil
+}
+
+// RemoveAssociations deletes all associations with the given note.
+func (d *CollectionDAO) RemoveAssociations(noteId core.NoteId) error {
+    if !noteId.IsValid() {
+        return fmt.Errorf("note ID (%d) not valid", noteId)
+    }
+
+    _, err := d.removeAssociationsStmt.Exec(noteId)
+    if err != nil {
+        return errors.Wrapf(err, "failed to remove associations of note %d", noteId)
+    }
+
+    return nil
+}
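// Illustrative sketch (not part of the patch): how this DAO is meant to be used to
// tag a note inside a transaction. NoteIndexer.associateTags further down in this
// diff does exactly this; noteId is assumed to come from NoteDAO.Add.
//
//	tagId, err := collections.FindOrCreate(note.CollectionKindTag, "adventure")
//	if err == nil {
//		_, err = collections.Associate(noteId, tagId)
//	}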
diff --git a/adapter/sqlite/collection_dao_test.go b/adapter/sqlite/collection_dao_test.go
new file mode 100644
index 0000000..3108559
--- /dev/null
+++ b/adapter/sqlite/collection_dao_test.go
@@ -0,0 +1,72 @@
+package sqlite
+
+import (
+    "testing"
+
+    "github.com/mickael-menu/zk/core"
+    "github.com/mickael-menu/zk/util"
+    "github.com/mickael-menu/zk/util/test/assert"
+)
+
+func TestCollectionDAOFindOrCreate(t *testing.T) {
+    testCollectionDAO(t, func(tx Transaction, dao *CollectionDAO) {
+        // Finds existing ones
+        id, err := dao.FindOrCreate("tag", "adventure")
+        assert.Nil(t, err)
+        assert.Equal(t, id, core.CollectionId(2))
+        id, err = dao.FindOrCreate("genre", "fiction")
+        assert.Nil(t, err)
+        assert.Equal(t, id, core.CollectionId(3))
+
+        // The name is case sensitive
+        id, err = dao.FindOrCreate("tag", "Adventure")
+        assert.Nil(t, err)
+        assert.NotEqual(t, id, core.CollectionId(2))
+
+        // Creates when not found
+        sql := "SELECT id FROM collections WHERE kind = ? AND name = ?"
+        assertNotExist(t, tx, sql, "unknown", "created")
+        id, err = dao.FindOrCreate("unknown", "created")
+        assert.Nil(t, err)
+        assertExist(t, tx, sql, "unknown", "created")
+    })
+}
+
+func TestCollectionDAOAssociate(t *testing.T) {
+    testCollectionDAO(t, func(tx Transaction, dao *CollectionDAO) {
+        // Returns an existing association
+        id, err := dao.Associate(1, 2)
+        assert.Nil(t, err)
+        assert.Equal(t, id, core.NoteCollectionId(2))
+
+        // Creates a new association if missing
+        noteId := core.NoteId(5)
+        collectionId := core.CollectionId(3)
+        sql := "SELECT id FROM notes_collections WHERE note_id = ? AND collection_id = ?"
+        assertNotExist(t, tx, sql, noteId, collectionId)
+        _, err = dao.Associate(noteId, collectionId)
+        assert.Nil(t, err)
+        assertExist(t, tx, sql, noteId, collectionId)
+    })
+}
+
+func TestCollectionDAORemoveAssociations(t *testing.T) {
+    testCollectionDAO(t, func(tx Transaction, dao *CollectionDAO) {
+        noteId := core.NoteId(1)
+        sql := "SELECT id FROM notes_collections WHERE note_id = ?"
+        assertExist(t, tx, sql, noteId)
+        err := dao.RemoveAssociations(noteId)
+        assert.Nil(t, err)
+        assertNotExist(t, tx, sql, noteId)
+
+        // Removing the associations of a note that has none must not fail.
+        err = dao.RemoveAssociations(999)
+        assert.Nil(t, err)
+    })
+}
+
+func testCollectionDAO(t *testing.T, callback func(tx Transaction, dao *CollectionDAO)) {
+    testTransaction(t, func(tx Transaction) {
+        callback(tx, NewCollectionDAO(tx, &util.NullLogger))
+    })
+}
diff --git a/adapter/sqlite/db.go b/adapter/sqlite/db.go
index 92cfef2..0a111dc 100644
--- a/adapter/sqlite/db.go
+++ b/adapter/sqlite/db.go
@@ -55,7 +55,7 @@
             return err
         }
 
-        if version == 0 {
+        if version <= 0 {
             err = tx.ExecStmts([]string{
                 // Notes
                 `CREATE TABLE IF NOT EXISTS notes (
@@ -116,6 +116,35 @@
             }
         }
 
+        if version <= 1 {
+            err = tx.ExecStmts([]string{
+                // Collections
+                `CREATE TABLE IF NOT EXISTS collections (
+                    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+                    kind TEXT NOT NULL,
+                    name TEXT NOT NULL,
+                    UNIQUE(kind, name)
+                )`,
+                `CREATE INDEX IF NOT EXISTS index_collections ON collections (kind, name)`,
+
+                // Note-Collection association
+                `CREATE TABLE IF NOT EXISTS notes_collections (
+                    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+                    note_id INTEGER NOT NULL REFERENCES notes(id)
+                        ON DELETE CASCADE,
+                    collection_id INTEGER NOT NULL REFERENCES collections(id)
+                        ON DELETE CASCADE
+                )`,
+                `CREATE INDEX IF NOT EXISTS index_notes_collections ON notes_collections (note_id, collection_id)`,
+
+                `PRAGMA user_version = 2`,
+            })
+
+            if err != nil {
+                return err
+            }
+        }
+
         return nil
     })
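// Illustrative sketch (not part of the patch): with the version-2 schema above, the
// notes carrying a given tag can be counted with a single join. The query is shown
// for illustration only; no finder uses it in this change, and tx is assumed to be
// an open Transaction.
//
//	row := tx.QueryRow(`
//		SELECT COUNT(*)
//		  FROM notes n
//		  JOIN notes_collections nc ON nc.note_id = n.id
//		  JOIN collections c ON c.id = nc.collection_id
//		 WHERE c.kind = 'tag' AND c.name = ?`, "fiction")
//
//	var count int
//	err := row.Scan(&count)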
diff --git a/adapter/sqlite/db_test.go b/adapter/sqlite/db_test.go
index fe3f793..73fbfe8 100644
--- a/adapter/sqlite/db_test.go
+++ b/adapter/sqlite/db_test.go
@@ -33,7 +33,7 @@
         var version int
         err := tx.QueryRow("PRAGMA user_version").Scan(&version)
         assert.Nil(t, err)
-        assert.Equal(t, version, 1)
+        assert.Equal(t, version, 2)
 
         _, err = tx.Exec(`
             INSERT INTO notes (path, sortable_path, title, body, word_count, checksum)
diff --git a/adapter/sqlite/fixtures/default/collections.yml b/adapter/sqlite/fixtures/default/collections.yml
new file mode 100644
index 0000000..5fdc128
--- /dev/null
+++ b/adapter/sqlite/fixtures/default/collections.yml
@@ -0,0 +1,12 @@
+- id: 1
+  kind: "tag"
+  name: "fiction"
+- id: 2
+  kind: "tag"
+  name: "adventure"
+- id: 3
+  kind: "genre"
+  name: "fiction"
+- id: 4
+  kind: "tag"
+  name: "fantasy"
diff --git a/adapter/sqlite/fixtures/default/notes_collections.yml b/adapter/sqlite/fixtures/default/notes_collections.yml
new file mode 100644
index 0000000..34315b7
--- /dev/null
+++ b/adapter/sqlite/fixtures/default/notes_collections.yml
@@ -0,0 +1,9 @@
+- id: 1
+  note_id: 1
+  collection_id: 1
+- id: 2
+  note_id: 1
+  collection_id: 2
+- id: 3
+  note_id: 2
+  collection_id: 3
diff --git a/adapter/sqlite/note_dao.go b/adapter/sqlite/note_dao.go
index ca1206b..fa0592f 100644
--- a/adapter/sqlite/note_dao.go
+++ b/adapter/sqlite/note_dao.go
@@ -3,9 +3,11 @@
 import (
     "database/sql"
     "fmt"
+    "strconv"
     "strings"
     "time"
 
+    "github.com/mickael-menu/zk/core"
     "github.com/mickael-menu/zk/core/note"
     "github.com/mickael-menu/zk/util"
     "github.com/mickael-menu/zk/util/errors"
@@ -16,7 +18,7 @@ import (
 )
 
 // NoteDAO persists notes in the SQLite database.
-// It implements the core ports note.Indexer and note.Finder.
+// It implements the core port note.Finder.
 type NoteDAO struct {
     tx     Transaction
     logger util.Logger
@@ -101,11 +103,9 @@ func NewNoteDAO(tx Transaction, logger util.Logger) *NoteDAO {
 
 // Indexed returns file info of all indexed notes.
 func (d *NoteDAO) Indexed() (<-chan paths.Metadata, error) {
-    wrap := errors.Wrapper("failed to get indexed notes")
-
     rows, err := d.indexedStmt.Query()
     if err != nil {
-        return nil, wrap(err)
+        return nil, err
     }
 
     c := make(chan paths.Metadata)
@@ -120,7 +120,7 @@
         for rows.Next() {
             err := rows.Scan(&path, &modified)
             if err != nil {
-                d.logger.Err(wrap(err))
+                d.logger.Err(err)
             }
 
             c <- paths.Metadata{
@@ -131,7 +131,7 @@
         err = rows.Err()
         if err != nil {
-            d.logger.Err(wrap(err))
+            d.logger.Err(err)
         }
     }()
 
@@ -139,9 +139,7 @@
 }
 
 // Add inserts a new note to the index.
-func (d *NoteDAO) Add(note note.Metadata) (int64, error) {
-    wrap := errors.Wrapperf("%v: can't add note to the index", note.Path)
-
+func (d *NoteDAO) Add(note note.Metadata) (core.NoteId, error) {
     // For sortable_path, we replace in path / by the shortest non printable
     // character available to make it sortable. Without this, sorting by the
     // path would be a lexicographical sort instead of being the same order
@@ -155,28 +153,27 @@ func (d *NoteDAO) Add(note note.Metadata) (int64, error) {
         note.Created, note.Modified,
     )
     if err != nil {
-        return 0, wrap(err)
+        return 0, err
     }
 
-    id, err := res.LastInsertId()
+    lastId, err := res.LastInsertId()
     if err != nil {
-        return 0, wrap(err)
+        return core.NoteId(0), err
     }
 
+    id := core.NoteId(lastId)
     err = d.addLinks(id, note)
     return id, err
 }
 
 // Update modifies an existing note.
-func (d *NoteDAO) Update(note note.Metadata) error {
-    wrap := errors.Wrapperf("%v: failed to update note index", note.Path)
-
+func (d *NoteDAO) Update(note note.Metadata) (core.NoteId, error) {
     id, err := d.findIdByPath(note.Path)
     if err != nil {
-        return wrap(err)
+        return 0, err
     }
-    if !id.Valid {
-        return wrap(errors.New("note not found in the index"))
+    if !id.IsValid() {
+        return 0, errors.New("note not found in the index")
     }
 
     _, err = d.updateStmt.Exec(
@@ -184,33 +181,33 @@ func (d *NoteDAO) Update(note note.Metadata) error {
         note.Path,
     )
     if err != nil {
-        return wrap(err)
+        return id, err
     }
 
-    _, err = d.removeLinksStmt.Exec(id.Int64)
+    _, err = d.removeLinksStmt.Exec(d.idToSql(id))
     if err != nil {
-        return wrap(err)
+        return id, err
     }
 
-    err = d.addLinks(id.Int64, note)
-    return wrap(err)
+    err = d.addLinks(id, note)
+    return id, err
 }
 
 // addLinks inserts all the outbound links of the given note.
-func (d *NoteDAO) addLinks(id int64, note note.Metadata) error {
+func (d *NoteDAO) addLinks(id core.NoteId, note note.Metadata) error {
     for _, link := range note.Links {
         targetId, err := d.findIdByPathPrefix(link.Href)
         if err != nil {
             return err
         }
 
-        _, err = d.addLinkStmt.Exec(id, targetId, link.Title, link.Href, link.External, joinLinkRels(link.Rels), link.Snippet)
+        _, err = d.addLinkStmt.Exec(id, d.idToSql(targetId), link.Title, link.Href, link.External, joinLinkRels(link.Rels), link.Snippet)
         if err != nil {
             return err
         }
     }
 
-    _, err := d.setLinksTargetStmt.Exec(id, note.Path)
+    _, err := d.setLinksTargetStmt.Exec(int64(id), note.Path)
     return err
 }
 
@@ -226,61 +223,59 @@ func joinLinkRels(rels []string) string {
 
 // Remove deletes the note with the given path from the index.
 func (d *NoteDAO) Remove(path string) error {
-    wrap := errors.Wrapperf("%v: failed to remove note index", path)
-
     id, err := d.findIdByPath(path)
     if err != nil {
-        return wrap(err)
+        return err
     }
-    if !id.Valid {
-        return wrap(errors.New("note not found in the index"))
+    if !id.IsValid() {
+        return errors.New("note not found in the index")
     }
 
     _, err = d.removeStmt.Exec(id)
-    return wrap(err)
+    return err
 }
 
-func (d *NoteDAO) findIdByPath(path string) (sql.NullInt64, error) {
+func (d *NoteDAO) findIdByPath(path string) (core.NoteId, error) {
     row, err := d.findIdByPathStmt.QueryRow(path)
     if err != nil {
-        return sql.NullInt64{}, err
+        return core.NoteId(0), err
     }
     return idForRow(row)
 }
 
-func (d *NoteDAO) findIdsByPathPrefixes(paths []string) ([]int64, error) {
-    ids := make([]int64, 0)
+func (d *NoteDAO) findIdsByPathPrefixes(paths []string) ([]core.NoteId, error) {
+    ids := make([]core.NoteId, 0)
     for _, path := range paths {
         id, err := d.findIdByPathPrefix(path)
         if err != nil {
             return ids, err
         }
-        if id.Valid {
-            ids = append(ids, id.Int64)
+        if id.IsValid() {
+            ids = append(ids, id)
         }
     }
     return ids, nil
 }
 
-func (d *NoteDAO) findIdByPathPrefix(path string) (sql.NullInt64, error) {
+func (d *NoteDAO) findIdByPathPrefix(path string) (core.NoteId, error) {
     row, err := d.findIdByPathPrefixStmt.QueryRow(path)
     if err != nil {
-        return sql.NullInt64{}, err
+        return core.NoteId(0), err
     }
     return idForRow(row)
 }
 
-func idForRow(row *sql.Row) (sql.NullInt64, error) {
+func idForRow(row *sql.Row) (core.NoteId, error) {
     var id sql.NullInt64
     err := row.Scan(&id)
 
     switch {
     case err == sql.ErrNoRows:
-        return id, nil
+        return core.NoteId(0), nil
     case err != nil:
-        return id, err
+        return core.NoteId(0), err
     default:
-        return id, err
+        return core.NoteId(id.Int64), nil
     }
 }
 
@@ -353,7 +348,7 @@ func (d *NoteDAO) findRows(opts note.FinderOpts) (*sql.Rows, error) {
             if len(ids) == 0 {
                 return nil
             }
-            idsList := "(" + strutil.JoinInt64(ids, ",") + ")"
+            idsList := "(" + d.joinIds(ids, ",") + ")"
 
             linksSrc := "links"
 
@@ -614,3 +609,19 @@ func pathRegex(path string) string {
     path = icu.EscapePattern(path)
     return path + "[^/]*|" + path + "/.+"
 }
+
+func (d *NoteDAO) idToSql(id core.NoteId) sql.NullInt64 {
+    if id.IsValid() {
+        return sql.NullInt64{Int64: int64(id), Valid: true}
+    } else {
+        return sql.NullInt64{}
+    }
+}
+
+func (d *NoteDAO) joinIds(ids []core.NoteId, delimiter string) string {
+    strs := make([]string, 0)
+    for _, i := range ids {
+        strs = append(strs, strconv.FormatInt(int64(i), 10))
+    }
+    return strings.Join(strs, delimiter)
+}
diff --git a/adapter/sqlite/note_dao_test.go b/adapter/sqlite/note_dao_test.go
index b9f89f5..48938df 100644
--- a/adapter/sqlite/note_dao_test.go
+++ b/adapter/sqlite/note_dao_test.go
@@ -6,6 +6,7 @@ import (
     "testing"
     "time"
 
+    "github.com/mickael-menu/zk/core"
     "github.com/mickael-menu/zk/core/note"
     "github.com/mickael-menu/zk/util"
     "github.com/mickael-menu/zk/util/paths"
@@ -170,14 +171,14 @@ func TestNoteDAOAddWithLinks(t *testing.T) {
         assert.Equal(t, rows, []linkRow{
             {
                 SourceId: id,
-                TargetId: intPointer(2),
+                TargetId: idPointer(2),
                 Title:    "Same dir",
                 Href:     "log/2021-01-04",
                 Rels:     "\x01rel-1\x01rel-2\x01",
             },
             {
                 SourceId: id,
-                TargetId: intPointer(4),
+                TargetId: idPointer(4),
                 Title:    "Relative",
                 Href:     "f39c8",
                 Rels:     "",
             },
             {
                 SourceId: id,
-                TargetId: intPointer(4),
+                TargetId: idPointer(4),
                 Title:    "Second is added",
                 Href:     "f39c8",
                 Rels:     "\x01second\x01",
             },
@@ -234,13 +235,13 @@ func TestNoteDAOAddFillsLinksMissingTargetId(t *testing.T) {
 func TestNoteDAOAddExistingNote(t *testing.T) {
     testNoteDAO(t, func(tx Transaction, dao *NoteDAO) {
         _, err := dao.Add(note.Metadata{Path: "ref/test/a.md"})
-        assert.Err(t, err, "ref/test/a.md: can't add note to the index: UNIQUE constraint failed: notes.path")
+        assert.Err(t, err, "UNIQUE constraint failed: notes.path")
     })
 }
 
 func TestNoteDAOUpdate(t *testing.T) {
     testNoteDAO(t, func(tx Transaction, dao *NoteDAO) {
-        err := dao.Update(note.Metadata{
+        id, err := dao.Update(note.Metadata{
             Path:  "ref/test/a.md",
             Title: "Updated note",
             Lead:  "Updated lead",
@@ -252,6 +253,7 @@ func TestNoteDAOUpdate(t *testing.T) {
             Modified: time.Date(2020, 11, 22, 16, 49, 47, 0, time.UTC),
         })
         assert.Nil(t, err)
+        assert.Equal(t, id, core.NoteId(6))
 
         row, err := queryNoteRow(tx, `path = "ref/test/a.md"`)
         assert.Nil(t, err)
@@ -271,10 +273,10 @@
 func TestNoteDAOUpdateUnknown(t *testing.T) {
     testNoteDAO(t, func(tx Transaction, dao *NoteDAO) {
-        err := dao.Update(note.Metadata{
+        _, err := dao.Update(note.Metadata{
             Path: "unknown/unknown.md",
         })
-        assert.Err(t, err, "unknown/unknown.md: failed to update note index: note not found in the index")
+        assert.Err(t, err, "note not found in the index")
     })
 }
 
@@ -284,7 +286,7 @@ func TestNoteDAOUpdateWithLinks(t *testing.T) {
         assert.Equal(t, links, []linkRow{
             {
                 SourceId: 1,
-                TargetId: intPointer(2),
+                TargetId: idPointer(2),
                 Title:    "An internal link",
                 Href:     "log/2021-01-04.md",
                 Snippet:  "[[An internal link]]",
             },
             },
         })
 
-        err := dao.Update(note.Metadata{
+        _, err := dao.Update(note.Metadata{
             Path: "log/2021-01-03.md",
             Links: []note.Link{
                 {
@@ -323,7 +325,7 @@ func TestNoteDAOUpdateWithLinks(t *testing.T) {
         assert.Equal(t, links, []linkRow{
             {
                 SourceId: 1,
-                TargetId: intPointer(3),
+                TargetId: idPointer(3),
                 Title:    "A new link",
                 Href:     "index",
                 Rels:     "\x01rel\x01",
@@ -357,7 +359,7 @@ func TestNoteDAORemove(t *testing.T) {
 func TestNoteDAORemoveUnknown(t *testing.T) {
     testNoteDAO(t, func(tx Transaction, dao *NoteDAO) {
         err := dao.Remove("unknown/unknown.md")
-        assert.Err(t, err, "unknown/unknown.md: failed to remove note index: note not found in the index")
+        assert.Err(t, err, "note not found in the index")
     })
 }
 
@@ -368,7 +370,7 @@ func TestNoteDAORemoveCascadeLinks(t *testing.T) {
         assert.Equal(t, len(links) > 0, true)
 
         links = queryLinkRows(t, tx, `id = 4`)
-        assert.Equal(t, *links[0].TargetId, int64(1))
+        assert.Equal(t, *links[0].TargetId, core.NoteId(1))
 
         err := dao.Remove("log/2021-01-03.md")
         assert.Nil(t, err)
@@ -922,8 +924,8 @@ func queryNoteRow(tx Transaction, where string) (noteRow, error) {
 }
 
 type linkRow struct {
-    SourceId                   int64
-    TargetId                   *int64
+    SourceId                   core.NoteId
+    TargetId                   *core.NoteId
     Href, Title, Rels, Snippet string
     External                   bool
 }
@@ -941,8 +943,14 @@ func queryLinkRows(t *testing.T, tx Transaction, where string) []linkRow {
     for rows.Next() {
         var row linkRow
-        err = rows.Scan(&row.SourceId, &row.TargetId, &row.Title, &row.Href, &row.External, &row.Rels, &row.Snippet)
+        var sourceId int64
+        var targetId *int64
+        err = rows.Scan(&sourceId, &targetId, &row.Title, &row.Href, &row.External, &row.Rels, &row.Snippet)
         assert.Nil(t, err)
+        row.SourceId = core.NoteId(sourceId)
+        if targetId != nil {
+            row.TargetId = idPointer(*targetId)
+        }
         links = append(links, row)
     }
     rows.Close()
@@ -951,6 +959,7 @@ func queryLinkRows(t *testing.T, tx Transaction, where string) []linkRow {
     return links
 }
 
-func intPointer(i int64) *int64 {
-    return &i
+func idPointer(i int64) *core.NoteId {
+    id := core.NoteId(i)
+    return &id
 }
diff --git a/adapter/sqlite/note_indexer.go b/adapter/sqlite/note_indexer.go
new file mode 100644
index 0000000..8b3de50
--- /dev/null
+++ b/adapter/sqlite/note_indexer.go
@@ -0,0 +1,91 @@
+package sqlite
+
+import (
+    "github.com/mickael-menu/zk/core"
+    "github.com/mickael-menu/zk/core/note"
+    "github.com/mickael-menu/zk/util"
+    "github.com/mickael-menu/zk/util/errors"
+    "github.com/mickael-menu/zk/util/paths"
+)
+
+// NoteIndexer persists note indexing results in the SQLite database.
+// It implements the core port note.Indexer and acts as a facade to the DAOs.
+type NoteIndexer struct {
+    tx          Transaction
+    notes       *NoteDAO
+    collections *CollectionDAO
+    logger      util.Logger
+}
+
+func NewNoteIndexer(notes *NoteDAO, collections *CollectionDAO, logger util.Logger) *NoteIndexer {
+    return &NoteIndexer{
+        notes:       notes,
+        collections: collections,
+        logger:      logger,
+    }
+}
+
+// Indexed returns the list of indexed note file metadata.
+func (i *NoteIndexer) Indexed() (<-chan paths.Metadata, error) {
+    c, err := i.notes.Indexed()
+    return c, errors.Wrap(err, "failed to get indexed notes")
+}
+
+// Add indexes a new note from its metadata.
+func (i *NoteIndexer) Add(metadata note.Metadata) (core.NoteId, error) {
+    wrap := errors.Wrapperf("%v: failed to index the note", metadata.Path)
+    noteId, err := i.notes.Add(metadata)
+    if err != nil {
+        return 0, wrap(err)
+    }
+
+    err = i.associateTags(noteId, metadata.Tags)
+    if err != nil {
+        return 0, wrap(err)
+    }
+
+    return noteId, nil
+}
+
+// Update updates the metadata of an already indexed note.
+func (i *NoteIndexer) Update(metadata note.Metadata) error {
+    wrap := errors.Wrapperf("%v: failed to update note index", metadata.Path)
+
+    noteId, err := i.notes.Update(metadata)
+    if err != nil {
+        return wrap(err)
+    }
+
+    err = i.collections.RemoveAssociations(noteId)
+    if err != nil {
+        return wrap(err)
+    }
+
+    err = i.associateTags(noteId, metadata.Tags)
+    if err != nil {
+        return wrap(err)
+    }
+
+    return err
+}
+
+func (i *NoteIndexer) associateTags(noteId core.NoteId, tags []string) error {
+    for _, tag := range tags {
+        tagId, err := i.collections.FindOrCreate(note.CollectionKindTag, tag)
+        if err != nil {
+            return err
+        }
+        _, err = i.collections.Associate(noteId, tagId)
+        if err != nil {
+            return err
+        }
+    }
+
+    return nil
+}
+
+// Remove deletes a note from the index.
+func (i *NoteIndexer) Remove(path string) error {
+    err := i.notes.Remove(path)
+    return errors.Wrapf(err, "%v: failed to remove note index", path)
+}
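// Illustrative sketch (not part of the patch): the indexer is just the two DAOs
// glued together on one transaction, which is how Container.NoteIndexer wires it
// further down in this diff. tx and logger are assumed to be in scope, and the
// note path is a made-up example.
//
//	indexer := sqlite.NewNoteIndexer(
//		sqlite.NewNoteDAO(tx, logger),
//		sqlite.NewCollectionDAO(tx, logger),
//		logger,
//	)
//
//	_, err := indexer.Add(note.Metadata{
//		Path: "log/2021-02-01.md",
//		Tags: []string{"journal", "fiction"},
//	})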
diff --git a/adapter/sqlite/note_indexer_test.go b/adapter/sqlite/note_indexer_test.go
new file mode 100644
index 0000000..edf3d90
--- /dev/null
+++ b/adapter/sqlite/note_indexer_test.go
@@ -0,0 +1,64 @@
+package sqlite
+
+import (
+    "testing"
+
+    "github.com/mickael-menu/zk/core"
+    "github.com/mickael-menu/zk/core/note"
+    "github.com/mickael-menu/zk/util"
+    "github.com/mickael-menu/zk/util/test/assert"
+)
+
+func TestNoteIndexerAddWithTags(t *testing.T) {
+    testNoteIndexer(t, func(tx Transaction, indexer *NoteIndexer) {
+        assertSQL := func(after bool) {
+            assertTagExistsOrNot(t, tx, true, "fiction")
+            assertTagExistsOrNot(t, tx, after, "new-tag")
+        }
+
+        assertSQL(false)
+        id, err := indexer.Add(note.Metadata{
+            Path: "log/added.md",
+            Tags: []string{"new-tag", "fiction"},
+        })
+        assert.Nil(t, err)
+        assertSQL(true)
+        assertTaggedOrNot(t, tx, true, id, "new-tag")
+        assertTaggedOrNot(t, tx, true, id, "fiction")
+    })
+}
+
+func TestNoteIndexerUpdateWithTags(t *testing.T) {
+    testNoteIndexer(t, func(tx Transaction, indexer *NoteIndexer) {
+        id := core.NoteId(1)
+
+        assertSQL := func(after bool) {
+            assertTaggedOrNot(t, tx, true, id, "fiction")
+            assertTaggedOrNot(t, tx, after, id, "new-tag")
+            assertTaggedOrNot(t, tx, after, id, "fantasy")
+        }
+
+        assertSQL(false)
+        err := indexer.Update(note.Metadata{
+            Path: "log/2021-01-03.md",
+            Tags: []string{"new-tag", "fiction", "fantasy"},
+        })
+        assert.Nil(t, err)
+        assertSQL(true)
+    })
+}
+
+func testNoteIndexer(t *testing.T, callback func(tx Transaction, dao *NoteIndexer)) {
+    testTransaction(t, func(tx Transaction) {
+        logger := &util.NullLogger
+        callback(tx, NewNoteIndexer(NewNoteDAO(tx, logger), NewCollectionDAO(tx, logger), logger))
+    })
+}
+
+func assertTagExistsOrNot(t *testing.T, tx Transaction, shouldExist bool, tag string) {
+    assertExistOrNot(t, tx, shouldExist, "SELECT id FROM collections WHERE kind = 'tag' AND name = ?", tag)
+}
+
+func assertTaggedOrNot(t *testing.T, tx Transaction, shouldBeTagged bool, noteId core.NoteId, tag string) {
+    assertExistOrNot(t, tx, shouldBeTagged, "SELECT id FROM notes_collections WHERE note_id = ? AND collection_id IS (SELECT id FROM collections WHERE kind = 'tag' AND name = ?)", noteId, tag)
+}
diff --git a/adapter/sqlite/transaction_test.go b/adapter/sqlite/transaction_test.go
index 4ca0dbc..8b8b27c 100644
--- a/adapter/sqlite/transaction_test.go
+++ b/adapter/sqlite/transaction_test.go
@@ -47,3 +47,30 @@ func testTransactionWithFixtures(t *testing.T, fixturesDir opt.String, test func
     })
     assert.Nil(t, err)
 }
+
+func assertExistOrNot(t *testing.T, tx Transaction, shouldExist bool, sql string, args ...interface{}) {
+    if shouldExist {
+        assertExist(t, tx, sql, args...)
+    } else {
+        assertNotExist(t, tx, sql, args...)
+    }
+}
+
+func assertExist(t *testing.T, tx Transaction, sql string, args ...interface{}) {
+    if !exists(t, tx, sql, args...) {
+        t.Errorf("SQL query did not return any result: %s, with arguments %v", sql, args)
+    }
+}
+
+func assertNotExist(t *testing.T, tx Transaction, sql string, args ...interface{}) {
+    if exists(t, tx, sql, args...) {
+        t.Errorf("SQL query returned a result: %s, with arguments %v", sql, args)
+    }
+}
+
+func exists(t *testing.T, tx Transaction, sql string, args ...interface{}) bool {
+    var exists int
+    err := tx.QueryRow("SELECT EXISTS ("+sql+")", args...).Scan(&exists)
+    assert.Nil(t, err)
+    return exists == 1
+}
diff --git a/cmd/container.go b/cmd/container.go
index 31e755f..bece518 100644
--- a/cmd/container.go
+++ b/cmd/container.go
@@ -58,7 +58,11 @@ func (c *Container) TemplateLoader(lang string) *handlebars.Loader {
 }
 
 func (c *Container) Parser() *markdown.Parser {
-    return markdown.NewParser()
+    return markdown.NewParser(markdown.ParserOpts{
+        HashtagEnabled:      true,
+        MultiWordTagEnabled: false,
+        ColontagEnabled:     true,
+    })
 }
 
 func (c *Container) NoteFinder(tx sqlite.Transaction, opts fzf.NoteFinderOpts) *fzf.NoteFinder {
@@ -66,6 +70,12 @@ func (c *Container) NoteFinder(tx sqlite.Transaction, opts fzf.NoteFinderOpts) *
     return fzf.NewNoteFinder(opts, notes, c.Terminal)
 }
 
+func (c *Container) NoteIndexer(tx sqlite.Transaction) *sqlite.NoteIndexer {
+    notes := sqlite.NewNoteDAO(tx, c.Logger)
+    collections := sqlite.NewCollectionDAO(tx, c.Logger)
+    return sqlite.NewNoteIndexer(notes, collections, c.Logger)
+}
+
 // Database returns the DB instance for the given notebook, after executing any
 // pending migration.
 func (c *Container) Database(path string) (*sqlite.DB, error) {
diff --git a/cmd/index.go b/cmd/index.go
index cf873e7..11dbe55 100644
--- a/cmd/index.go
+++ b/cmd/index.go
@@ -40,13 +40,11 @@ func (cmd *Index) Run(container *Container) error {
 
     var stats note.IndexingStats
     err = db.WithTransaction(func(tx sqlite.Transaction) error {
-        notes := sqlite.NewNoteDAO(tx, container.Logger)
-
         stats, err = note.Index(
             zk,
             cmd.Force,
             container.Parser(),
-            notes,
+            container.NoteIndexer(tx),
             container.Logger,
             func(change paths.DiffChange) {
                 bar.Add(1)
diff --git a/core/ids.go b/core/ids.go
new file mode 100644
index 0000000..dc9eb93
--- /dev/null
+++ b/core/ids.go
@@ -0,0 +1,19 @@
+package core
+
+type NoteId int64
+
+func (id NoteId) IsValid() bool {
+    return id > 0
+}
+
+type CollectionId int64
+
+func (id CollectionId) IsValid() bool {
+    return id > 0
+}
+
+type NoteCollectionId int64
+
+func (id NoteCollectionId) IsValid() bool {
+    return id > 0
+}
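// Illustrative sketch (not part of the patch): the zero value of these ID types
// stands in for SQL NULL, so callers test IsValid() instead of passing
// sql.NullInt64 around (see idForRow and idToSql in the SQLite adapter above).
//
//	var id core.NoteId          // zero value, i.e. "no row"
//	id.IsValid()                // false
//	core.NoteId(42).IsValid()   // true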
diff --git a/core/note/index.go b/core/note/index.go
index 9704645..1b049f1 100644
--- a/core/note/index.go
+++ b/core/note/index.go
@@ -8,6 +8,7 @@ import (
     "strings"
     "time"
 
+    "github.com/mickael-menu/zk/core"
     "github.com/mickael-menu/zk/core/zk"
     "github.com/mickael-menu/zk/util"
     "github.com/mickael-menu/zk/util/errors"
@@ -25,6 +26,7 @@ type Metadata struct {
     RawContent string
     WordCount  int
     Links      []Link
+    Tags       []string
     Created    time.Time
     Modified   time.Time
     Checksum   string
@@ -57,7 +59,7 @@ type Indexer interface {
     // Indexed returns the list of indexed note file metadata.
     Indexed() (<-chan paths.Metadata, error)
     // Add indexes a new note from its metadata.
-    Add(metadata Metadata) (int64, error)
+    Add(metadata Metadata) (core.NoteId, error)
     // Update updates the metadata of an already indexed note.
     Update(metadata Metadata) error
     // Remove deletes a note from the index.
@@ -116,7 +118,8 @@ func Index(zk *zk.Zk, force bool, parser Parser, indexer Indexer, logger util.Lo
 func metadata(path string, zk *zk.Zk, parser Parser) (Metadata, error) {
     metadata := Metadata{
         Path:  path,
-        Links: make([]Link, 0),
+        Links: []Link{},
+        Tags:  []string{},
     }
 
     absPath := filepath.Join(zk.Path, path)
@@ -135,6 +138,7 @@ func metadata(path string, zk *zk.Zk, parser Parser) (Metadata, error) {
     metadata.RawContent = contentStr
     metadata.WordCount = len(strings.Fields(contentStr))
     metadata.Links = make([]Link, 0)
+    metadata.Tags = contentParts.Tags
     metadata.Checksum = fmt.Sprintf("%x", sha256.Sum256(content))
 
     for _, link := range contentParts.Links {
diff --git a/core/note/parse.go b/core/note/parse.go
index 13bab9c..da9513f 100644
--- a/core/note/parse.go
+++ b/core/note/parse.go
@@ -11,10 +11,13 @@ type Content struct {
     Lead opt.String
     // Body is the content of the note, including the Lead but without the Title.
     Body opt.String
+    // Tags is the list of tags found in the note content.
+    Tags []string
     // Links is the list of outbound links found in the note.
     Links []Link
 }
 
+// Link links a note to another note or an external resource.
 type Link struct {
     Title string
     Href  string
@@ -36,3 +39,10 @@
 type Parser interface {
     Parse(source string) (*Content, error)
 }
+
+// CollectionKind defines a kind of note collection, such as tags.
+type CollectionKind string
+
+const (
+    CollectionKindTag CollectionKind = "tag"
+)
diff --git a/docs/.zk/templates/default.md b/docs/.zk/templates/default.md
index b2f2c4a..cb44174 100644
--- a/docs/.zk/templates/default.md
+++ b/docs/.zk/templates/default.md
@@ -1,2 +1,3 @@
 # {{title}}
 
+{{content}}
diff --git a/util/strings/strings.go b/util/strings/strings.go
index 0835556..5f620b5 100644
--- a/util/strings/strings.go
+++ b/util/strings/strings.go
@@ -75,6 +75,10 @@ func IsURL(s string) bool {
 
 // RemoveDuplicates keeps only unique strings in the source.
 func RemoveDuplicates(strings []string) []string {
+    if strings == nil {
+        return nil
+    }
+
     check := make(map[string]bool)
     res := make([]string, 0)
     for _, val := range strings {
diff --git a/util/test/assert/assert.go b/util/test/assert/assert.go
index 965ec3a..92004bc 100644
--- a/util/test/assert/assert.go
+++ b/util/test/assert/assert.go
@@ -40,6 +40,15 @@ func Equal(t *testing.T, actual, expected interface{}) {
     }
 }
 
+func NotEqual(t *testing.T, actual, other interface{}) {
+    if reflect.DeepEqual(actual, other) || cmp.Equal(actual, other) {
+        t.Errorf("Received (type %v):\n% #v", reflect.TypeOf(actual), pretty.Formatter(actual))
+        t.Errorf("\n---\n")
+        t.Errorf("Expected to be different from (type %v):\n% #v", reflect.TypeOf(other), pretty.Formatter(other))
+        t.Errorf("\n---\n")
+    }
+}
+
 func toJSON(t *testing.T, obj interface{}) string {
     json, err := json.Marshal(obj)
     // json, err := json.MarshalIndent(obj, "", "  ")