Add go dependencies for mdtoc
This commit is contained in:
parent
cb33c4ed26
commit
36959a4878
63 changed files with 12675 additions and 0 deletions
184
vendor/github.com/mmarkdown/mmark/mparser/bibliography.go
generated
vendored
Normal file
184
vendor/github.com/mmarkdown/mmark/mparser/bibliography.go
generated
vendored
Normal file
|
|
@ -0,0 +1,184 @@
|
|||
package mparser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/xml"
|
||||
"log"
|
||||
|
||||
"github.com/gomarkdown/markdown/ast"
|
||||
"github.com/mmarkdown/mmark/mast"
|
||||
"github.com/mmarkdown/mmark/mast/reference"
|
||||
)
|
||||
|
||||
// CitationToBibliography walks the AST and gets all the citations on HTML blocks and groups them into
|
||||
// normative and informative references.
|
||||
func CitationToBibliography(doc ast.Node) (normative ast.Node, informative ast.Node) {
|
||||
seen := map[string]*mast.BibliographyItem{}
|
||||
raw := map[string][]byte{}
|
||||
|
||||
// Gather all citations.
|
||||
// Gather all reference HTML Blocks to see if we have XML we can output.
|
||||
ast.WalkFunc(doc, func(node ast.Node, entering bool) ast.WalkStatus {
|
||||
switch c := node.(type) {
|
||||
case *ast.Citation:
|
||||
for i, d := range c.Destination {
|
||||
if _, ok := seen[string(bytes.ToLower(d))]; ok {
|
||||
continue
|
||||
}
|
||||
ref := &mast.BibliographyItem{}
|
||||
ref.Anchor = d
|
||||
ref.Type = c.Type[i]
|
||||
|
||||
seen[string(d)] = ref
|
||||
}
|
||||
case *ast.HTMLBlock:
|
||||
anchor := anchorFromReference(c.Literal)
|
||||
if anchor != nil {
|
||||
raw[string(bytes.ToLower(anchor))] = c.Literal
|
||||
}
|
||||
}
|
||||
return ast.GoToNext
|
||||
})
|
||||
|
||||
for _, r := range seen {
|
||||
// If we have a reference anchor and the raw XML add that here.
|
||||
if raw, ok := raw[string(bytes.ToLower(r.Anchor))]; ok {
|
||||
var x reference.Reference
|
||||
if e := xml.Unmarshal(raw, &x); e != nil {
|
||||
log.Printf("Failed to unmarshal reference: %q: %s", r.Anchor, e)
|
||||
continue
|
||||
}
|
||||
r.Reference = &x
|
||||
}
|
||||
|
||||
switch r.Type {
|
||||
case ast.CitationTypeInformative:
|
||||
if informative == nil {
|
||||
informative = &mast.Bibliography{Type: ast.CitationTypeInformative}
|
||||
}
|
||||
|
||||
ast.AppendChild(informative, r)
|
||||
case ast.CitationTypeSuppressed:
|
||||
fallthrough
|
||||
case ast.CitationTypeNormative:
|
||||
if normative == nil {
|
||||
normative = &mast.Bibliography{Type: ast.CitationTypeNormative}
|
||||
}
|
||||
ast.AppendChild(normative, r)
|
||||
}
|
||||
}
|
||||
return normative, informative
|
||||
}
|
||||
|
||||
// NodeBackMatter is the place where we should inject the bibliography
|
||||
func NodeBackMatter(doc ast.Node) ast.Node {
|
||||
var matter ast.Node
|
||||
|
||||
ast.WalkFunc(doc, func(node ast.Node, entering bool) ast.WalkStatus {
|
||||
if mat, ok := node.(*ast.DocumentMatter); ok {
|
||||
if mat.Matter == ast.DocumentMatterBack {
|
||||
matter = mat
|
||||
return ast.Terminate
|
||||
}
|
||||
}
|
||||
return ast.GoToNext
|
||||
})
|
||||
return matter
|
||||
}
|
||||
|
||||
// anchorFromReference parses '<reference anchor='CBR03' target=''>' and
// returns the value following anchor=, i.e. the ID for the reference. It
// returns nil when data is not a reference tag or no terminated anchor
// value can be found.
func anchorFromReference(data []byte) []byte {
	if !bytes.HasPrefix(data, []byte("<reference ")) {
		return nil
	}

	idx := bytes.Index(data, []byte("anchor="))
	if idx < 0 {
		return nil
	}

	// The byte directly after "anchor=" is taken as the quote character.
	start := idx + len("anchor=")
	if start >= len(data) {
		return nil
	}
	quote := data[start]

	// Scan forward for the matching closing quote.
	pos := start + 1
	for pos < len(data) && data[pos] != quote {
		pos++
	}
	// Unterminated anchor value.
	if pos >= len(data) {
		return nil
	}
	return data[start+1 : pos]
}
|
||||
|
||||
// ReferenceHook is the hook used to parse reference nodes.
|
||||
func ReferenceHook(data []byte) (ast.Node, []byte, int) {
|
||||
ref, ok := IsReference(data)
|
||||
if !ok {
|
||||
return nil, nil, 0
|
||||
}
|
||||
|
||||
node := &ast.HTMLBlock{}
|
||||
node.Literal = fmtReference(ref)
|
||||
return node, nil, len(ref)
|
||||
}
|
||||
|
||||
// IsReference reports whether data starts with a complete
// <reference ...>...</reference> element. On success it returns the bytes of
// the whole element (including the closing tag) and true.
func IsReference(data []byte) ([]byte, bool) {
	if !bytes.HasPrefix(data, []byte("<reference ")) {
		return nil, false
	}

	// Scan for the end-of-reference marker, across lines if necessary.
	closing := []byte("</reference>")
	end := bytes.Index(data, closing)
	if end < 0 {
		// No end-of-reference marker: an unterminated element is not a
		// reference. (The previous `i > len(data)` check could never fire,
		// so unterminated input was wrongly accepted.)
		return nil, false
	}
	return data[:end+len(closing)], true
}
|
||||
|
||||
func fmtReference(data []byte) []byte {
|
||||
var x reference.Reference
|
||||
if e := xml.Unmarshal(data, &x); e != nil {
|
||||
return data
|
||||
}
|
||||
|
||||
out, e := xml.MarshalIndent(x, "", " ")
|
||||
if e != nil {
|
||||
return data
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// AddBibliography adds the bibliography to the document. It will be
|
||||
// added just after the backmatter node. If that node can't be found this
|
||||
// function returns false and does nothing.
|
||||
func AddBibliography(doc ast.Node) bool {
|
||||
where := NodeBackMatter(doc)
|
||||
if where == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
norm, inform := CitationToBibliography(doc)
|
||||
if norm != nil {
|
||||
ast.AppendChild(where, norm)
|
||||
}
|
||||
if inform != nil {
|
||||
ast.AppendChild(where, inform)
|
||||
}
|
||||
return (norm != nil) || (inform != nil)
|
||||
}
|
||||
11
vendor/github.com/mmarkdown/mmark/mparser/extensions.go
generated
vendored
Normal file
11
vendor/github.com/mmarkdown/mmark/mparser/extensions.go
generated
vendored
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
package mparser
|
||||
|
||||
import (
|
||||
"github.com/gomarkdown/markdown/parser"
|
||||
)
|
||||
|
||||
// Extensions is the default set of extensions mmark requires.
|
||||
var Extensions = parser.Tables | parser.FencedCode | parser.Autolink | parser.Strikethrough |
|
||||
parser.SpaceHeadings | parser.HeadingIDs | parser.BackslashLineBreak | parser.SuperSubscript |
|
||||
parser.DefinitionLists | parser.MathJax | parser.AutoHeadingIDs | parser.Footnotes |
|
||||
parser.Strikethrough | parser.OrderedListStart | parser.Attributes | parser.Mmark | parser.Includes
|
||||
59
vendor/github.com/mmarkdown/mmark/mparser/hook.go
generated
vendored
Normal file
59
vendor/github.com/mmarkdown/mmark/mparser/hook.go
generated
vendored
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
package mparser
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/gomarkdown/markdown/ast"
|
||||
"github.com/gomarkdown/markdown/parser"
|
||||
)
|
||||
|
||||
// UnsafeInclude, when set on Initial.Flags, disables the check in
// ReadInclude that an included file must live on or below the initial
// file's directory.
var UnsafeInclude parser.Flags = 1 << 3
|
||||
|
||||
// Hook will call both TitleHook and ReferenceHook.
|
||||
func Hook(data []byte) (ast.Node, []byte, int) {
|
||||
n, b, i := TitleHook(data)
|
||||
if n != nil {
|
||||
return n, b, i
|
||||
}
|
||||
|
||||
return ReferenceHook(data)
|
||||
}
|
||||
|
||||
// ReadInclude is the hook to read includes.
|
||||
// Its supports the following options for address.
|
||||
//
|
||||
// 4,5 - line numbers separated by commas
|
||||
// N, - line numbers, end not specified, read until the end.
|
||||
// /start/,/end/ - regexp separated by commas
|
||||
// optional a prefix="" string.
|
||||
func (i Initial) ReadInclude(from, file string, address []byte) []byte {
|
||||
path := i.path(from, file)
|
||||
|
||||
if i.Flags&UnsafeInclude == 0 {
|
||||
if ok := i.pathAllowed(path); !ok {
|
||||
log.Printf("Failure to read: %q: path is not on or below %q", path, i.i)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
data, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
log.Printf("Failure to read: %q (from %q)", err, filepath.Join(from, "*"))
|
||||
return nil
|
||||
}
|
||||
|
||||
data, err = parseAddress(address, data)
|
||||
if err != nil {
|
||||
log.Printf("Failure to parse address for %q: %q (from %q)", path, err, filepath.Join(from, "*"))
|
||||
return nil
|
||||
}
|
||||
if len(data) == 0 {
|
||||
return data
|
||||
}
|
||||
if data[len(data)-1] != '\n' {
|
||||
data = append(data, '\n')
|
||||
}
|
||||
return data
|
||||
}
|
||||
251
vendor/github.com/mmarkdown/mmark/mparser/include.go
generated
vendored
Normal file
251
vendor/github.com/mmarkdown/mmark/mparser/include.go
generated
vendored
Normal file
|
|
@ -0,0 +1,251 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Adapted for mmark, by Miek Gieben, 2015.
|
||||
// Adapted for mmark2 (fastly simplified and features removed), 2018.
|
||||
|
||||
package mparser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/gomarkdown/markdown/parser"
|
||||
)
|
||||
|
||||
// Initial is the initial file we are working on: empty for stdin, and
// adjusted when we have an absolute or relative file.
type Initial struct {
	Flags parser.Flags // parser flags; the UnsafeInclude bit disables the include path check
	i     string       // directory of the initial file (or the cwd for stdin)
}
|
||||
|
||||
// NewInitial returns an initialized Initial.
|
||||
func NewInitial(s string) Initial {
|
||||
if path.IsAbs(s) {
|
||||
return Initial{i: path.Dir(s)}
|
||||
}
|
||||
|
||||
cwd, _ := os.Getwd()
|
||||
if s == "" {
|
||||
return Initial{i: cwd}
|
||||
}
|
||||
return Initial{i: path.Dir(filepath.Join(cwd, s))}
|
||||
}
|
||||
|
||||
// path returns the full path we should use according to from, file and initial.
|
||||
func (i Initial) path(from, file string) string {
|
||||
if path.IsAbs(file) {
|
||||
return file
|
||||
}
|
||||
if path.IsAbs(from) {
|
||||
filepath.Join(from, file)
|
||||
}
|
||||
|
||||
f1 := filepath.Join(i.i, from)
|
||||
|
||||
return filepath.Join(f1, file)
|
||||
}
|
||||
|
||||
// pathAllowed returns true is file is on the same level or below the initial file.
|
||||
func (i Initial) pathAllowed(file string) bool {
|
||||
x, err := filepath.Rel(i.i, file)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return !strings.Contains(x, "..")
|
||||
}
|
||||
|
||||
// parseAddress parses a code address directive and returns the bytes or an error.
|
||||
func parseAddress(addr []byte, data []byte) ([]byte, error) {
|
||||
bytes.TrimSpace(addr)
|
||||
|
||||
if len(addr) == 0 {
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// check for prefix, either as ;prefix, prefix; or just standalone prefix.
|
||||
var prefix []byte
|
||||
if x := bytes.Index(addr, []byte("prefix=")); x >= 0 {
|
||||
if x+1 > len(addr) {
|
||||
return nil, fmt.Errorf("invalid prefix in address specification: %s", addr)
|
||||
}
|
||||
start := x + len("prefix=")
|
||||
quote := addr[start]
|
||||
if quote != '\'' && quote != '"' {
|
||||
return nil, fmt.Errorf("invalid prefix in address specification: %s", addr)
|
||||
}
|
||||
|
||||
end := SkipUntilChar(addr, start+1, quote)
|
||||
prefix = addr[start+1 : end]
|
||||
if len(prefix) == 0 {
|
||||
return nil, fmt.Errorf("invalid prefix in address specification: %s", addr)
|
||||
}
|
||||
|
||||
addr = append(addr[:x], addr[end+1:]...)
|
||||
addr = bytes.Replace(addr, []byte(";"), []byte(""), 1)
|
||||
if len(addr) == 0 {
|
||||
data = addPrefix(data, prefix)
|
||||
return data, nil
|
||||
}
|
||||
}
|
||||
|
||||
lo, hi, err := addrToByteRange(addr, data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Acme pattern matches can stop mid-line,
|
||||
// so run to end of line in both directions if not at line start/end.
|
||||
for lo > 0 && data[lo-1] != '\n' {
|
||||
lo--
|
||||
}
|
||||
if hi > 0 {
|
||||
for hi < len(data) && data[hi-1] != '\n' {
|
||||
hi++
|
||||
}
|
||||
}
|
||||
|
||||
data = data[lo:hi]
|
||||
if prefix != nil {
|
||||
data = addPrefix(data, prefix)
|
||||
}
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// addrToByteRange evaluates the given address. It returns the start and end index of the data we should return.
// Supported syntax: N, M or /start/, /end/ .
func addrToByteRange(addr, data []byte) (lo, hi int, err error) {
	// An address always consists of exactly two comma-separated terms.
	chunk := bytes.Split(addr, []byte(","))
	if len(chunk) != 2 {
		return 0, 0, fmt.Errorf("invalid address specification: %s", addr)
	}
	left := bytes.TrimSpace(chunk[0])
	right := bytes.TrimSpace(chunk[1])

	// The left term is mandatory.
	if len(left) == 0 {
		return 0, 0, fmt.Errorf("invalid address specification: %s", addr)
	}
	if len(right) == 0 {
		// open ended right term: "N," reads until the end of data.
	}

	if left[0] == '/' { //regular expression
		// In the regexp form both terms must be fully slash-delimited.
		if left[len(left)-1] != '/' {
			return 0, 0, fmt.Errorf("invalid address specification: %s", addr)
		}
		if right[0] != '/' {
			return 0, 0, fmt.Errorf("invalid address specification: %s", addr)
		}
		if right[len(right)-1] != '/' {
			return 0, 0, fmt.Errorf("invalid address specification: %s", addr)
		}

		// Strip the surrounding slashes and delegate to the regexp matcher.
		lo, hi, err = addrRegexp(data, string(left[1:len(left)-1]), string(right[1:len(right)-1]))
		if err != nil {
			return 0, 0, err
		}
	} else {
		// Numeric form: the left term is a line number.
		lo, err = strconv.Atoi(string(left))
		if err != nil {
			return 0, 0, err
		}
		// Count newlines to convert the line number into a byte offset:
		// i ends up at the newline terminating line lo (or at len(data)
		// when there are fewer lines).
		i, j := 0, 0
		for i < len(data) {
			if data[i] == '\n' {
				j++
				if j >= lo {
					break
				}
			}
			i++
		}
		lo = i

		// Open-ended right term: select through the end of data.
		if len(right) == 0 {
			hi = len(data)
			goto End
		}

		hi, err = strconv.Atoi(string(right))
		if err != nil {
			return 0, 0, err
		}
		// Same newline walk for the right term; note the j+1, which stops
		// one newline earlier than the left-hand walk. NOTE(review): the
		// caller (parseAddress) then extends hi to the end of its line —
		// verify the intended inclusive/exclusive line semantics upstream.
		i, j = 0, 0
		for i < len(data) {
			if data[i] == '\n' {
				j++
				if j+1 >= hi {
					break
				}
			}
			i++
		}
		hi = i
	}

End:
	// A reversed range is invalid.
	if lo > hi {
		return 0, 0, fmt.Errorf("invalid address specification: %s", addr)
	}

	return lo, hi, nil
}
|
||||
|
||||
// addrRegexp searches for pattern start and pattern end, returning the byte
// offsets in data where each first matches. The end pattern is searched only
// from the start match onward; the returned hi is translated back to an
// absolute offset into data.
func addrRegexp(data []byte, start, end string) (int, int, error) {
	start = "(?m:" + start + ")" // match through newlines
	reStart, err := regexp.Compile(start)
	if err != nil {
		return 0, 0, err
	}

	end = "(?m:" + end + ")"
	reEnd, err := regexp.Compile(end)
	if err != nil {
		return 0, 0, err
	}
	m := reStart.FindIndex(data)
	if len(m) == 0 {
		return 0, 0, errors.New("no match for " + start)
	}
	lo := m[0]

	m = reEnd.FindIndex(data[lo:]) // start *from* lo
	if len(m) == 0 {
		return 0, 0, errors.New("no match for " + end)
	}
	// m[0] is relative to data[lo:]; translate to an absolute offset.
	// (Previously the relative offset was returned directly, so callers
	// sliced data at the wrong position.)
	hi := lo + m[0]

	return lo, hi, nil
}
|
||||
|
||||
// SkipUntilChar returns the index of the first occurrence of c in data at or
// after position i, or len(data) when c does not occur there.
func SkipUntilChar(data []byte, i int, c byte) int {
	for ; i < len(data); i++ {
		if data[i] == c {
			return i
		}
	}
	return i
}
|
||||
|
||||
// addPrefix prepends prefix to the first line of data and after every
// newline. Callers guarantee data ends in a newline; that final byte is
// deliberately not copied to the output.
func addPrefix(data, prefix []byte) []byte {
	var out bytes.Buffer
	out.Write(prefix)
	// Copy everything but the trailing newline, inserting the prefix right
	// after each newline passed.
	limit := len(data) - 1
	for pos := 0; pos < limit; pos++ {
		out.WriteByte(data[pos])
		if data[pos] == '\n' {
			out.Write(prefix)
		}
	}
	return out.Bytes()
}
|
||||
111
vendor/github.com/mmarkdown/mmark/mparser/index.go
generated
vendored
Normal file
111
vendor/github.com/mmarkdown/mmark/mparser/index.go
generated
vendored
Normal file
|
|
@ -0,0 +1,111 @@
|
|||
package mparser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"sort"
|
||||
|
||||
"github.com/gomarkdown/markdown/ast"
|
||||
"github.com/mmarkdown/mmark/mast"
|
||||
)
|
||||
|
||||
// IndexToDocumentIndex crawls the entire doc searching for indices, it will then return
|
||||
// an mast.DocumentIndex that contains a tree:
|
||||
//
|
||||
// IndexLetter
|
||||
// - IndexItem
|
||||
// - IndexLink
|
||||
// - IndexSubItem
|
||||
// - IndexLink
|
||||
// - IndexLink
|
||||
//
|
||||
// Which can then be rendered by the renderer.
|
||||
func IndexToDocumentIndex(doc ast.Node) *mast.DocumentIndex {
|
||||
main := map[string]*mast.IndexItem{}
|
||||
subitem := map[string][]*mast.IndexSubItem{} // gather these so we can add them in one swoop at the end
|
||||
|
||||
// Gather all indexes.
|
||||
ast.WalkFunc(doc, func(node ast.Node, entering bool) ast.WalkStatus {
|
||||
switch i := node.(type) {
|
||||
case *ast.Index:
|
||||
item := string(i.Item)
|
||||
|
||||
if _, ok := main[item]; !ok {
|
||||
main[item] = &mast.IndexItem{Index: i}
|
||||
}
|
||||
// only the main item
|
||||
if i.Subitem == nil {
|
||||
ast.AppendChild(main[item], newLink(i.ID, len(main[item].GetChildren()), i.Primary))
|
||||
return ast.GoToNext
|
||||
}
|
||||
// check if we already have a child with the subitem and then just add the link
|
||||
for _, sub := range subitem[item] {
|
||||
if bytes.Compare(sub.Subitem, i.Subitem) == 0 {
|
||||
ast.AppendChild(sub, newLink(i.ID, len(sub.GetChildren()), i.Primary))
|
||||
return ast.GoToNext
|
||||
}
|
||||
}
|
||||
|
||||
sub := &mast.IndexSubItem{Index: i}
|
||||
ast.AppendChild(sub, newLink(i.ID, len(subitem[item]), i.Primary))
|
||||
subitem[item] = append(subitem[item], sub)
|
||||
}
|
||||
return ast.GoToNext
|
||||
})
|
||||
if len(main) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Now add a subitem children to the correct main item.
|
||||
for k, sub := range subitem {
|
||||
// sort sub here ideally
|
||||
for j := range sub {
|
||||
ast.AppendChild(main[k], sub[j])
|
||||
}
|
||||
}
|
||||
|
||||
keys := []string{}
|
||||
for k := range main {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
|
||||
letters := []*mast.IndexLetter{}
|
||||
var prevLetter byte
|
||||
var il *mast.IndexLetter
|
||||
for _, k := range keys {
|
||||
letter := k[0]
|
||||
if letter != prevLetter {
|
||||
il = &mast.IndexLetter{}
|
||||
il.Literal = []byte{letter}
|
||||
letters = append(letters, il)
|
||||
}
|
||||
ast.AppendChild(il, main[k])
|
||||
prevLetter = letter
|
||||
}
|
||||
docIndex := &mast.DocumentIndex{}
|
||||
for i := range letters {
|
||||
ast.AppendChild(docIndex, letters[i])
|
||||
}
|
||||
|
||||
return docIndex
|
||||
}
|
||||
|
||||
func newLink(id string, number int, primary bool) *mast.IndexLink {
|
||||
link := &ast.Link{Destination: []byte(id)}
|
||||
il := &mast.IndexLink{Link: link, Primary: primary}
|
||||
il.Literal = []byte(fmt.Sprintf("%d", number))
|
||||
return il
|
||||
}
|
||||
|
||||
// AddIndex adds an index to the end of the current document. If not indices can be found
|
||||
// this returns false and no index will be added.
|
||||
func AddIndex(doc ast.Node) bool {
|
||||
idx := IndexToDocumentIndex(doc)
|
||||
if idx == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
ast.AppendChild(doc, idx)
|
||||
return true
|
||||
}
|
||||
57
vendor/github.com/mmarkdown/mmark/mparser/title.go
generated
vendored
Normal file
57
vendor/github.com/mmarkdown/mmark/mparser/title.go
generated
vendored
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
package mparser
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/gomarkdown/markdown/ast"
|
||||
"github.com/mmarkdown/mmark/mast"
|
||||
)
|
||||
|
||||
// TitleHook will parse a title and returns it. The start and ending can
|
||||
// be signalled with %%% or --- (the later to more inline with Hugo and other markdown dialects.
|
||||
func TitleHook(data []byte) (ast.Node, []byte, int) {
|
||||
i := 0
|
||||
if len(data) < 3 {
|
||||
return nil, nil, 0
|
||||
}
|
||||
|
||||
c := data[i] // first char can either be % or -
|
||||
if c != '%' && c != '-' {
|
||||
return nil, nil, 0
|
||||
}
|
||||
|
||||
if data[i] != c || data[i+1] != c || data[i+2] != c {
|
||||
return nil, nil, 0
|
||||
}
|
||||
|
||||
i += 3
|
||||
beg := i
|
||||
found := false
|
||||
// search for end.
|
||||
for i < len(data) {
|
||||
if data[i] == c && data[i+1] == c && data[i+2] == c {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
i++
|
||||
}
|
||||
if !found {
|
||||
return nil, nil, 0
|
||||
}
|
||||
|
||||
node := mast.NewTitle(c)
|
||||
buf := data[beg:i]
|
||||
|
||||
if c == '-' {
|
||||
node.Content = buf
|
||||
return node, nil, i + 3
|
||||
}
|
||||
|
||||
if _, err := toml.Decode(string(buf), node.TitleData); err != nil {
|
||||
log.Printf("Failure parsing title block: %s", err)
|
||||
}
|
||||
node.Content = buf
|
||||
|
||||
return node, nil, i + 3
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue