@@ -1,224 +1,261 @@
+//go:generate go-bindata -pkg static -o templates.go templates/
 package static
 
 import (
-	"bufio"
 	"html/template"
-	"io"
 	"io/ioutil"
 	"os"
 	"path/filepath"
 	"strings"
 )
 
-var readfile = ioutil.ReadFile
+var readall = ioutil.ReadAll
+var open = os.Open
 var create = os.Create
 var mkdirall = os.MkdirAll
-var parseFiles = template.ParseFiles
-var walk = filepath.Walk
 
-type executor interface {
-	Execute(io.Writer, interface{}) error
+type logger interface {
+	Info(string, ...interface{})
+	Debug(string, ...interface{})
+	Error(string, ...interface{})
 }
 
 type operation func([]byte) []byte
 
+// This is the compiler that collects the list of markdown files, a title, the
+// input and output paths, and whether to produce multiple files (web mode) or
+// a single file (default, book mode).
+//
+// The public properties are not thread safe, so concurrent execution may yield
+// errors if those properties are modified or accessed in parallel.
 type Markdown struct {
-	Book         bool   `json:"book,omitempty"`
-	Input        string `json:"input,omitempty"`
-	Output       string `json:"output,omitempty"`
-	Relative     bool   `json:"relative,omitempty"`
-	TemplateFile string `json:"template,omitempty"`
-
-	L logger
-
-	version  string
-	pages    []string
-	template executor
+	Title    string `json:"title,omitempty"`
+	Input    string `json:"input,omitempty"`
+	Output   string `json:"output,omitempty"`
+	Web      bool   `json:"web,omitempty"`
+	Template string `json:"template,omitempty"`
+	Version  string `json:"version,omitempty"`
+	L        logger `json:"-"`
+
+	err   error
+	files []string
 }
 
-func (g *Markdown) ior(path string) string {
-	return strings.TrimSuffix(strings.Replace(path, g.Input, g.Output, 1), filepath.Ext(path)) + ".html"
+// This function helps us handle any errors encountered during processing
+// without forcing us to immediately terminate.
+//
+// It is also responsible for logging every error encountered.
+//
+// Nil errors are ignored; the last non-nil error is retained and later
+// returned, so that the caller knows at least one failure has occurred.
+func (m *Markdown) errors(err error) {
+	if err == nil {
+		return
+	}
+	m.L.Error(err.Error())
+	m.err = err
 }
 
-func (g *Markdown) depth(path string) string {
-	if g.Relative {
-		if rel, err := filepath.Rel(filepath.Dir(path), g.Output); err == nil {
-			return rel + string(os.PathSeparator)
+// If the absolute path minus the file extension already exists then we want to
+// know, so we can avoid redundant processing, overwriting files, and potential
+// race conditions.
+func (m *Markdown) matches(file string) bool {
+	for i := range m.files {
+		if strings.TrimSuffix(file, filepath.Ext(file)) == strings.TrimSuffix(m.files[i], filepath.Ext(m.files[i])) {
+			return true
 		}
 	}
-	return ""
+	return false
 }
 
-func (g *Markdown) walk(path string, file os.FileInfo, err error) error {
-	if file != nil && file.Mode().IsRegular() && file.Size() > 0 && isMarkdown(path) {
-		g.pages = append(g.pages, path)
+// This checks the extension against a list of supported extensions.
+func (m *Markdown) valid(file string) bool {
+	for i := range extensions {
+		if filepath.Ext(file) == extensions[i] {
+			return true
+		}
 	}
-	return err
+	return false
 }
 
-func (g *Markdown) multi(run operation) error {
-	navi := make(map[string][]navigation)
-	var err error
-
-	for i, _ := range g.pages {
-		out := g.ior(g.pages[i])
-		dir := filepath.Dir(g.ior(out))
-		nav := navigation{}
-
-		if filepath.Dir(out) != g.Output && strings.ToLower(basename(out)) == "index" {
-			nav.Title = basename(dir)
-			if g.Relative {
-				nav.Link = filepath.Join(strings.TrimPrefix(dir, filepath.Dir(dir)+string(os.PathSeparator)), filepath.Base(out))
-			} else {
-				nav.Link = strings.TrimPrefix(dir, g.Output) + string(os.PathSeparator)
-			}
-			dir = filepath.Dir(dir)
-		} else {
-			nav.Title = basename(out)
-			if g.Relative {
-				nav.Link = strings.TrimPrefix(out, filepath.Dir(out)+string(os.PathSeparator))
-			} else {
-				nav.Link = strings.TrimPrefix(out, g.Output)
-			}
-		}
-
-		if _, ok := navi[dir]; !ok {
-			navi[dir] = make([]navigation, 0)
-			if ok, _ := exists(dir); !ok {
-				if err = mkdirall(dir, 0770); err != nil {
-					g.L.Error("failed to create path: %s, %s", dir, err)
-				}
-			}
-		}
-
-		navi[dir] = append(navi[dir], nav)
+// When walking through files we collect errors but do not return them, so that
+// the entire operation is not canceled due to a single failure.
+//
+// If there is an error, the file is a directory, the file is irregular, the
+// file does not have a markdown extension, or the file name minus its
+// extension is already in our list, then we skip that file.
+//
+// Thus the first file matched is the only file processed, which handles the
+// case of multiple valid markdown extensions for the same file basename.
+//
+// Each verified file is added to the list of files, which we will process
+// after we finish iterating all files.
+func (m *Markdown) walk(file string, f os.FileInfo, e error) error {
+	m.errors(e)
+	if e != nil || f.IsDir() || !f.Mode().IsRegular() || f.Size() == 0 || !m.valid(file) || m.matches(file) {
+		return nil
 	}
+	m.files = append(m.files, file)
+	return nil
+}
 
-	for _, p := range g.pages {
-		var markdown []byte
-		if markdown, err = readfile(p); err != nil {
-			g.L.Error("failed to read file: %s, %s", p, err)
-			return err
-		}
+// A helper that abstracts the process of getting a template.
+func (m *Markdown) template() (*template.Template, error) {
+	if m.Template != "" {
+		return template.ParseFiles(m.Template)
+	}
+	var assetFile = "templates/book.tmpl"
+	if m.Web {
+		assetFile = "templates/web.tmpl"
+	}
+	d, e := Asset(assetFile)
+	if e != nil {
+		return nil, e
+	}
+	t := template.New("markdown")
+	return t.Parse(string(d))
+}
 
-		out := g.ior(p)
-		dir := filepath.Dir(out)
-		page := page{
-			Name:    basename(p),
-			Version: g.version,
-			Nav:     navi[g.Output],
-			Depth:   g.depth(out),
+// This operation processes each file independently, which includes passing to
+// each file its own page structure.
+//
+// In the future, when buffered markdown parsers exist, this should leverage
+// concurrency, but the current implementation is bottlenecked at disk IO.
+//
+// The template is created first, using the compiled bindata by default, or the
+// supplied template file when one is provided.
+func (m *Markdown) web(o operation) error {
+	t, e := m.template()
+	if e != nil {
+		return e
+	}
+	for i := range m.files {
+		in, e := open(m.files[i])
+		if e != nil {
+			m.errors(e)
+			continue
 		}
-
-		if dir != g.Output && strings.ToLower(basename(p)) == "index" {
-			toc := "\n## Table of Contents:\n\n"
-			for i, _ := range navi[dir] {
-				toc = toc + "- [" + navi[dir][i].Title + "](" + navi[dir][i].Link + ")\n"
-			}
-			g.L.Debug("table of contents for %s, %s", out, toc)
-			markdown = append([]byte(toc), markdown...)
+		b, e := readall(in)
+		m.errors(in.Close())
+		if e != nil {
+			m.errors(e)
+			continue
 		}
-
-		page.Content = template.HTML(run(markdown))
-
-		var f *os.File
-		if f, err = create(out); err != nil {
-			g.L.Error("%s\n", err)
-			return err
+		d := o(b)
+		m.errors(mkdirall(filepath.Dir(filepath.Join(m.Output, strings.TrimSuffix(strings.TrimPrefix(m.files[i], m.Input), filepath.Ext(m.files[i]))+".html")), os.ModePerm))
+		out, e := create(filepath.Join(m.Output, strings.TrimSuffix(strings.TrimPrefix(m.files[i], m.Input), filepath.Ext(m.files[i]))+".html"))
+		if e != nil {
+			m.errors(e)
+			continue
 		}
-		defer f.Close()
 
-		fb := bufio.NewWriter(f)
-		defer fb.Flush()
-
-		if err = g.template.Execute(fb, page); err != nil {
-			g.L.Error("%s\n", err)
+		if e := t.Execute(out, struct {
+			Title   string
+			Name    string
+			Content template.HTML
+			Version string
+		}{
+			Content: template.HTML(string(d)),
+			Title:   m.Title,
+			Name:    strings.TrimSuffix(filepath.Base(m.files[i]), filepath.Ext(m.files[i])),
+			Version: m.Version,
+		}); e != nil {
+			m.errors(e)
 		}
+		m.errors(out.Close())
 	}
-
-	return err
+	return nil
 }
 
-func (g *Markdown) single(run operation) error {
-	content := make([]byte, 0)
-	toc := "\n"
-	previous_depth := 0
-	var err error
-
-	for _, p := range g.pages {
-		shorthand := strings.TrimPrefix(p, g.Input+string(os.PathSeparator))
-		depth := strings.Count(shorthand, string(os.PathSeparator))
-		if depth > previous_depth {
-			toc = toc + strings.Repeat("\t", depth-1) + "- " + basename(filepath.Dir(p)) + "\n"
-		}
-		anchor := strings.Replace(shorthand, string(os.PathSeparator), "-", -1)
-		toc = toc + strings.Repeat("\t", depth) + "- [" + basename(p) + "](#" + anchor + ")\n"
-
-		var markdown []byte
-		if markdown, err = readfile(p); err != nil {
-			g.L.Error("failed to read file: %s (%s)", p, err)
+// This operation processes each file sequentially, and keeps the bytes for all
+// files in memory so it can write the output to a single file.
+//
+// In the future, it would be best if each file were loaded into a buffered
+// markdown parser and piped to a buffered template so that the system could
+// avoid storing all bytes in memory.
+//
+// Once every file has been loaded into a single byte array, we run it through
+// the markdown processor `operation`, and pass that into a template which then
+// pushes the output to a single file.
+func (m *Markdown) book(o operation) error {
+	t, e := m.template()
+	if e != nil {
+		return e
+	}
+	var b []byte
+	for i := range m.files {
+		in, e := open(m.files[i])
+		if e != nil {
+			m.errors(e)
 			continue
 		}
-
-		markdown = append([]byte("\n<a id='"+anchor+"'/>\n\n"), markdown...)
-		markdown = append(markdown, []byte("\n[back to top](#top)\n\n")...)
-		content = append(content, markdown...)
-		previous_depth = depth
-	}
-
-	content = append([]byte(toc), content...)
-
-	if ok, _ := exists(g.Output); !ok {
-		if err = mkdirall(g.Output, 0770); err != nil {
-			g.L.Error("failed to create path: %s (%s)", g.Output, err)
-			return err
+		d, e := readall(in)
+		m.errors(in.Close())
+		if e != nil {
+			m.errors(e)
+			continue
 		}
+		b = append(b, d...)
 	}
-
-	page := page{
-		Version: g.version,
-		Content: template.HTML(run(content)),
+	d := o(b)
+	m.errors(mkdirall(filepath.Dir(m.Output), os.ModePerm))
+	out, e := create(m.Output)
+	if e != nil {
+		return e
 	}
-	out := filepath.Join(g.Output, "index.html")
-
-	var f *os.File
-	if f, err = create(out); err != nil {
-		g.L.Error("%s\n", err)
-		return err
-	}
-	defer f.Close()
-
-	fb := bufio.NewWriter(f)
-	defer fb.Flush()
-
-	if err = g.template.Execute(fb, page); err != nil {
-		g.L.Error("%s\n", err)
-	}
-
-	return err
+	defer out.Close()
+	return t.Execute(out, struct {
+		Title   string
+		Content template.HTML
+		Version string
+	}{
+		Content: template.HTML(string(d)),
+		Title:   m.Title,
+		Version: m.Version,
+	})
 }
 
-func (g *Markdown) Generate(run operation) error {
-	var err error
-	if g.template, err = parseFiles(g.TemplateFile); err != nil {
-		g.L.Error("%s\n", err)
-		return err
+// The primary function, which accepts the operation used to convert markdown
+// into HTML. Unfortunately there are currently no markdown parsers that
+// operate on a stream, but in the future I would like to switch to an
+// io.Reader interface.
+//
+// The operation begins by capturing the input path so that we can translate
+// the output path when creating files from the input path, including matching
+// directories.
+//
+// If no title has been supplied it will default to the parent directory's
+// name, but this might be better placed in package main.
+//
+// The default output in web mode is `public/`; otherwise, in book mode, the
+// default is a file named after the title.
+//
+// We walk the input path, which assembles the list of markdown files, and then
+// we gather any errors returned.
+//
+// Finally we process the files according to the desired output mode.
+func (m *Markdown) Run(o operation) error {
+	var e error
+	if m.Input == "" {
+		if m.Input, e = os.Getwd(); e != nil {
+			m.errors(e)
+			return e
+		}
 	}
-
-	g.version = version(g.Input)
-	g.Input, _ = filepath.Abs(g.Input)
-	g.Output, _ = filepath.Abs(g.Output)
-	g.Input = filepath.Clean(g.Input)
-	g.Output = filepath.Clean(g.Output)
-
-	if err := walk(g.Input, g.walk); err != nil {
-		g.L.Error("%s\n", err)
-		return err
+	if m.Title == "" {
+		m.Title = filepath.Base(filepath.Dir(m.Input))
 	}
-	g.L.Debug("Markdown state: %+v", g)
-
-	if g.Book {
-		return g.single(run)
+	if m.Web && m.Output == "" {
+		m.Output = filepath.Join(m.Input, "public")
+	} else if m.Output == "" {
+		m.Output = filepath.Join(m.Input, m.Title+".html")
+	}
+	m.errors(filepath.Walk(m.Input, m.walk))
+	m.L.Debug("Status: %#v", m)
+	if m.Web {
+		m.errors(m.web(o))
+	} else {
+		m.errors(m.book(o))
 	}
-	return g.multi(run)
+	return m.err
 }
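
A minimal usage sketch of the new public surface introduced by this diff (the Markdown struct, the logger interface, and Run). Everything here is hypothetical: nopLogger and the identity operation are stand-ins for a real logger and a real markdown-to-HTML converter, and the paths are only illustrative.

// Hypothetical example in package static; not part of the patch above.
package static

// nopLogger is a stand-in that satisfies the logger interface from the diff.
type nopLogger struct{}

func (nopLogger) Info(string, ...interface{})  {}
func (nopLogger) Debug(string, ...interface{}) {}
func (nopLogger) Error(string, ...interface{}) {}

func ExampleMarkdown_Run() {
	m := &Markdown{
		Title:  "Example",
		Input:  "docs",   // illustrative input directory
		Output: "public", // illustrative output directory (web mode)
		Web:    true,     // one HTML file per markdown file; false writes a single book file
		L:      nopLogger{},
	}
	// The operation receives raw markdown bytes and must return HTML bytes;
	// an identity function is used here only as a placeholder.
	if err := m.Run(func(b []byte) []byte { return b }); err != nil {
		m.L.Error("run failed: %s", err.Error())
	}
}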