2020-11-20 18:07:38 +01:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
2020-11-22 21:14:50 +01:00
|
|
|
"bytes"
|
2021-04-21 20:39:13 +02:00
|
|
|
"fmt"
|
2021-04-21 03:28:29 +02:00
|
|
|
"io"
|
2020-11-20 18:07:38 +01:00
|
|
|
"io/ioutil"
|
2021-04-21 03:28:29 +02:00
|
|
|
"log"
|
2020-11-20 18:07:38 +01:00
|
|
|
"os"
|
2021-04-21 03:28:29 +02:00
|
|
|
"os/exec"
|
2020-11-20 18:07:38 +01:00
|
|
|
pathpkg "path"
|
|
|
|
"sort"
|
|
|
|
"strings"
|
|
|
|
"time"
|
2021-05-09 22:14:50 +02:00
|
|
|
|
|
|
|
"gopkg.in/yaml.v3"
|
2020-11-20 18:07:38 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
// Dir represents a directory.
type Dir struct {
	Path  string  // Site-absolute path of the directory, e.g. "/posts/".
	Pages []*Page // Pages contained directly in this directory.
	Dirs  []*Dir  // Immediate subdirectories.

	index *Page  // The index page (from an "_index" file), if any.
	feed  []byte // Atom feed. Rendered output, set by process when a feed is configured.
}
|
|
|
|
|
|
|
|
// Page represents a page.
//
// Exported fields without a `yaml:"-"` tag can be set from the page's YAML
// frontmatter; the tagged fields are populated by Dir._read and Dir.sort.
type Page struct {
	Title  string            // Page title, typically set via frontmatter.
	Date   time.Time         // Publication date, from frontmatter or a "2006-01-02" filename prefix.
	Weight int               // Sort weight; lower weights sort first (see Dir.sort).
	Path   string            `yaml:"-"` // Final URL path of the page.
	FilePath string          `yaml:"-"` // Source file path relative to the content root.
	Content  string          `yaml:"-"` // Page content after preprocessing/templating.
	Params map[string]string // Arbitrary user-defined parameters from frontmatter.
	Prev   *Page             `yaml:"-"` // Previous page in sorted order, set by Dir.sort.
	Next   *Page             `yaml:"-"` // Next page in sorted order, set by Dir.sort.
}
|
|
|
|
|
2021-05-10 16:35:54 +02:00
|
|
|
// read reads from a directory and indexes the files and directories within it.
|
2021-05-14 06:17:10 +02:00
|
|
|
func (d *Dir) read(srcDir string, task *Task, cfg *Site) error {
|
2021-05-10 17:06:55 +02:00
|
|
|
return d._read(srcDir, "", task, cfg)
|
2021-04-21 03:28:29 +02:00
|
|
|
}
|
|
|
|
|
2021-05-14 06:17:10 +02:00
|
|
|
func (d *Dir) _read(srcDir, path string, task *Task, cfg *Site) error {
|
2020-11-20 18:07:38 +01:00
|
|
|
entries, err := ioutil.ReadDir(pathpkg.Join(srcDir, path))
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
for _, entry := range entries {
|
|
|
|
name := entry.Name()
|
|
|
|
path := pathpkg.Join(path, name)
|
|
|
|
if entry.IsDir() {
|
2021-05-12 21:20:14 +02:00
|
|
|
// Ignore directories beginning with "_"
|
|
|
|
if strings.HasPrefix(name, "_") {
|
|
|
|
continue
|
|
|
|
}
|
2020-11-20 18:07:38 +01:00
|
|
|
// Gather directory data
|
2021-05-14 06:17:10 +02:00
|
|
|
dir := &Dir{Path: "/" + path + "/"}
|
2021-05-10 17:06:55 +02:00
|
|
|
if err := dir._read(srcDir, path, task, cfg); err != nil {
|
2020-11-20 18:07:38 +01:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
d.Dirs = append(d.Dirs, dir)
|
2021-05-10 18:23:27 +02:00
|
|
|
} else if ext := pathpkg.Ext(name); task.Match(ext) {
|
2021-05-12 21:20:14 +02:00
|
|
|
// Ignore pages beginning with "_" with the exception of _index pages
|
|
|
|
namePrefix := strings.TrimSuffix(name, ext)
|
|
|
|
if strings.HasPrefix(name, "_") && namePrefix != "_index" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2020-11-20 18:07:38 +01:00
|
|
|
srcPath := pathpkg.Join(srcDir, path)
|
|
|
|
content, err := ioutil.ReadFile(srcPath)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2021-04-21 03:28:29 +02:00
|
|
|
|
2021-05-09 22:14:50 +02:00
|
|
|
page := &Page{}
|
|
|
|
|
|
|
|
// Try to parse the date from the page filename
|
|
|
|
const layout = "2006-01-02"
|
|
|
|
base := pathpkg.Base(path)
|
|
|
|
if len(base) >= len(layout) {
|
|
|
|
dateStr := base[:len(layout)]
|
|
|
|
if time, err := time.Parse(layout, dateStr); err == nil {
|
|
|
|
page.Date = time
|
|
|
|
// Remove the date from the path
|
|
|
|
base = base[len(layout):]
|
|
|
|
if len(base) > 0 {
|
|
|
|
// Remove a leading dash
|
|
|
|
if base[0] == '-' {
|
|
|
|
base = base[1:]
|
|
|
|
}
|
|
|
|
if len(base) > 0 {
|
|
|
|
dir := pathpkg.Dir(path)
|
|
|
|
if dir == "." {
|
|
|
|
dir = ""
|
|
|
|
}
|
|
|
|
path = pathpkg.Join(dir, base)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Extract frontmatter from content
|
|
|
|
frontmatter, content := extractFrontmatter(content)
|
|
|
|
if len(frontmatter) != 0 {
|
|
|
|
if err := yaml.Unmarshal(frontmatter, page); err != nil {
|
|
|
|
log.Printf("failed to parse frontmatter for %q: %v", path, err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Trim leading newlines from content
|
|
|
|
content = bytes.TrimLeft(content, "\r\n")
|
|
|
|
}
|
|
|
|
|
2021-05-10 18:23:27 +02:00
|
|
|
if cmd, ok := task.Preprocess[strings.TrimPrefix(ext, ".")]; ok {
|
|
|
|
content = process(cmd, bytes.NewReader(content))
|
2021-04-21 03:28:29 +02:00
|
|
|
}
|
2021-05-09 22:14:50 +02:00
|
|
|
page.Content = string(content)
|
2021-04-21 03:28:29 +02:00
|
|
|
|
2021-05-12 22:21:06 +02:00
|
|
|
page.FilePath = path
|
|
|
|
|
2021-05-12 21:20:14 +02:00
|
|
|
if namePrefix == "_index" {
|
2021-05-09 22:14:50 +02:00
|
|
|
page.Path = d.Path
|
|
|
|
d.index = page
|
2021-04-21 03:28:29 +02:00
|
|
|
} else {
|
2021-05-12 21:20:14 +02:00
|
|
|
if namePrefix == "index" {
|
|
|
|
path = "/" + strings.TrimSuffix(path, name)
|
|
|
|
} else {
|
|
|
|
path = "/" + strings.TrimSuffix(path, ext)
|
2021-05-12 21:29:13 +02:00
|
|
|
if task.UglyURLs {
|
|
|
|
path += task.OutputExt
|
|
|
|
} else {
|
|
|
|
path += "/"
|
|
|
|
}
|
2021-05-10 06:44:25 +02:00
|
|
|
}
|
|
|
|
page.Path = path
|
2021-05-10 17:06:55 +02:00
|
|
|
if permalink, ok := cfg.permalinks[d.Path]; ok {
|
|
|
|
var b strings.Builder
|
|
|
|
permalink.Execute(&b, page)
|
|
|
|
page.Path = b.String()
|
|
|
|
}
|
2021-05-09 22:14:50 +02:00
|
|
|
d.Pages = append(d.Pages, page)
|
2020-11-20 18:07:38 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-05-10 16:35:54 +02:00
|
|
|
// process processes the directory's contents.
//
// When the task defines a template extension, the index page is rendered via
// the "index"+ext template (with the whole directory as data) and each page
// via the "page"+ext template, replacing their Content with the rendered
// output. If a feed is configured for this directory's path, an Atom feed is
// then rendered via the "atom.xml" template into d.feed — note the ordering:
// pages are templated first, so the feed entries carry rendered content.
// Finally, subdirectories are processed recursively.
func (d *Dir) process(cfg *Site, task *Task) error {
	if task.TemplateExt != "" {
		// Create index
		if d.index != nil {
			tmpl, ok := cfg.templates.FindTemplate(d.Path, "index"+task.TemplateExt)
			if ok {
				var b strings.Builder
				// The index template receives the directory itself as data.
				if err := tmpl.Execute(&b, d); err != nil {
					return err
				}
				d.index.Content = b.String()
			}
		}

		// Process pages
		for i := range d.Pages {
			var b strings.Builder
			// NOTE(review): the template is looked up on every iteration;
			// presumably cheap, but it could be hoisted out of the loop.
			tmpl, ok := cfg.templates.FindTemplate(d.Path, "page"+task.TemplateExt)
			if ok {
				if err := tmpl.Execute(&b, d.Pages[i]); err != nil {
					return err
				}
				d.Pages[i].Content = b.String()
			}
		}
	}

	// Feed represents a feed. It is local to process because it exists only
	// as the data argument for the "atom.xml" template.
	type Feed struct {
		Title string // Feed title.
		Path string // Feed path.
		Updated time.Time // Last updated time.
		Entries []*Page // Feed entries.
	}

	// Create feeds
	if title, ok := cfg.Feeds[d.Path]; ok {
		var b bytes.Buffer
		feed := &Feed{
			Title: title,
			Path: d.Path,
			Updated: time.Now(),
			Entries: d.Pages,
		}
		tmpl, ok := cfg.templates.FindTemplate(d.Path, "atom.xml")
		if ok {
			if err := tmpl.Execute(&b, feed); err != nil {
				return err
			}
			d.feed = b.Bytes()
		} else {
			// A missing feed template is a warning, not a build failure.
			fmt.Printf("Warning: failed to generate feed %q: missing template \"atom.xml\"\n", title)
		}
	}

	// Process subdirectories
	for _, d := range d.Dirs {
		if err := d.process(cfg, task); err != nil {
			return err
		}
	}
	return nil
}
|
|
|
|
|
2021-05-10 16:35:54 +02:00
|
|
|
// write writes the directory's contents to the provided destination path.
|
|
|
|
func (d *Dir) write(dstDir string, task *Task) error {
|
2020-11-20 18:07:38 +01:00
|
|
|
dirPath := pathpkg.Join(dstDir, d.Path)
|
|
|
|
|
2020-11-20 21:03:41 +01:00
|
|
|
// Write pages
|
2021-04-21 03:28:29 +02:00
|
|
|
pages := d.Pages
|
|
|
|
if d.index != nil {
|
|
|
|
pages = append(pages, d.index)
|
2020-11-20 18:07:38 +01:00
|
|
|
}
|
2021-04-21 03:28:29 +02:00
|
|
|
for _, page := range pages {
|
2021-05-10 06:44:25 +02:00
|
|
|
path := page.Path
|
|
|
|
if !task.UglyURLs || page == d.index {
|
|
|
|
path = pathpkg.Join(path, "index"+task.OutputExt)
|
|
|
|
}
|
2021-04-21 03:28:29 +02:00
|
|
|
var content []byte
|
2021-05-10 18:23:27 +02:00
|
|
|
if cmd := task.Postprocess; cmd != "" {
|
|
|
|
content = process(cmd, strings.NewReader(page.Content))
|
2021-04-21 03:28:29 +02:00
|
|
|
} else {
|
|
|
|
content = []byte(page.Content)
|
|
|
|
}
|
2020-11-20 18:07:38 +01:00
|
|
|
|
|
|
|
dstPath := pathpkg.Join(dstDir, path)
|
2020-11-20 21:03:41 +01:00
|
|
|
dir := pathpkg.Dir(dstPath)
|
|
|
|
os.MkdirAll(dir, 0755)
|
2021-04-21 03:28:29 +02:00
|
|
|
if err := os.WriteFile(dstPath, content, 0644); err != nil {
|
2020-11-20 18:07:38 +01:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-04-20 20:49:45 +02:00
|
|
|
// Write the atom feed
|
|
|
|
if d.feed != nil {
|
|
|
|
const path = "atom.xml"
|
2021-05-16 03:04:15 +02:00
|
|
|
dstPath := pathpkg.Join(dirPath, path)
|
2021-05-16 03:27:09 +02:00
|
|
|
os.MkdirAll(dirPath, 0755)
|
2021-04-21 03:28:29 +02:00
|
|
|
if err := os.WriteFile(dstPath, d.feed, 0644); err != nil {
|
2021-04-20 20:49:45 +02:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-11-20 18:07:38 +01:00
|
|
|
// Write subdirectories
|
|
|
|
for _, dir := range d.Dirs {
|
2021-05-10 16:35:54 +02:00
|
|
|
dir.write(dstDir, task)
|
2020-11-20 18:07:38 +01:00
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-05-12 22:35:00 +02:00
|
|
|
// sort sorts the directory's pages by weight, then date, then filepath.
|
2020-11-20 18:07:38 +01:00
|
|
|
func (d *Dir) sort() {
|
|
|
|
sort.Slice(d.Pages, func(i, j int) bool {
|
2021-05-12 22:35:00 +02:00
|
|
|
pi, pj := d.Pages[i], d.Pages[j]
|
2021-05-17 18:51:32 +02:00
|
|
|
return pi.Weight < pj.Weight || pi.Date.After(pj.Date) ||
|
2021-05-12 22:35:00 +02:00
|
|
|
pi.FilePath < pj.FilePath
|
2020-11-20 18:07:38 +01:00
|
|
|
})
|
2021-05-12 21:10:40 +02:00
|
|
|
|
|
|
|
for i := range d.Pages {
|
2021-05-12 22:20:19 +02:00
|
|
|
if i-1 >= 0 {
|
2021-05-12 21:10:40 +02:00
|
|
|
d.Pages[i].Prev = d.Pages[i-1]
|
|
|
|
}
|
|
|
|
if i+1 < len(d.Pages) {
|
|
|
|
d.Pages[i].Next = d.Pages[i+1]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-11-20 18:07:38 +01:00
|
|
|
// Sort subdirectories
|
|
|
|
for _, d := range d.Dirs {
|
|
|
|
d.sort()
|
|
|
|
}
|
|
|
|
}
|
2021-04-21 03:28:29 +02:00
|
|
|
|
2021-05-10 18:23:27 +02:00
|
|
|
// process runs a process command.
//
// The command string is split on single spaces; the first token is the
// program and the rest are its arguments. input is wired to the command's
// stdin, its stderr passes through to this process, and its stdout is
// returned. A failing command aborts the program via log.Fatal.
func process(command string, input io.Reader) []byte {
	args := strings.Split(command, " ")
	proc := exec.Command(args[0], args[1:]...)
	proc.Stdin = input
	proc.Stderr = os.Stderr
	out, err := proc.Output()
	if err != nil {
		log.Fatal(err)
	}
	return out
}
|
2021-05-09 22:14:50 +02:00
|
|
|
|
|
|
|
func (d *Dir) Title() string {
|
|
|
|
return d.index.Title
|
|
|
|
}
|
|
|
|
|
|
|
|
func (d *Dir) Date() time.Time {
|
|
|
|
return d.index.Date
|
|
|
|
}
|
|
|
|
|
|
|
|
func (d *Dir) Content() string {
|
|
|
|
return d.index.Content
|
|
|
|
}
|
2021-05-14 06:17:10 +02:00
|
|
|
|
|
|
|
func (d *Dir) getDir(path string) *Dir {
|
|
|
|
// XXX: This is inefficient
|
|
|
|
if d.Path == path {
|
|
|
|
return d
|
|
|
|
}
|
|
|
|
for _, dir := range d.Dirs {
|
|
|
|
if dir.Path == path {
|
|
|
|
return dir
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for i := range d.Dirs {
|
|
|
|
if dir := d.Dirs[i].getDir(path); dir != nil {
|
|
|
|
return dir
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (d *Dir) getPage(path string) *Page {
|
|
|
|
// XXX: This is inefficient
|
|
|
|
for _, page := range d.Pages {
|
|
|
|
if page.FilePath == path {
|
|
|
|
return page
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for _, dir := range d.Dirs {
|
|
|
|
if page := dir.getPage(path); page != nil {
|
|
|
|
return page
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|