package main

import (
	"bytes"
	"fmt"
	"io"
	"io/fs"
	"log"
	"os"
	"os/exec"
	pathpkg "path"
	"sort"
	"strings"
	"time"

	"gopkg.in/yaml.v3"
)

// Page represents a page.
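//
// The exported fields with yaml tags are populated from a page's frontmatter.
// A minimal sketch of such frontmatter, assuming the usual "---"-delimited
// YAML block recognized by extractFrontmatter:
//
//	---
//	title: Hello, world
//	date: 2024-01-01
//	weight: 10
//	outputs: [html]
//	---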
type Page struct {
	Title    string                 `yaml:"title"`
	Date     time.Time              `yaml:"date"`
	Weight   int                    `yaml:"weight"`
	Outputs  []string               `yaml:"outputs"`
	Params   map[string]interface{} `yaml:"params"`
	Path     string                 `yaml:"-"`
	FilePath string                 `yaml:"-"`
	URL      string                 `yaml:"-"`
	Content  string                 `yaml:"-"`
	Prev     *Page                  `yaml:"-"`
	Next     *Page                  `yaml:"-"`
	Pages    []*Page                `yaml:"-"`
	Dirs     []*Page                `yaml:"-"`
	feeds    map[string][]byte
	index    bool
}

// read reads from a directory and indexes the files and directories within it.
func (p *Page) read(srcDir string, task *Task, cfg *Site) error {
	return p._read(os.DirFS(srcDir), ".", task, cfg)
}

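// _read recursively walks fsys starting at path, collecting files that match
// the task's extensions into p.Pages and indexing subdirectories into p.Dirs.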
func (p *Page) _read(fsys fs.FS, path string, task *Task, cfg *Site) error {
	entries, err := fs.ReadDir(fsys, path)
	if err != nil {
		return err
	}
	for _, entry := range entries {
		name := entry.Name()
		path := pathpkg.Join(path, name)
		if entry.IsDir() {
			// Ignore directories beginning with "_"
			if strings.HasPrefix(name, "_") {
				continue
			}
			// Gather directory data
			dirPath := "/" + path + "/"
			dir := &Page{
				Path:     dirPath,
				FilePath: path,
				URL:      task.URL + dirPath,
			}
			if err := dir._read(fsys, path, task, cfg); err != nil {
				return err
			}
			p.Dirs = append(p.Dirs, dir)
		} else if ext := pathpkg.Ext(name); task.Match(ext) {
			// Ignore pages beginning with "_" with the exception of _index pages
			namePrefix := strings.TrimSuffix(name, ext)
			if strings.HasPrefix(name, "_") && namePrefix != "_index" {
				continue
			}

			content, err := fs.ReadFile(fsys, path)
			if err != nil {
				return err
			}

			page := &Page{
				FilePath: path,
			}
			if namePrefix == "_index" {
				p.index = true
				page = p
			}

			// Try to parse the date from the page filename
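			// For example, a source file named "2006-01-02-hello.gmi" gets its
			// Date set from the prefix and is treated as "hello.gmi" below.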
			const layout = "2006-01-02"
			base := pathpkg.Base(path)
			if len(base) >= len(layout) {
				dateStr := base[:len(layout)]
				if time, err := time.Parse(layout, dateStr); err == nil {
					page.Date = time
					// Remove the date from the path
					base = base[len(layout):]
					if len(base) > 0 {
						// Remove a leading dash
						if base[0] == '-' {
							base = base[1:]
						}
						if len(base) > 0 {
							dir := pathpkg.Dir(path)
							if dir == "." {
								dir = ""
							}
							path = pathpkg.Join(dir, base)
						}
					}
				}
			}

			// Extract frontmatter from content
			frontmatter, content := extractFrontmatter(content)
			if len(frontmatter) != 0 {
				if err := yaml.Unmarshal(frontmatter, page); err != nil {
					log.Printf("failed to parse frontmatter for %q: %v", path, err)
				} else if page.Outputs != nil {
					// Enforce page outputs
					if len(page.Outputs) == 0 || task.Name == "" {
						continue
					}
					found := false
					for _, output := range page.Outputs {
						if output == task.Name {
							found = true
							break
						}
					}
					if !found {
						continue
					}
				}

				// Trim leading newlines from content
				content = bytes.TrimLeft(content, "\r\n")
			}

			if cmd, ok := task.preprocess[strings.TrimPrefix(ext, ".")]; ok {
				var buf bytes.Buffer
				if err := execute(cmd, bytes.NewReader(content), &buf); err != nil {
					return err
				}
				content = buf.Bytes()
			}
			page.Content = string(content)

			if !page.index {
				if namePrefix == "index" {
					path = "/" + strings.TrimSuffix(path, name)
				} else {
					path = "/" + strings.TrimSuffix(path, ext)
					if task.UglyURLs {
						path += task.OutputExt
					} else {
						path += "/"
					}
				}
				page.Path = path
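				// A permalink template configured for the enclosing directory
				// (cfg.permalinks is keyed by the directory's path) overrides
				// the path derived from the filename.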
				if permalink, ok := cfg.permalinks[p.Path]; ok {
					var b strings.Builder
					permalink.Execute(&b, page)
					page.Path = b.String()
				}
				page.URL = task.URL + page.Path
				p.Pages = append(p.Pages, page)
			}
		}
	}
	return nil
}

// process processes the directory's contents.
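// It first builds any feeds configured for this directory, then renders the
// index page with the "index" template and each page with the "page" template,
// falling back to the "base" template when the more specific one is missing.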
func (p *Page) process(cfg *Site, task *Task) error {
	// Build feeds before templates are applied to the page contents
	for _, feed := range task.feeds[p.FilePath] {
		b, err := p.buildFeed(cfg, feed)
		if err != nil {
			return err
		}
		p.addFeed(feed.Output, b)
	}

	if task.TemplateExt != "" {
		// Create index
		if p.index {
			tmpl, ok := cfg.templates.FindTemplate(p.FilePath, "index"+task.TemplateExt)
			if !ok {
				// Try the base template
				tmpl, ok = cfg.templates.FindTemplate(p.FilePath, "base"+task.TemplateExt)
			}
			if ok {
				var b strings.Builder
				if err := tmpl.Execute(&b, p); err != nil {
					return err
				}
				p.Content = b.String()
			}
		}

		// Process pages
		for i := range p.Pages {
			var b strings.Builder
			tmpl, ok := cfg.templates.FindTemplate(p.FilePath, "page"+task.TemplateExt)
			if !ok {
				// Try the base template
				tmpl, ok = cfg.templates.FindTemplate(p.FilePath, "base"+task.TemplateExt)
			}
			if ok {
				if err := tmpl.Execute(&b, p.Pages[i]); err != nil {
					return err
				}
				p.Pages[i].Content = b.String()
			}
		}
	}

	// Process subdirectories
	for _, d := range p.Dirs {
		if err := d.process(cfg, task); err != nil {
			return err
		}
	}
	return nil
}

// buildFeed builds the feed of the directory.
func (p *Page) buildFeed(cfg *Site, feed Feed) ([]byte, error) {
	// Feed represents a feed.
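	// Note: this local Feed (the data passed to the feed template) shadows the
	// package-level Feed type of the feed parameter for the rest of the body.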
	type Feed struct {
		Title string
		Path  string
		URL   string
		Pages []*Page
	}

	tmpl, ok := cfg.templates.FindTemplate(p.FilePath, feed.Template)
	if !ok {
		return nil, fmt.Errorf("failed to generate feed %q: missing feed template %q", feed.Title, feed.Template)
	}

	var b bytes.Buffer
	data := Feed{
		Title: feed.Title,
		Path:  p.Path,
		URL:   p.URL,
		Pages: p.Pages,
	}
	if err := tmpl.Execute(&b, data); err != nil {
		return nil, err
	}
	return b.Bytes(), nil
}

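// addFeed records rendered feed content under the given output filename.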
func (p *Page) addFeed(name string, content []byte) {
	if p.feeds == nil {
		p.feeds = map[string][]byte{}
	}
	p.feeds[name] = content
}

// write writes the directory's contents to the provided destination path.
func (p *Page) write(dstDir string, task *Task) error {
	dirPath := pathpkg.Join(dstDir, p.Path)

	// Write pages
	for _, page := range p.Pages {
		dstPath := pathpkg.Join(dstDir, page.Path)
		if !task.UglyURLs {
			dstPath = pathpkg.Join(dstPath, "index"+task.OutputExt)
		}
		if err := page.writeTo(dstPath, task); err != nil {
			return err
		}
	}
	// Write index page
	if p.index {
		dstPath := pathpkg.Join(dstDir, p.Path, "index"+task.OutputExt)
		if err := p.writeTo(dstPath, task); err != nil {
			return err
		}
	}

	// Write feeds
	for name, content := range p.feeds {
		dstPath := pathpkg.Join(dstDir, name)
		os.MkdirAll(dirPath, 0755)
		if err := os.WriteFile(dstPath, content, 0644); err != nil {
			return err
		}
	}

	// Write subdirectories
	for _, dir := range p.Dirs {
		dir.write(dstDir, task)
	}
	return nil
}

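// writeTo writes the page content to dstPath, first piping it through the
// task's postprocess command when one is configured.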
func (p *Page) writeTo(dstPath string, task *Task) error {
	var content []byte
	if cmd := task.postprocess; cmd != nil {
		var buf bytes.Buffer
		if err := execute(cmd, strings.NewReader(p.Content), &buf); err != nil {
			return err
		}
		content = buf.Bytes()
	} else {
		content = []byte(p.Content)
	}

	dir := pathpkg.Dir(dstPath)
	os.MkdirAll(dir, 0755)
	if err := os.WriteFile(dstPath, content, 0644); err != nil {
		return err
	}
	return nil
}

// sort sorts the directory's pages by weight, then date, then filepath.
func (p *Page) sort() {
	sortPages(p.Pages)
	sortPages(p.Dirs)

	// Sort subdirectories
	for _, d := range p.Dirs {
		d.sort()
	}
}

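// sortPages orders pages by weight, then by date (newest first), then by file
// path, and links each page to its Prev and Next neighbors. The three sorts run
// in reverse order of precedence: the later, stable sorts take priority and the
// earlier ones only break ties.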
func sortPages(pages []*Page) {
	sort.Slice(pages, func(i, j int) bool {
		pi, pj := pages[i], pages[j]
		return pi.FilePath < pj.FilePath
	})

	sort.SliceStable(pages, func(i, j int) bool {
		pi, pj := pages[i], pages[j]
		return pi.Date.After(pj.Date)
	})

	sort.SliceStable(pages, func(i, j int) bool {
		pi, pj := pages[i], pages[j]
		return pi.Weight < pj.Weight
	})

	for i := range pages {
		if i-1 >= 0 {
			pages[i].Prev = pages[i-1]
		}
		if i+1 < len(pages) {
			pages[i].Next = pages[i+1]
		}
	}
}

// execute runs a command.
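// An empty command is a no-op: nothing is read from input or written to output.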
func execute(command []string, input io.Reader, output io.Writer) error {
	if len(command) == 0 {
		return nil
	}
	cmd := exec.Command(command[0], command[1:]...)
	cmd.Stdin = input
	cmd.Stderr = os.Stderr
	cmd.Stdout = output
	err := cmd.Run()
	if err != nil {
		return err
	}
	return nil
}

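// getPage looks up a page or directory by path, searching this directory and
// its subdirectories recursively. It returns nil when nothing matches.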
func (p *Page) getPage(path string) *Page {
	// XXX: This is inefficient
	if p.Path == path {
		return p
	}
	for _, page := range p.Pages {
		if page.FilePath == path {
			return page
		}
	}
	for _, dir := range p.Dirs {
		if dir.Path == path {
			return dir
		}
	}
	for _, dir := range p.Dirs {
		if page := dir.getPage(path); page != nil {
			return page
		}
	}
	return nil
}