feat: add static site generation functionality

Committed by Evrard Van Espen (aider)
parent 50412cb819
commit 08c64388cf
main.go (normal file, 61 lines added)
@@ -0,0 +1,61 @@
package main

import (
    "log"
    "slices"
)

// main is the entry point of the application.
// It orchestrates the process of generating the static website by:
// 1. Listing all posts.
// 2. Compiling SCSS styles.
// 3. Rendering each post.
// 4. Rendering the home page.
// 5. Copying static files.
// 6. Rendering tag pages.
// 7. Copying media files.
func main() {
    posts, _ := listPosts()
    var tags []string
    postsByTag := make(map[string][]Post)

    for _, p := range posts {
        for _, t := range p.Tags {
            if !slices.Contains(tags, t) {
                tags = append(tags, t)
            }

            if postsByTag[t] == nil {
                postsByTag[t] = []Post{}
            }

            postsByTag[t] = append(postsByTag[t], p)
        }
    }

    css, _ := compileSCSS()

    log.Println(len(posts), "posts to handle")

    for _, p := range posts {
        _ = renderPost(p, css, tags)
    }

    // Process the index.html template
    if err := renderHome(posts, tags, css); err != nil {
        log.Fatal("Error processing index template:", err)
    }

    // Copy the "static" folder to the "build" folder
    if err := copyDir("static/", "build"); err != nil {
        log.Fatal("Error copying static files:", err)
    }

    for _, t := range tags {
        renderTagPage(t, postsByTag[t], tags, css)
    }

    if err := copyMedias(); err != nil {
        log.Fatal("Error copying media files:", err)
    }
}
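Taken together, a successful run should leave a build/ directory shaped roughly like the sketch below. The slug, tag, and media names are hypothetical, but each path pattern comes straight from the functions in this commit:

    build/
        index.html                    written by renderHome
        posts/my-first-post.html      one page per post, written by renderPost
        tags/go.html                  one page per tag, written by renderTagPage
        medias/                       media files hard-linked by copyMedias
        (contents of static/)         mirrored in place by copyDir("static/", "build")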
medias.go (normal file, 39 lines added)
@@ -0,0 +1,39 @@
package main

import (
    "errors"
    "io/fs"
    "log"
    "os"
    "path/filepath"
    "strings"
)

// copyMedias copies media files from the posts directory to the build/medias directory.
// It creates the build/medias directory if it doesn't exist.
// It walks through the posts directory and copies all .jpg, .jpeg, .png, and .mp4 files.
// Returns any error encountered during the process.
func copyMedias() error {
    if err := os.MkdirAll("build/medias", os.ModePerm); err != nil {
        log.Fatal("Error creating directory:", err)
        return err
    }

    return filepath.WalkDir("posts/", func(s string, d fs.DirEntry, err error) error {
        if filepath.Ext(s) == ".jpg" || filepath.Ext(s) == ".jpeg" || filepath.Ext(s) == ".png" || filepath.Ext(s) == ".mp4" {
            newPath := strings.ReplaceAll(s, "posts/", "build/medias/")

            if _, err := os.Stat(newPath); err == nil {
                log.Println("Media", newPath, "already handled")
            } else if errors.Is(err, os.ErrNotExist) {
                err := os.Link(s, newPath)
                if err != nil {
                    log.Fatal("Failed to handle media", s)
                }
                log.Println("Copied media from", s, "to", newPath)
            }
        }
        return nil
    })
}
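The destination path is derived by plain string substitution, and os.Link creates a hard link rather than copying bytes, so handling a media file is effectively instant. A small sketch with a hypothetical file name:

    src := "posts/my-first-post/cover.jpg"                    // hypothetical media file
    dst := strings.ReplaceAll(src, "posts/", "build/medias/") // "build/medias/my-first-post/cover.jpg"
    if err := os.Link(src, dst); err != nil {                 // hard link, no extra disk space used
        log.Println("link failed:", err)
    }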
parse.go (normal file, 127 lines added)
@@ -0,0 +1,127 @@
package main

import (
    "fmt"
    "log"
    "os"
    "path"
    "path/filepath"
    "sort"
    "strings"
    "time"

    "github.com/niklasfasching/go-org/org"
)

// Post represents a blog post with metadata and content.
type Post struct {
    Title       string        // Title of the post
    Slug        string        // URL-friendly identifier for the post
    Tags        []string      // Tags associated with the post
    Description string        // Brief description of the post
    Date        time.Time     // Date when the post was published
    DateStr     string        // Date when the post was published (YYYY-MM-DD)
    Timestamp   int64         // Unix timestamp of the publication date
    Path        string        // File path to the original .org file
    PathHtml    string        // URL path to the rendered HTML file
    Content     *org.Document // Parsed content of the post
    ReadTime    uint8         // Estimated reading time in minutes
    Hero        string        // URL path to the hero image for the post
}

// listPosts reads the posts directory and returns a slice of Post structs.
// It filters out non-.org files, parses each .org file, and sorts the posts by date in descending order.
// Returns the slice of posts and any error encountered during the process.
func listPosts() ([]Post, error) {
    entries, err := os.ReadDir("posts")
    if err != nil {
        fmt.Println("Error reading directory:", err)
        return nil, err
    }

    entries = filter(entries, func(e os.DirEntry) bool { return filepath.Ext(e.Name()) == ".org" })

    var posts []Post
    for _, entry := range entries {
        filePath := filepath.Join("posts", entry.Name())

        post, err := parseOrg(filePath)
        if err != nil {
            log.Println("[!] Unable to parse", filePath)
        } else {
            posts = append(posts, post)
        }
    }

    sort.Slice(posts, func(i, j int) bool {
        return posts[i].Timestamp > posts[j].Timestamp
    })

    return posts, nil
}

// handleImages processes image and video links in the org document.
// It updates the URL of the link to point to the media directory.
// Parameters:
// - protocol: The protocol of the link (e.g., "file", "http").
// - description: The description of the link.
// - link: The URL of the link.
//
// Returns:
// - The processed link node.
func handleImages(protocol string, description []org.Node, link string) org.Node {
    linked := org.RegularLink{protocol, description, link, false}
    if linked.Kind() == "image" || linked.Kind() == "video" {
        linked.URL = path.Join("/medias/", linked.URL)
    }
    return linked
}

// parseOrg parses an org file and returns a Post struct.
// It reads the file, extracts metadata, and calculates the reading time.
// Parameters:
// - filePath: The path to the org file.
//
// Returns:
// - The parsed Post struct.
// - Any error encountered during the process.
func parseOrg(filePath string) (Post, error) {
    file, err := os.Open(filePath)
    if err != nil {
        log.Fatal("Error reading file")
        return Post{}, err
    }

    config := org.New()
    config.ResolveLink = handleImages

    orgData := config.Parse(file, filePath)

    title := orgData.Get("TITLE")
    description := orgData.Get("DESCRIPTION")
    dateStr := strings.Split(orgData.Get("DATE"), "T")[0]
    slug := orgData.Get("SLUG")
    tags := strings.Split(orgData.Get("TAGS"), ", ")
    hero := path.Join("/medias", orgData.Get("HERO"))

    date, _ := time.Parse("2006-01-02", dateStr)
    ts := date.Unix()

    raw, _ := os.ReadFile(filePath)
    readTime := len(strings.Split(string(raw), " ")) / 200

    return Post{
        Title:       title,
        Slug:        slug,
        Tags:        tags,
        Description: description,
        Date:        date,
        DateStr:     date.Format("2006-01-02"),
        Timestamp:   ts,
        Path:        filePath,
        PathHtml:    "/posts/" + slug + ".html",
        Content:     orgData,
        ReadTime:    uint8(readTime),
        Hero:        hero,
    }, nil
}
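The orgData.Get calls read org buffer settings, i.e. "#+KEY: value" lines at the top of each post, and TAGS is split on a comma followed by a space. A hypothetical posts/my-first-post.org front matter matching what parseOrg expects might look like:

    #+TITLE: My first post
    #+DESCRIPTION: A short introduction to this blog
    #+DATE: 2024-05-12T10:00:00
    #+SLUG: my-first-post
    #+TAGS: go, blog
    #+HERO: my-first-post/cover.jpg

With that HERO value the post's Hero field becomes /medias/my-first-post/cover.jpg, and handleImages rewrites inline image and video links under /medias/ in the same way.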
render.go (normal file, 254 lines added)
@@ -0,0 +1,254 @@
package main

import (
    "fmt"
    "html/template"
    "log"
    "os"
    "strings"

    "github.com/alecthomas/chroma/v2"
    "github.com/alecthomas/chroma/v2/formatters/html"
    "github.com/alecthomas/chroma/v2/lexers"
    "github.com/alecthomas/chroma/v2/styles"
    "github.com/niklasfasching/go-org/org"
)

// renderHome renders the home page of the website.
// It processes the index template, executes it with the provided posts and tags,
// and writes the resulting HTML to the build directory.
// Parameters:
// - posts: A slice of Post structs representing the blog posts.
// - tags: A slice of strings representing the tags.
// - css: A string containing the compiled CSS styles.
// Returns:
// - An error if any step of the process fails, otherwise nil.
func renderHome(posts []Post, tags []string, css string) error {
    indexTmpl, _ := template.ParseFiles("templates/parts/index.html")
    var indexContentBuf strings.Builder
    indexData := struct {
        Posts []Post
    }{
        Posts: posts,
    }
    _ = indexTmpl.Execute(&indexContentBuf, indexData)

    // Parse the layout and header templates
    tmpl, err := template.ParseFiles("templates/layout.html", "templates/parts/header.html")
    if err != nil {
        return fmt.Errorf("error parsing template: %v", err)
    }

    // Create a buffer to hold the template output
    var buf strings.Builder

    // Execute the template with the necessary data
    data := struct {
        Css         template.CSS
        Content     template.HTML
        Hero        template.HTML
        Tags        []string
        ShowSidebar bool
    }{
        Css:         template.CSS(css),
        Content:     template.HTML(indexContentBuf.String()),
        Hero:        template.HTML("<div id=\"hero\"></div>"),
        Tags:        tags,
        ShowSidebar: true,
    }

    if err := tmpl.Execute(&buf, data); err != nil {
        return fmt.Errorf("error executing template: %v", err)
    }

    // Create the build directory if it doesn't exist
    if err := os.MkdirAll("build", os.ModePerm); err != nil {
        return fmt.Errorf("error creating directory: %v", err)
    }

    // Write the HTML content to the index.html file in the build directory
    if err := os.WriteFile("build/index.html", []byte(buf.String()), 0644); err != nil {
        return fmt.Errorf("error writing HTML file: %v", err)
    }

    log.Println("Wrote build/index.html")
    return nil
}

// highlightCodeBlock highlights a code block using the specified language and parameters.
// It uses the chroma library to tokenize and format the code block.
// Parameters:
// - source: The source code to highlight.
// - lang: The programming language of the code.
// - inline: Whether the code block is inline or not.
// - params: Additional parameters for highlighting, such as highlighted lines.
// Returns:
// - A string containing the highlighted code block in HTML format.
func highlightCodeBlock(source, lang string, inline bool, params map[string]string) string {
    var w strings.Builder
    l := lexers.Get(lang)
    if l == nil {
        l = lexers.Fallback
    }
    l = chroma.Coalesce(l)
    it, _ := l.Tokenise(nil, source)
    options := []html.Option{}
    if params[":hl_lines"] != "" {
        ranges := org.ParseRanges(params[":hl_lines"])
        if ranges != nil {
            options = append(options, html.HighlightLines(ranges))
        }
    }
    _ = html.New(options...).Format(&w, styles.Get("dracula"), it)
    if inline {
        return `<div class="highlight-inline">` + "\n" + w.String() + "\n" + `</div>`
    }
    return `<div class="highlight">` + "\n" + w.String() + "\n" + `</div>`
}

// renderPost renders a single blog post to an HTML file.
// It processes the post content, applies syntax highlighting to code blocks,
// and writes the resulting HTML to the build directory.
// Parameters:
// - post: The Post struct representing the blog post.
// - css: A string containing the compiled CSS styles.
// - tags: A slice of strings representing the tags.
// Returns:
// - An error if any step of the process fails, otherwise nil.
func renderPost(post Post, css string, tags []string) error {
    htmlFilePath := "build/posts/" + post.Slug + ".html"
    render := func(w org.Writer) string {
        out, err := post.Content.Write(w)
        if err != nil {
            log.Fatal(err)
        }
        return out
    }

    renderer := org.NewHTMLWriter()
    renderer.HighlightCodeBlock = highlightCodeBlock
    htmlContent := render(renderer)

    if err := os.MkdirAll("build/posts", os.ModePerm); err != nil {
        log.Fatal("Error creating directory:", err)
        return err
    }

    // Parse the layout and header templates
    tmpl, err := template.ParseFiles("templates/layout.html", "templates/parts/header.html")
    if err != nil {
        log.Fatal("Error parsing template:", err)
        return err
    }

    hero := func(post Post) template.HTML {
        if post.Hero != "" {
            return template.HTML(fmt.Sprintf("<img id=\"hero\" src=\"%s\"/>", post.Hero))
        } else {
            return template.HTML("")
        }
    }

    // Create a buffer to hold the template output
    var buf strings.Builder

    // Execute the template with the necessary data
    data := struct {
        Content     template.HTML
        Css         template.CSS
        Hero        template.HTML
        Tags        []string
        ShowSidebar bool
    }{
        Content:     template.HTML(htmlContent),
        Css:         template.CSS(css),
        Hero:        hero(post),
        Tags:        tags,
        ShowSidebar: false,
    }

    if err := tmpl.Execute(&buf, data); err != nil {
        log.Fatal("Error executing template:", err)
        return err
    }

    // Write the HTML content to the new file
    if err := os.WriteFile(htmlFilePath, []byte(buf.String()), 0644); err != nil {
        log.Fatal("Error writing HTML file:", err)
        return err
    }

    log.Println("Wrote", htmlFilePath)
    return nil
}

// renderTagPage renders a tag page for a specific tag.
// It processes the tag page template, executes it with the provided tag and posts,
// and writes the resulting HTML to the build directory.
// Parameters:
// - tag: The tag for which the page is being rendered.
// - posts: A slice of Post structs representing the blog posts associated with the tag.
// - tags: A slice of strings representing all tags.
// - css: A string containing the compiled CSS styles.
// Returns:
// - An error if any step of the process fails, otherwise nil.
func renderTagPage(tag string, posts []Post, tags []string, css string) error {
    htmlFilePath := "build/tags/" + tag + ".html"

    if err := os.MkdirAll("build/tags", os.ModePerm); err != nil {
        log.Fatal("Error creating directory:", err)
        return err
    }

    tagPageTmpl, _ := template.ParseFiles("templates/parts/tagPage.html")
    var tagPageContentBuf strings.Builder
    tagPageData := struct {
        Tag   string
        Posts []Post
    }{
        Tag:   tag,
        Posts: posts,
    }
    _ = tagPageTmpl.Execute(&tagPageContentBuf, tagPageData)

    // Parse the layout and header templates
    tmpl, err := template.ParseFiles("templates/layout.html", "templates/parts/header.html")
    if err != nil {
        log.Fatal("Error parsing template:", err)
        return err
    }

    // Create a buffer to hold the template output
    var buf strings.Builder

    // Execute the template with the necessary data
    data := struct {
        Content     template.HTML
        Css         template.CSS
        Hero        template.HTML
        Tags        []string
        ShowSidebar bool
        Tag         string
    }{
        Content:     template.HTML(tagPageContentBuf.String()),
        Css:         template.CSS(css),
        Hero:        template.HTML(""),
        Tags:        tags,
        ShowSidebar: false,
        Tag:         tag,
    }

    if err := tmpl.Execute(&buf, data); err != nil {
        log.Fatal("Error executing template:", err)
        return err
    }

    // Write the HTML content to the new file
    if err := os.WriteFile(htmlFilePath, []byte(buf.String()), 0644); err != nil {
        log.Fatal("Error writing HTML file:", err)
        return err
    }

    log.Println("Wrote", htmlFilePath)
    return nil
}
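The layout and header templates are not part of this commit, so the sketch below is only a guess at a minimal templates/layout.html that would satisfy the data structs above (Css, Content, Hero, Tags, ShowSidebar). The "header" template name and the /tags/ link format are assumptions:

    <!DOCTYPE html>
    <html>
    <head><style>{{ .Css }}</style></head>
    <body>
        {{ template "header" . }}
        {{ .Hero }}
        {{ if .ShowSidebar }}
        <nav>{{ range .Tags }}<a href="/tags/{{ . }}.html">{{ . }}</a> {{ end }}</nav>
        {{ end }}
        <main>{{ .Content }}</main>
    </body>
    </html>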
static.go (normal file, 65 lines added)
@@ -0,0 +1,65 @@
package main

import (
    "io"
    "os"
    "path/filepath"
)

// copyFile copies a file from src to dst.
// It reads the source file and writes the content to the destination file.
// Parameters:
// - src: The path to the source file.
// - dst: The path to the destination file.
//
// Returns:
// - Any error encountered during the process.
func copyFile(src, dst string) error {
    sourceFile, err := os.Open(src)
    if err != nil {
        return err
    }
    defer sourceFile.Close()

    destFile, err := os.Create(dst)
    if err != nil {
        return err
    }
    defer destFile.Close()

    _, err = io.Copy(destFile, sourceFile)
    if err != nil {
        return err
    }

    return nil
}

// copyDir copies a directory and its contents from src to dst.
// It walks through the source directory and copies each file and subdirectory to the destination.
// Parameters:
// - src: The path to the source directory.
// - dst: The path to the destination directory.
//
// Returns:
// - Any error encountered during the process.
func copyDir(src, dst string) error {
    return filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
        if err != nil {
            return err
        }

        relPath, err := filepath.Rel(src, path)
        if err != nil {
            return err
        }

        destPath := filepath.Join(dst, relPath)

        if info.IsDir() {
            return os.MkdirAll(destPath, info.Mode())
        }

        return copyFile(path, destPath)
    })
}
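For reference, the destination path inside copyDir is just the source path re-rooted onto dst, so the tree under static/ is preserved one-to-one. A quick sketch with a hypothetical file:

    rel, _ := filepath.Rel("static/", "static/fonts/iosevka.woff2") // "fonts/iosevka.woff2" (hypothetical file)
    dst := filepath.Join("build", rel)                              // "build/fonts/iosevka.woff2"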
styles.go (normal file, 63 lines added)
@@ -0,0 +1,63 @@
package main

import (
    "fmt"
    "log"
    "os"
    "path/filepath"

    "github.com/bep/godartsass/v2"
)

type importResolver struct {
    baseDir string
}

func (t importResolver) CanonicalizeURL(url string) (string, error) {
    fullPath := filepath.Join("/", url)
    return fullPath, nil
}

func (t importResolver) Load(url string) (godartsass.Import, error) {
    fullPath := filepath.Join(t.baseDir, url)
    data, err := os.ReadFile(fullPath)
    if err != nil {
        return godartsass.Import{}, fmt.Errorf("cannot read %s: %v", fullPath, err)
    }

    return godartsass.Import{
        Content:      string(data),
        SourceSyntax: godartsass.SourceSyntaxSCSS,
    }, nil
}

func compileSCSS() (string, error) {
    data, _ := os.ReadFile("styles/main.scss")

    args := godartsass.Args{
        Source:       string(data),
        URL:          "styles/main.scss",
        IncludePaths: []string{"styles/"},
        ImportResolver: importResolver{
            baseDir: "styles/",
        },
        OutputStyle:             godartsass.OutputStyleExpanded,
        EnableSourceMap:         false,
        SourceMapIncludeSources: false,
    }

    transpiler, err := godartsass.Start(godartsass.Options{})
    if err != nil {
        log.Fatal(err)
    }

    css, err := transpiler.Execute(args)
    if err != nil {
        log.Fatal(err)
    }

    log.Println("CSS compiled")

    return css.CSS, nil
}
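The stylesheet itself is not in this commit. Since importResolver.Load joins the requested URL onto styles/ without adding an extension, a hypothetical styles/main.scss would spell out imported partials with their full file names, for example:

    @import "variables.scss";  // loaded by importResolver.Load as styles/variables.scss

    body {
        margin: 0;
    }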
utils.go (normal file, 19 lines added)
@@ -0,0 +1,19 @@
package main

// filter filters a slice based on a predicate function.
// It returns a new slice containing only the elements for which the predicate returns true.
// Parameters:
// - s: The slice to filter.
// - predicate: The function to test each element of the slice.
//
// Returns:
// - A new slice containing the filtered elements.
func filter[T any](s []T, predicate func(T) bool) []T {
    result := make([]T, 0, len(s)) // Pre-allocate for efficiency
    for _, v := range s {
        if predicate(v) {
            result = append(result, v)
        }
    }
    return result
}
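filter is used in parse.go to keep only the .org directory entries; a minimal generic usage sketch:

    evens := filter([]int{1, 2, 3, 4}, func(n int) bool { return n%2 == 0 })
    // evens == []int{2, 4}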