Import existing project

parent 7887817595
commit 80b0cc4939

125 changed files with 16980 additions and 0 deletions

utils/minifier.go (new file, +547 lines)

@@ -0,0 +1,547 @@
package utils

import (
    "crypto/sha512"
    "encoding/base64"
    "fmt"
    "io"
    "net/http"
    "os"
    "path/filepath"
    "regexp"
    "strings"
    "sync"
    "time"

    "github.com/tdewolff/minify/v2"
    "github.com/tdewolff/minify/v2/css"
    "github.com/tdewolff/minify/v2/html"
    "github.com/tdewolff/minify/v2/js"
)

// MinifierOptions configures the asset minification process.
type MinifierOptions struct {
    // Maximum number of goroutines to use for concurrent processing
    MaxWorkers int
    // Skip files that haven't changed since the last run
    SkipUnchanged bool
    // Special file mappings (source path -> destination path)
    SpecialMappings map[string]string
    // Whether to remove comments from all file types
    RemoveComments bool
    // Whether to keep conditional comments (<!--[if IE]>, etc.)
    KeepConditionalComments bool
    // Whether to keep special comments (like license comments starting with /*!)
    KeepSpecialComments bool
    // Whether to add integrity hashes to externally loaded scripts
    AddIntegrityHashes bool
}

// DefaultMinifierOptions returns the default options for asset minification.
func DefaultMinifierOptions() MinifierOptions {
    return MinifierOptions{
        MaxWorkers:              10,
        SkipUnchanged:           true,
        RemoveComments:          true,
        KeepConditionalComments: true,
        KeepSpecialComments:     true,
        AddIntegrityHashes:      true,
        SpecialMappings: map[string]string{
            "develop/static/error.html":            "public/html/error.html",
            "develop/static/pow-interstitial.html": "public/html/pow-interstitial.html",
        },
    }
}
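
// Illustrative usage (not part of this commit): a caller could adjust these
// defaults before running the minifier, for example:
//
//	opts := DefaultMinifierOptions()
//	opts.MaxWorkers = 4             // throttle concurrency
//	opts.AddIntegrityHashes = false // skip SRI rewriting
//	err := MinifyAssetsWithOptions(opts)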

// WorkItem represents a file to be processed by the minifier.
type WorkItem struct {
    SourcePath string
    DestPath   string
    MimeType   string
    Data       []byte
}

// FileResult contains the result of processing a single file.
type FileResult struct {
    MimeType     string
    OriginalSize int64
    MinifiedSize int64
}

// TypeStats tracks statistics for a particular file type.
type TypeStats struct {
    FileCount    int
    OriginalSize int64
    MinifiedSize int64
}

// downloadFile is a helper that downloads a file from a URL.
func downloadFile(url string) ([]byte, error) {
    resp, err := http.Get(url)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("bad status: %s", resp.Status)
    }

    return io.ReadAll(resp.Body)
}

// calculateIntegrity computes the SHA-384 hash for an SRI integrity attribute.
func calculateIntegrity(data []byte) string {
    hash := sha512.Sum384(data)
    return "sha384-" + base64.StdEncoding.EncodeToString(hash[:])
}
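
// Illustrative (not part of this commit): the returned string is the exact
// value browsers expect in a Subresource Integrity attribute, namely the
// algorithm name, a dash, and the base64-encoded digest:
//
//	calculateIntegrity(data) // => "sha384-<base64-encoded SHA-384 digest>"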

// addIntegrityAttributes adds integrity attributes to external scripts and
// stylesheets in HTML.
func addIntegrityAttributes(htmlData []byte) ([]byte, error) {
    content := string(htmlData)

    // Cache downloaded content by URL.
    urlContent := make(map[string][]byte)

    // Normalize URLs (remove query parameters, etc.).
    normalizeURL := func(url string) string {
        // For CDN URLs, query parameters that don't affect content can be dropped.
        if idx := strings.IndexByte(url, '?'); idx > 0 {
            return url[:idx]
        }
        return url
    }

    // Download content if not already cached.
    getContent := func(url string) ([]byte, error) {
        normalizedURL := normalizeURL(url)
        if data, exists := urlContent[normalizedURL]; exists {
            return data, nil
        }

        fmt.Printf("Downloading resource: %s\n", url)
        data, err := downloadFile(url)
        if err != nil {
            return nil, err
        }

        // Cache the content.
        urlContent[normalizedURL] = data
        return data, nil
    }

    // Calculate integrity for a URL.
    getIntegrityForURL := func(url string) (string, error) {
        data, err := getContent(url)
        if err != nil {
            return "", err
        }
        return calculateIntegrity(data), nil
    }

    // Add an integrity attribute to a tag, inserting just before the closing ">".
    addIntegrityToTag := func(tag, url string, insertPos int) (string, bool) {
        if strings.Contains(tag, "integrity=") {
            return tag, false
        }

        integrity, err := getIntegrityForURL(url)
        if err != nil {
            fmt.Printf("Failed to generate integrity for %s: %v\n", url, err)
            return tag, false
        }

        // For self-closing tags, step back past the "/" so the attribute
        // lands before "/>" rather than between "/" and ">".
        if insertPos > 0 && tag[insertPos-1] == '/' {
            insertPos--
        }

        // Add crossorigin only if the tag doesn't already have it.
        if strings.Contains(tag, "crossorigin=") {
            return tag[:insertPos] + fmt.Sprintf(` integrity="%s"`, integrity) + tag[insertPos:], true
        }
        return tag[:insertPos] + fmt.Sprintf(` integrity="%s" crossorigin="anonymous"`, integrity) + tag[insertPos:], true
    }

    // Splice a rewritten tag back into the content.
    updateContent := func(newTag string, matchStart, matchEnd int) {
        content = content[:matchStart] + newTag + content[matchEnd:]
    }

    // Script and preload tags that reference the same URL share the cached
    // download above, so both get the same integrity hash.

    // Process script tags.
    scriptRegex := regexp.MustCompile(`<script[^>]+src=["'](https?:\/\/[^"']+)["'][^>]*>`)
    scriptMatches := scriptRegex.FindAllStringSubmatchIndex(content, -1)

    // Process in reverse order to avoid invalidating offsets.
    for i := len(scriptMatches) - 1; i >= 0; i-- {
        match := scriptMatches[i]
        urlStart, urlEnd := match[2], match[3]
        url := content[urlStart:urlEnd]

        // Skip non-external URLs.
        if !strings.HasPrefix(url, "http") {
            continue
        }

        // Get the script tag.
        tagStart, tagEnd := match[0], match[1]
        tag := content[tagStart:tagEnd]

        // Skip if it already has integrity.
        if strings.Contains(tag, "integrity=") {
            continue
        }

        // Add the integrity attribute.
        newTag, modified := addIntegrityToTag(tag, url, tagEnd-tagStart-1)
        if modified {
            updateContent(newTag, tagStart, tagEnd)
        }
    }

    // Process link[rel=stylesheet] tags.
    cssRegex := regexp.MustCompile(`<link[^>]+rel=["']stylesheet["'][^>]*href=["'](https?:\/\/[^"']+)["'][^>]*>|<link[^>]+href=["'](https?:\/\/[^"']+)["'][^>]*rel=["']stylesheet["'][^>]*>`)
    cssMatches := cssRegex.FindAllStringSubmatchIndex(content, -1)

    for i := len(cssMatches) - 1; i >= 0; i-- {
        match := cssMatches[i]

        // Determine which group captured the URL.
        var url string
        if match[2] != -1 {
            url = content[match[2]:match[3]]
        } else if match[4] != -1 {
            url = content[match[4]:match[5]]
        } else {
            continue
        }

        // Skip non-external URLs.
        if !strings.HasPrefix(url, "http") {
            continue
        }

        // Get the tag.
        tagStart, tagEnd := match[0], match[1]
        tag := content[tagStart:tagEnd]

        // Skip if it already has integrity.
        if strings.Contains(tag, "integrity=") {
            continue
        }

        // Add the integrity attribute.
        newTag, modified := addIntegrityToTag(tag, url, tagEnd-tagStart-1)
        if modified {
            updateContent(newTag, tagStart, tagEnd)
        }
    }

    // Process link[rel=preload] tags for scripts and styles.
    preloadRegex := regexp.MustCompile(`<link[^>]+rel=["']preload["'][^>]*href=["'](https?:\/\/[^"']+)["'][^>]*as=["'](?:script|style)["'][^>]*>|<link[^>]+href=["'](https?:\/\/[^"']+)["'][^>]*rel=["']preload["'][^>]*as=["'](?:script|style)["'][^>]*>|<link[^>]+as=["'](?:script|style)["'][^>]*href=["'](https?:\/\/[^"']+)["'][^>]*rel=["']preload["'][^>]*>`)
    preloadMatches := preloadRegex.FindAllStringSubmatchIndex(content, -1)

    for i := len(preloadMatches) - 1; i >= 0; i-- {
        match := preloadMatches[i]

        // Determine which group captured the URL.
        var url string
        if match[2] != -1 {
            url = content[match[2]:match[3]]
        } else if match[4] != -1 {
            url = content[match[4]:match[5]]
        } else if match[6] != -1 {
            url = content[match[6]:match[7]]
        } else {
            continue
        }

        // Skip non-external URLs.
        if !strings.HasPrefix(url, "http") {
            continue
        }

        // Get the tag.
        tagStart, tagEnd := match[0], match[1]
        tag := content[tagStart:tagEnd]

        // Skip if it already has integrity.
        if strings.Contains(tag, "integrity=") {
            continue
        }

        // Add the integrity attribute.
        newTag, modified := addIntegrityToTag(tag, url, tagEnd-tagStart-1)
        if modified {
            updateContent(newTag, tagStart, tagEnd)
        }
    }

    return []byte(content), nil
}
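
// Illustrative effect (not part of this commit): given an external script tag
//
//	<script src="https://cdn.example.com/lib.js"></script>
//
// addIntegrityAttributes downloads lib.js, hashes it, and rewrites the tag to
//
//	<script src="https://cdn.example.com/lib.js" integrity="sha384-<digest>" crossorigin="anonymous"></script>
//
// Stylesheet and preload links are rewritten the same way.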

// MinifyAssets processes all files in the develop directory and minifies them
// into the public directory using the default options.
func MinifyAssets() error {
    return MinifyAssetsWithOptions(DefaultMinifierOptions())
}

// MinifyAssetsWithOptions processes files with the given options.
func MinifyAssetsWithOptions(opts MinifierOptions) error {
    startTime := time.Now()

    // Create a minifier instance.
    m := minify.New()

    // Configure the HTML minifier with consistent settings.
    m.Add("text/html", &html.Minifier{
        KeepComments:            !opts.RemoveComments,
        KeepConditionalComments: opts.KeepConditionalComments,
        KeepSpecialComments:     opts.KeepSpecialComments,
        KeepWhitespace:          true,
    })

    // Configure the CSS minifier.
    m.AddFunc("text/css", css.Minify)

    // Configure the JS minifier.
    m.AddFunc("text/javascript", js.Minify)

    // Create public directories if they don't exist.
    dirs := []string{"html", "css", "js", "static"}
    for _, dir := range dirs {
        publicDir := filepath.Join("public", dir)
        if err := os.MkdirAll(publicDir, 0755); err != nil {
            return fmt.Errorf("failed to create directory %s: %v", publicDir, err)
        }
    }

    // Make sure special-mapping destinations have their directories.
    for _, destPath := range opts.SpecialMappings {
        destDir := filepath.Dir(destPath)
        if err := os.MkdirAll(destDir, 0755); err != nil {
            return fmt.Errorf("failed to create directory %s: %v", destDir, err)
        }
    }

    // Set up the worker pool and channels.
    workChan := make(chan WorkItem, 100)
    resultChan := make(chan FileResult, 100)
    var wg sync.WaitGroup

    // Fall back to a sane worker count if the option is unset.
    maxWorkers := opts.MaxWorkers
    if maxWorkers <= 0 {
        maxWorkers = 10
    }

    // Launch workers.
    for i := 0; i < maxWorkers; i++ {
        wg.Add(1)
        go func() {
            defer wg.Done()
            for item := range workChan {
                // Add integrity attributes to HTML files if enabled.
                if opts.AddIntegrityHashes && item.MimeType == "text/html" {
                    modified, err := addIntegrityAttributes(item.Data)
                    if err != nil {
                        fmt.Printf("Failed to add integrity attributes to %s: %v\n", item.SourcePath, err)
                    } else {
                        item.Data = modified
                    }
                }
                processFile(m, item, resultChan)
            }
        }()
    }

    // Stats tracking.
    var statsWg sync.WaitGroup
    statsWg.Add(1)
    stats := make(map[string]TypeStats)

    // Start the stats collector goroutine.
    go func() {
        defer statsWg.Done()
        for result := range resultChan {
            typeStat := stats[result.MimeType] // zero value if not yet present
            typeStat.OriginalSize += result.OriginalSize
            typeStat.MinifiedSize += result.MinifiedSize
            typeStat.FileCount++
            stats[result.MimeType] = typeStat
        }
    }()

    // Collect files to process.
    fileCount := 0
    skippedCount := 0
    err := filepath.Walk("develop", func(path string, info os.FileInfo, err error) error {
        if err != nil {
            return fmt.Errorf("error accessing path %s: %v", path, err)
        }

        // Skip directories.
        if info.IsDir() {
            return nil
        }

        fileCount++

        // Check for special mappings first.
        destPath := ""
        if specialDest, exists := opts.SpecialMappings[path]; exists {
            destPath = specialDest
        } else {
            // Get the path relative to develop.
            relPath, err := filepath.Rel("develop", path)
            if err != nil {
                return fmt.Errorf("failed to get relative path: %v", err)
            }
            destPath = filepath.Join("public", relPath)
        }

        // Create the destination directory if it doesn't exist.
        destDir := filepath.Dir(destPath)
        if err := os.MkdirAll(destDir, 0755); err != nil {
            return fmt.Errorf("failed to create directory %s: %v", destDir, err)
        }

        // Skip unchanged files if the option is enabled.
        if opts.SkipUnchanged {
            srcInfo, err := os.Stat(path)
            if err == nil {
                destInfo, err := os.Stat(destPath)
                if err == nil && destInfo.ModTime().After(srcInfo.ModTime()) {
                    skippedCount++
                    return nil
                }
            }
        }

        // Read the source file.
        data, err := os.ReadFile(path)
        if err != nil {
            return fmt.Errorf("failed to read file %s: %v", path, err)
        }

        // Determine the content type from the file extension.
        var mimeType string
        ext := strings.ToLower(filepath.Ext(path))
        switch ext {
        case ".html":
            mimeType = "text/html"
        case ".css":
            mimeType = "text/css"
        case ".js":
            mimeType = "text/javascript"
        default:
            // Copy non-minifiable files as-is immediately (don't send them to the worker pool).
            if err := os.WriteFile(destPath, data, 0644); err != nil {
                return fmt.Errorf("failed to write file %s: %v", destPath, err)
            }
            return nil
        }

        // Queue the file for processing.
        workChan <- WorkItem{
            SourcePath: path,
            DestPath:   destPath,
            MimeType:   mimeType,
            Data:       data,
        }

        return nil
    })

    // Close the work channel, wait for all workers to finish, then drain stats.
    close(workChan)
    wg.Wait()
    close(resultChan)
    statsWg.Wait()

    elapsedTime := time.Since(startTime)

    // Print a summary by file type.
    fmt.Println("\n=== Minification Summary ===")

    // Print HTML stats if available.
    if htmlStats, ok := stats["text/html"]; ok && htmlStats.FileCount > 0 {
        reduction := 100.0 - (float64(htmlStats.MinifiedSize) / float64(htmlStats.OriginalSize) * 100.0)
        fmt.Printf("HTML: %.1f%% Reduction (%d files, %d KB → %d KB)\n",
            reduction,
            htmlStats.FileCount,
            htmlStats.OriginalSize/1024,
            htmlStats.MinifiedSize/1024)
    }

    // Print CSS stats if available.
    if cssStats, ok := stats["text/css"]; ok && cssStats.FileCount > 0 {
        reduction := 100.0 - (float64(cssStats.MinifiedSize) / float64(cssStats.OriginalSize) * 100.0)
        fmt.Printf("CSS: %.1f%% Reduction (%d files, %d KB → %d KB)\n",
            reduction,
            cssStats.FileCount,
            cssStats.OriginalSize/1024,
            cssStats.MinifiedSize/1024)
    }

    // Print JS stats if available.
    if jsStats, ok := stats["text/javascript"]; ok && jsStats.FileCount > 0 {
        reduction := 100.0 - (float64(jsStats.MinifiedSize) / float64(jsStats.OriginalSize) * 100.0)
        fmt.Printf("JavaScript: %.1f%% Reduction (%d files, %d KB → %d KB)\n",
            reduction,
            jsStats.FileCount,
            jsStats.OriginalSize/1024,
            jsStats.MinifiedSize/1024)
    }

    // Print overall totals.
    totalOriginal := int64(0)
    totalMinified := int64(0)
    totalFiles := 0
    for _, typeStat := range stats {
        totalOriginal += typeStat.OriginalSize
        totalMinified += typeStat.MinifiedSize
        totalFiles += typeStat.FileCount
    }

    if totalOriginal > 0 {
        totalReduction := 100.0 - (float64(totalMinified) / float64(totalOriginal) * 100.0)
        fmt.Printf("Overall: %.1f%% Reduction (%d files, %d KB → %d KB)\n",
            totalReduction,
            totalFiles,
            totalOriginal/1024,
            totalMinified/1024)
    }

    fmt.Printf("\nProcessed %d files, skipped %d unchanged files in %v\n",
        fileCount-skippedCount, skippedCount, elapsedTime)

    return err
}
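
// For orientation, the summary printed above has this shape (the numbers here
// are invented purely for illustration, not measured output):
//
//	=== Minification Summary ===
//	HTML: 35.0% Reduction (4 files, 120 KB → 78 KB)
//	CSS: 28.9% Reduction (6 files, 90 KB → 64 KB)
//	JavaScript: 41.0% Reduction (9 files, 300 KB → 177 KB)
//	Overall: 37.5% Reduction (19 files, 510 KB → 319 KB)
//
//	Processed 19 files, skipped 3 unchanged files in 1.2s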

// processFile minifies a single file and sends the result to the result channel.
func processFile(m *minify.M, item WorkItem, resultChan chan<- FileResult) {
    // Minify the content.
    minified, err := m.Bytes(item.MimeType, item.Data)
    if err != nil {
        fmt.Printf("Failed to minify %s: %v\n", item.SourcePath, err)
        // Fall back to the original data if minification fails.
        minified = item.Data
    }

    // Write the minified content to the destination.
    if err := os.WriteFile(item.DestPath, minified, 0644); err != nil {
        fmt.Printf("Failed to write file %s: %v\n", item.DestPath, err)
        return
    }

    // Send the result for statistics.
    resultChan <- FileResult{
        MimeType:     item.MimeType,
        OriginalSize: int64(len(item.Data)),
        MinifiedSize: int64(len(minified)),
    }
}
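
For context, here is a minimal sketch of how this utility might be wired into a build step. It is not part of this commit, and the yourmodule import path is a placeholder for the repository's actual module path:

    package main

    import (
        "log"

        "yourmodule/utils" // placeholder: replace with this repository's module path
    )

    func main() {
        // Minifies develop/ into public/ with the default options,
        // including SRI hashes for external scripts and stylesheets.
        if err := utils.MinifyAssets(); err != nil {
            log.Fatalf("minification failed: %v", err)
        }
    }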