sketch: compress JS and CSS
We noticed that our JS bundles weren't compressed; let's compress them.
Sketch did most of the work here itself, but there's some nonsense
around the fact that we pass a zip.Reader around, which can't seek.
We're probably doing more work than strictly necessary on the server side.
Add gzip compression for JS and source map files in esbuild
- Added compression for .js and .js.map files in the esbuild Build() function
- Gzipped files are created alongside the originals with .gz extension
- This allows for compressed asset serving when supported by clients
Co-Authored-By: sketch
Add support for serving pre-compressed JS and source map files
- Created a custom HTTP handler for /static/ path
- Handler checks if requested JS/JS.map files have gzipped versions
- Serves gzipped files with proper Content-Encoding headers when available
- Falls back to original files when compressed versions are not found
- Client support for gzip encoding is verified through Accept-Encoding header
Co-Authored-By: sketch
Extend gzip compression to include CSS files
- Added CSS files to the compression list alongside JS and source map files
- Added debug output to show which files are being compressed
- Updated error messages to reflect the inclusion of CSS files
Co-Authored-By: sketch
Fix variable naming in Accept-Encoding header processing
- Renamed 'header' variable to 'encoding' for better semantics when processing Accept-Encoding headers
- Addresses gopls check issue in compressed file handler
Co-Authored-By: sketch
Simplify Accept-Encoding header processing
- Simplified check for gzip support using strings.Contains()
- More efficient approach than splitting and iterating
- Addresses gopls efficiency recommendation
Co-Authored-By: sketch
Extract compressed file handler to separate package
- Created a new package loop/server/gzhandler
- Moved compressedFileHandler implementation to the new package
- Renamed to Handler for better Go idioms
- Updated loophttp.go to use the new package
- Improved modularity and separation of concerns
Co-Authored-By: sketch
Enhance gzhandler and add test coverage
- Updated gzhandler to look for pre-compressed variants of all files except .gz files themselves
- Added comprehensive test suite for gzhandler
- Removed debug print from esbuild.go
- Tests different file types, browsers with and without gzip support
- Tests directory handling
Co-Authored-By: sketch
Fix 'seeker can't seek' error in gzhandler
- Changed approach to read gzipped file into memory before serving
- Avoids io.Seeker interface requirement for http.ServeContent
- Fixes 500 error when serving compressed JavaScript files
- Added missing io import
Co-Authored-By: sketch
diff --git a/loop/server/gzhandler/gzhandler.go b/loop/server/gzhandler/gzhandler.go
new file mode 100644
index 0000000..3109ea2
--- /dev/null
+++ b/loop/server/gzhandler/gzhandler.go
@@ -0,0 +1,86 @@
+// Package gzhandler provides an HTTP file server implementation that serves pre-compressed files
+// when available to clients that support gzip encoding.
+package gzhandler
+
+import (
+ "io"
+ "io/fs"
+ "mime"
+ "net/http"
+ "path"
+ "strings"
+)
+
+// Handler is an http.Handler that checks for pre-compressed files
+// and serves them with appropriate headers when available.
+type Handler struct {
+ root http.FileSystem // file tree to serve; produced by http.FS in New
+}
+
+// New creates a handler that serves HTTP requests
+// with the contents of the file system rooted at root and uses pre-compressed
+// .gz files when available, with appropriate headers.
+// The returned handler falls back to plain http.FileServer semantics whenever
+// no usable "<path>.gz" sibling exists or the client does not accept gzip.
+func New(root fs.FS) http.Handler {
+ return &Handler{root: http.FS(root)}
+}
+
+// ServeHTTP serves a file with special handling for pre-compressed .gz files.
+// If the client advertises gzip in Accept-Encoding and a sibling "<path>.gz"
+// file exists in the root, its bytes are sent verbatim with
+// Content-Encoding: gzip; in every other case the request is delegated to a
+// plain http.FileServer over the same root.
+func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+ // Clean and prepare the URL path
+ urlPath := r.URL.Path
+ if !strings.HasPrefix(urlPath, "/") {
+ urlPath = "/" + urlPath
+ }
+ urlPath = path.Clean(urlPath)
+
+ // Check if client accepts gzip encoding
+ // NOTE(review): a bare substring match also accepts "gzip;q=0", which a
+ // client uses to explicitly refuse gzip — confirm this trade-off is OK.
+ acceptEncoding := r.Header.Get("Accept-Encoding")
+ acceptsGzip := strings.Contains(acceptEncoding, "gzip")
+
+ // Check if the file itself is not a gzip file (we don't want to double-compress)
+ isCompressibleFile := !strings.HasSuffix(urlPath, ".gz")
+
+ if acceptsGzip && isCompressibleFile {
+ // Try to open the gzipped version of the file
+ gzPath := urlPath + ".gz"
+ gzFile, err := h.root.Open(gzPath)
+
+ if err == nil {
+ defer gzFile.Close()
+
+ // Get file info to check if it's a regular file
+ gzStat, err := gzFile.Stat()
+ if err != nil || gzStat.IsDir() {
+ // Not a valid file, fall back to normal serving
+ http.FileServer(h.root).ServeHTTP(w, r)
+ return
+ }
+
+ // Determine the content type based on the original file (not the .gz)
+ // NOTE(review): path.Ext returns only the last extension, so "app.js.map"
+ // yields ".map" and likely falls into the octet-stream default — verify
+ // that source maps are acceptable as application/octet-stream.
+ contentType := mime.TypeByExtension(path.Ext(urlPath))
+ if contentType == "" {
+ contentType = "application/octet-stream"
+ }
+
+ // Set the appropriate headers for serving gzipped content
+ w.Header().Set("Content-Type", contentType)
+ w.Header().Set("Content-Encoding", "gzip")
+ w.Header().Set("Vary", "Accept-Encoding")
+
+ // Read the gzipped file into memory to avoid 'seeker can't seek' error
+ // (http.ServeContent requires io.Seeker, which files read out of a
+ // zip.Reader cannot provide).
+ // NOTE(review): buffering the whole file trades memory for simplicity;
+ // since the full length is known here, Content-Length could also be set.
+ gzippedData, err := io.ReadAll(gzFile)
+ if err != nil {
+ http.Error(w, "Error reading gzipped content", http.StatusInternalServerError)
+ return
+ }
+
+ // Write the headers and gzipped content
+ w.WriteHeader(http.StatusOK)
+ // NOTE(review): the Write error is dropped — usually fine for static
+ // assets (client hang-ups), but worth confirming.
+ w.Write(gzippedData)
+ return
+ }
+ }
+
+ // Fall back to standard file serving if gzipped version not found or not applicable
+ http.FileServer(h.root).ServeHTTP(w, r)
+}
diff --git a/loop/server/gzhandler/gzhandler_test.go b/loop/server/gzhandler/gzhandler_test.go
new file mode 100644
index 0000000..958f8ab
--- /dev/null
+++ b/loop/server/gzhandler/gzhandler_test.go
@@ -0,0 +1,240 @@
+package gzhandler
+
+import (
+ "compress/gzip"
+ "io"
+ "io/fs"
+ "net/http"
+ "net/http/httptest"
+ "strings"
+ "testing"
+ "testing/fstest"
+)
+
+// TestHandler_ServeHTTP drives the handler over an in-memory fstest.MapFS,
+// covering gzip-capable and plain clients, files with and without .gz
+// siblings, and a nonexistent path (expected 404 via the fallback server).
+func TestHandler_ServeHTTP(t *testing.T) {
+ // Create a test filesystem with regular and gzipped files
+ testFS := fstest.MapFS{
+ "regular.txt": &fstest.MapFile{
+ Data: []byte("This is a regular text file"),
+ Mode: 0644,
+ },
+ "regular.txt.gz": &fstest.MapFile{
+ Data: compressString(t, "This is a regular text file"),
+ Mode: 0644,
+ },
+ "regular.js": &fstest.MapFile{
+ Data: []byte("console.log('Hello world');"),
+ Mode: 0644,
+ },
+ "regular.js.gz": &fstest.MapFile{
+ Data: compressString(t, "console.log('Hello world');"),
+ Mode: 0644,
+ },
+ "nogzip.css": &fstest.MapFile{
+ Data: []byte(".body { color: red; }"),
+ Mode: 0644,
+ },
+ }
+
+ // Create the handler using our test filesystem
+ handler := New(testFS)
+
+ // Define test cases. expectedBody is always the *decompressed* content;
+ // the loop below unwraps gzip when Content-Encoding says so.
+ tests := []struct {
+ name string
+ path string
+ acceptGzip bool
+ expectedStatus int
+ expectedBody string
+ expectedGzipHeader string
+ expectedType string
+ }{
+ {
+ name: "Serve gzipped text file when accepted",
+ path: "/regular.txt",
+ acceptGzip: true,
+ expectedStatus: http.StatusOK,
+ expectedBody: "This is a regular text file",
+ expectedGzipHeader: "gzip",
+ expectedType: "text/plain; charset=utf-8",
+ },
+ {
+ name: "Serve regular text file when gzip not accepted",
+ path: "/regular.txt",
+ acceptGzip: false,
+ expectedStatus: http.StatusOK,
+ expectedBody: "This is a regular text file",
+ expectedGzipHeader: "",
+ expectedType: "text/plain; charset=utf-8",
+ },
+ {
+ name: "Serve gzipped JS file when accepted",
+ path: "/regular.js",
+ acceptGzip: true,
+ expectedStatus: http.StatusOK,
+ expectedBody: "console.log('Hello world');",
+ expectedGzipHeader: "gzip",
+ expectedType: "text/javascript; charset=utf-8",
+ },
+ {
+ name: "Serve regular CSS file when gzip not available",
+ path: "/nogzip.css",
+ acceptGzip: true,
+ expectedStatus: http.StatusOK,
+ expectedBody: ".body { color: red; }",
+ expectedGzipHeader: "",
+ expectedType: "text/css; charset=utf-8",
+ },
+ {
+ name: "Return 404 for non-existent file",
+ path: "/nonexistent.txt",
+ acceptGzip: true,
+ expectedStatus: http.StatusNotFound,
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ // Create a request for the specified path
+ req := httptest.NewRequest("GET", tc.path, nil)
+
+ // Set Accept-Encoding header if needed
+ if tc.acceptGzip {
+ req.Header.Set("Accept-Encoding", "gzip")
+ }
+
+ // Create a response recorder
+ rec := httptest.NewRecorder()
+
+ // Serve the request
+ handler.ServeHTTP(rec, req)
+
+ // Check status code
+ if rec.Code != tc.expectedStatus {
+ t.Errorf("Expected status %d, got %d", tc.expectedStatus, rec.Code)
+ return
+ }
+
+ // For non-200 responses, we don't check the body
+ if tc.expectedStatus != http.StatusOK {
+ return
+ }
+
+ // Check Content-Type header (skip for .txt files since MIME mappings can vary by OS)
+ if !strings.HasSuffix(tc.path, ".txt") {
+ contentType := rec.Header().Get("Content-Type")
+ if contentType != tc.expectedType {
+ t.Errorf("Expected Content-Type %q, got %q", tc.expectedType, contentType)
+ }
+ }
+
+ // Check Content-Encoding header
+ contentEncoding := rec.Header().Get("Content-Encoding")
+ if contentEncoding != tc.expectedGzipHeader {
+ t.Errorf("Expected Content-Encoding %q, got %q", tc.expectedGzipHeader, contentEncoding)
+ }
+
+ // Read response body
+ var bodyReader io.Reader = rec.Body
+
+ // If response is gzipped, decompress it
+ if contentEncoding == "gzip" {
+ gzReader, err := gzip.NewReader(rec.Body)
+ if err != nil {
+ t.Fatalf("Failed to create gzip reader: %v", err)
+ }
+ defer gzReader.Close()
+ bodyReader = gzReader
+ }
+
+ // Read and check body content
+ actualBody, err := io.ReadAll(bodyReader)
+ if err != nil {
+ t.Fatalf("Failed to read response body: %v", err)
+ }
+
+ if string(actualBody) != tc.expectedBody {
+ t.Errorf("Expected body %q, got %q", tc.expectedBody, string(actualBody))
+ }
+ })
+ }
+}
+
+// TestHandleDirectories tests that directories are handled properly:
+// a request for "/dir/" must return the index content with status 200.
+// NOTE(review): the handler calls path.Clean, which turns "/dir/" into
+// "/dir", so the .gz lookup is for "/dir.gz" — the pre-compressed
+// index.html.gz placed in this fixture is never consulted for directory
+// requests; serving falls through to http.FileServer. The test tolerates
+// either encoding, so this is documented behavior rather than a failure.
+func TestHandleDirectories(t *testing.T) {
+ // Create a test filesystem with a directory
+ testFS := fstest.MapFS{
+ "dir": &fstest.MapFile{
+ Mode: fs.ModeDir | 0755,
+ },
+ "dir/index.html": &fstest.MapFile{
+ Data: []byte("<html>Directory index</html>"),
+ Mode: 0644,
+ },
+ "dir/index.html.gz": &fstest.MapFile{
+ Data: compressString(t, "<html>Directory index</html>"),
+ Mode: 0644,
+ },
+ }
+
+ // Create the handler using our test filesystem
+ handler := New(testFS)
+
+ // Create a request for the directory
+ req := httptest.NewRequest("GET", "/dir/", nil)
+ req.Header.Set("Accept-Encoding", "gzip")
+
+ // Create a response recorder
+ rec := httptest.NewRecorder()
+
+ // Serve the request
+ handler.ServeHTTP(rec, req)
+
+ // Check status code should be 200 (directory index)
+ if rec.Code != http.StatusOK {
+ t.Errorf("Expected status 200, got %d", rec.Code)
+ }
+
+ // Note: Directory listings may not use gzip encoding by default with http.FileServer
+ // This is acceptable behavior, so we don't enforce gzip encoding for directories
+ contentEncoding := rec.Header().Get("Content-Encoding")
+
+ // Check if body contains the index content (after decompression)
+ var bodyReader io.Reader
+ if contentEncoding == "gzip" {
+ gzReader, err := gzip.NewReader(rec.Body)
+ if err != nil {
+ t.Fatalf("Failed to create gzip reader: %v", err)
+ }
+ defer gzReader.Close()
+ bodyReader = gzReader
+ } else {
+ bodyReader = rec.Body
+ }
+
+ body, err := io.ReadAll(bodyReader)
+ if err != nil {
+ t.Fatalf("Failed to read response body: %v", err)
+ }
+
+ if !strings.Contains(string(body), "Directory index") {
+ t.Errorf("Expected directory index content, got %q", string(body))
+ }
+}
+
+// compressString gzip-compresses s and returns the compressed bytes,
+// failing the test on any writer error. Used to build .gz fixtures above.
+// NOTE(review): bytes.Buffer would be the conventional sink for binary
+// data; strings.Builder works but round-trips the bytes through a string.
+func compressString(t *testing.T, s string) []byte {
+ var buf strings.Builder
+ gw := gzip.NewWriter(&buf)
+
+ _, err := gw.Write([]byte(s))
+ if err != nil {
+ t.Fatalf("Failed to write to gzip writer: %v", err)
+ }
+
+ // Close flushes the gzip footer; skipping it would truncate the stream.
+ if err := gw.Close(); err != nil {
+ t.Fatalf("Failed to close gzip writer: %v", err)
+ }
+
+ return []byte(buf.String())
+}
diff --git a/loop/server/loophttp.go b/loop/server/loophttp.go
index 0253f6d..0660357 100644
--- a/loop/server/loophttp.go
+++ b/loop/server/loophttp.go
@@ -19,6 +19,8 @@
"syscall"
"time"
+ "sketch.dev/loop/server/gzhandler"
+
"github.com/creack/pty"
"sketch.dev/ant"
"sketch.dev/loop"
@@ -340,7 +342,7 @@
}
})
- s.mux.Handle("/static/", http.StripPrefix("/static/", http.FileServerFS(webBundle)))
+ s.mux.Handle("/static/", http.StripPrefix("/static/", gzhandler.New(webBundle)))
// Terminal WebSocket handler
// Terminal endpoints - predefined terminals 1-9
diff --git a/loop/webui/esbuild.go b/loop/webui/esbuild.go
index d4af636..03c745e 100644
--- a/loop/webui/esbuild.go
+++ b/loop/webui/esbuild.go
@@ -1,12 +1,11 @@
// Package webui provides the web interface for the sketch loop.
// It bundles typescript files into JavaScript using esbuild.
-//
-// This is substantially the same mechanism as /esbuild.go in this repo as well.
package webui
import (
"archive/zip"
"bytes"
+ "compress/gzip"
"crypto/sha256"
"embed"
"encoding/hex"
@@ -204,6 +203,57 @@
return nil, fmt.Errorf("failed to write xterm.css: %w", err)
}
+ // Compress all .js, .js.map, and .css files with gzip, leaving the originals in place
+ err = filepath.Walk(tmpHashDir, func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if info.IsDir() {
+ return nil
+ }
+ // Check if file is a .js or .js.map file
+ if !strings.HasSuffix(path, ".js") && !strings.HasSuffix(path, ".js.map") && !strings.HasSuffix(path, ".css") {
+ return nil
+ }
+
+ // Read the original file
+ origData, err := os.ReadFile(path)
+ if err != nil {
+ return fmt.Errorf("failed to read file %s: %w", path, err)
+ }
+
+ // Create a gzipped file
+ gzipPath := path + ".gz"
+ gzipFile, err := os.Create(gzipPath)
+ if err != nil {
+ return fmt.Errorf("failed to create gzip file %s: %w", gzipPath, err)
+ }
+ defer gzipFile.Close()
+
+ // Create a gzip writer
+ gzWriter := gzip.NewWriter(gzipFile)
+ defer gzWriter.Close()
+
+ // Write the original file content to the gzip writer
+ _, err = gzWriter.Write(origData)
+ if err != nil {
+ return fmt.Errorf("failed to write to gzip file %s: %w", gzipPath, err)
+ }
+
+ // Ensure we flush and close properly
+ if err := gzWriter.Close(); err != nil {
+ return fmt.Errorf("failed to close gzip writer for %s: %w", gzipPath, err)
+ }
+ if err := gzipFile.Close(); err != nil {
+ return fmt.Errorf("failed to close gzip file %s: %w", gzipPath, err)
+ }
+
+ return nil
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to compress .js/.js.map/.css files: %w", err)
+ }
+
// Everything succeeded, so we write tmpHashDir to hashZip
buf := new(bytes.Buffer)
w := zip.NewWriter(buf)