client/web: add zstd, remove brotli for precompressed assets

This will enable us to reduce the size of these embedded assets. As of
February 2026, zstd is part of the web baseline and is supported by all
major evergreen browsers.

Updates tailscale/corp#20099

Signed-off-by: James Tucker <james@tailscale.com>
This commit is contained in:
James Tucker
2024-05-17 13:47:27 -07:00
parent 439d84134d
commit 8dc0802f59
19 changed files with 585 additions and 51 deletions

View File

@@ -4,6 +4,7 @@
package web
import (
"fmt"
"io"
"io/fs"
"log"
@@ -16,7 +17,9 @@
"strings"
"time"
"github.com/klauspost/compress/zstd"
prebuilt "github.com/tailscale/web-client-prebuilt"
"tailscale.com/tsweb/tswebutil"
)
var start = time.Now()
@@ -63,10 +66,76 @@ func assetsHandler(devMode bool) (_ http.Handler, cleanup func()) {
}), nil
}
func openPrecompressedFile(w http.ResponseWriter, r *http.Request, path string, fs fs.FS) (fs.File, error) {
if f, err := fs.Open(path + ".gz"); err == nil {
w.Header().Set("Content-Encoding", "gzip")
return f, nil
// zstFile wraps a zstd-compressed fs.File and provides transparent
// decompression. It implements io.ReadSeeker so that it can be used with
// http.ServeContent. Note that Seek is implemented by decompressing from the
// start, so http.ServeContent's size detection (SeekEnd then SeekStart) will
// decompress the content twice. This is acceptable for the small web client
// assets served here, but would not be appropriate for large files.
type zstFile struct {
	f fs.File // underlying compressed file; must implement io.Seeker for Seek to work

	// Decoder decompresses reads from f; its promoted Read method is
	// what makes zstFile an io.Reader.
	*zstd.Decoder
}

// newZSTFile returns a zstFile that transparently decompresses reads from f,
// which must contain a zstd stream. On failure, f is closed.
func newZSTFile(f fs.File) (*zstFile, error) {
	zr, err := zstd.NewReader(f)
	if err != nil {
		f.Close() // don't leak the handle when no decoder could be constructed
		return nil, err
	}
	return &zstFile{f: f, Decoder: zr}, nil
}

// Seek implements the subset of io.Seeker that http.ServeContent needs:
// SeekStart with a non-negative offset, SeekCurrent with a non-negative
// offset, and SeekEnd with offset 0.
//
// NOTE(review): for SeekCurrent the returned value is the number of bytes
// skipped, not the new absolute offset, because the decoder does not track
// its position. http.ServeContent does not rely on that return value.
func (z *zstFile) Seek(offset int64, whence int) (int64, error) {
	// reset rewinds the underlying file and resets the decoder so that
	// the next Read decompresses from the start of the stream.
	reset := func() error {
		seeker, ok := z.f.(io.Seeker)
		if !ok {
			return fmt.Errorf("not seekable: %w", os.ErrInvalid)
		}
		if _, err := seeker.Seek(0, io.SeekStart); err != nil {
			return err
		}
		return z.Decoder.Reset(z.f)
	}
	switch whence {
	case io.SeekStart:
		if err := reset(); err != nil {
			return 0, err
		}
		return io.CopyN(io.Discard, z, offset)
	case io.SeekCurrent:
		if offset < 0 {
			return 0, fmt.Errorf("unsupported negative seek: %w", os.ErrInvalid)
		}
		return io.CopyN(io.Discard, z, offset)
	case io.SeekEnd:
		if offset != 0 {
			return 0, fmt.Errorf("unsupported non-zero offset for SeekEnd: %w", os.ErrInvalid)
		}
		// Restart from the beginning so the result is the total
		// decompressed size (the absolute end offset, as io.Seeker
		// requires) even if part of the stream has already been read.
		// Without this reset, a mid-stream SeekEnd would return only
		// the number of bytes remaining.
		if err := reset(); err != nil {
			return 0, err
		}
		return io.Copy(io.Discard, z)
	}
	return 0, os.ErrInvalid
}

// Close releases the decoder's resources and closes the underlying file.
func (z *zstFile) Close() error {
	z.Decoder.Close()
	return z.f.Close()
}
// openPrecompressedFile opens the asset at path from fs, preferring the
// precompressed ".zst" form. Clients that accept zstd get the stored bytes
// as-is with Content-Encoding set; other clients get the same asset
// transparently decompressed. If no ".zst" asset exists, a ".gz" asset is
// served to gzip-capable clients, and finally the plain file is used.
func openPrecompressedFile(w http.ResponseWriter, r *http.Request, path string, fs fs.FS) (io.ReadCloser, error) {
	if zf, err := fs.Open(path + ".zst"); err == nil {
		if !tswebutil.AcceptsEncoding(r, "zstd") {
			// Client can't handle zstd; decompress on the server side.
			return newZSTFile(zf)
		}
		w.Header().Set("Content-Encoding", "zstd")
		return zf, nil
	}
	if tswebutil.AcceptsEncoding(r, "gzip") {
		if gf, err := fs.Open(path + ".gz"); err == nil {
			w.Header().Set("Content-Encoding", "gzip")
			return gf, nil
		}
	}
	// Last resort: the uncompressed asset.
	return fs.Open(path)
}

394
client/web/assets_test.go Normal file
View File

@@ -0,0 +1,394 @@
// Copyright (c) Tailscale Inc & contributors
// SPDX-License-Identifier: BSD-3-Clause
package web
import (
"bytes"
"io"
"io/fs"
"net/http"
"net/http/httptest"
"testing"
"testing/fstest"
"time"
"github.com/klauspost/compress/zstd"
)
// compressZstd returns data compressed as a zstd stream, using the same
// 8MB window cap as the production encoder.
func compressZstd(t *testing.T, data []byte) []byte {
	t.Helper()
	var out bytes.Buffer
	enc, err := zstd.NewWriter(&out, zstd.WithWindowSize(8<<20))
	if err != nil {
		t.Fatal(err)
	}
	if _, err := enc.Write(data); err != nil {
		t.Fatal(err)
	}
	if err := enc.Close(); err != nil {
		t.Fatal(err)
	}
	return out.Bytes()
}
func TestOpenPrecompressedFile_ZstdPassthrough(t *testing.T) {
	plain := []byte("hello world")
	zdata := compressZstd(t, plain)
	tfs := fstest.MapFS{
		"test.js.zst": &fstest.MapFile{Data: zdata},
	}
	req := httptest.NewRequest("GET", "/test.js", nil)
	req.Header.Set("Accept-Encoding", "zstd, gzip")
	rec := httptest.NewRecorder()
	f, err := openPrecompressedFile(rec, req, "test.js", tfs)
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	if enc := rec.Header().Get("Content-Encoding"); enc != "zstd" {
		t.Errorf("Content-Encoding = %q, want %q", enc, "zstd")
	}
	body, err := io.ReadAll(f)
	if err != nil {
		t.Fatal(err)
	}
	// A zstd-capable client must receive the stored bytes untouched.
	if !bytes.Equal(body, zdata) {
		t.Errorf("got decompressed data, want raw compressed passthrough")
	}
}
func TestOpenPrecompressedFile_ZstdDecompress(t *testing.T) {
	plain := []byte("hello world")
	tfs := fstest.MapFS{
		"test.js.zst": &fstest.MapFile{Data: compressZstd(t, plain)},
	}
	// A client without zstd support must get transparently decompressed
	// content and no Content-Encoding header.
	req := httptest.NewRequest("GET", "/test.js", nil)
	req.Header.Set("Accept-Encoding", "gzip")
	rec := httptest.NewRecorder()
	f, err := openPrecompressedFile(rec, req, "test.js", tfs)
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	if enc := rec.Header().Get("Content-Encoding"); enc != "" {
		t.Errorf("Content-Encoding = %q, want empty (transparent decompression)", enc)
	}
	body, err := io.ReadAll(f)
	if err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(body, plain) {
		t.Errorf("got %q, want %q", body, plain)
	}
}
func TestOpenPrecompressedFile_GzipFallback(t *testing.T) {
	// With no ".zst" asset, a gzip-capable client gets the ".gz" asset.
	tfs := fstest.MapFS{
		"test.js.gz": &fstest.MapFile{Data: []byte("fake-gzip-data")},
	}
	req := httptest.NewRequest("GET", "/test.js", nil)
	req.Header.Set("Accept-Encoding", "gzip")
	rec := httptest.NewRecorder()
	f, err := openPrecompressedFile(rec, req, "test.js", tfs)
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	if enc := rec.Header().Get("Content-Encoding"); enc != "gzip" {
		t.Errorf("Content-Encoding = %q, want %q", enc, "gzip")
	}
}
func TestOpenPrecompressedFile_GzipNotAccepted(t *testing.T) {
	tfs := fstest.MapFS{
		"test.js":    &fstest.MapFile{Data: []byte("raw js")},
		"test.js.gz": &fstest.MapFile{Data: []byte("fake-gzip-data")},
	}
	// No Accept-Encoding header: neither zstd nor gzip is acceptable,
	// so the raw file must be served.
	req := httptest.NewRequest("GET", "/test.js", nil)
	rec := httptest.NewRecorder()
	f, err := openPrecompressedFile(rec, req, "test.js", tfs)
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	if enc := rec.Header().Get("Content-Encoding"); enc != "" {
		t.Errorf("Content-Encoding = %q, want empty (no compression accepted)", enc)
	}
	body, err := io.ReadAll(f)
	if err != nil {
		t.Fatal(err)
	}
	if string(body) != "raw js" {
		t.Errorf("got %q, want %q", body, "raw js")
	}
}
func TestOpenPrecompressedFile_PlainFallback(t *testing.T) {
	// Only the uncompressed asset exists; it is served as-is even to a
	// client that would accept compressed encodings.
	tfs := fstest.MapFS{
		"test.js": &fstest.MapFile{Data: []byte("raw js")},
	}
	req := httptest.NewRequest("GET", "/test.js", nil)
	req.Header.Set("Accept-Encoding", "zstd, gzip")
	rec := httptest.NewRecorder()
	f, err := openPrecompressedFile(rec, req, "test.js", tfs)
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	if enc := rec.Header().Get("Content-Encoding"); enc != "" {
		t.Errorf("Content-Encoding = %q, want empty", enc)
	}
	body, err := io.ReadAll(f)
	if err != nil {
		t.Fatal(err)
	}
	if string(body) != "raw js" {
		t.Errorf("got %q, want %q", body, "raw js")
	}
}
func TestZstFile_Seek(t *testing.T) {
	plain := []byte("hello world, this is a test of zstd seeking")
	tfs := fstest.MapFS{
		"test.zst": &fstest.MapFile{Data: compressZstd(t, plain)},
	}
	f, err := tfs.Open("test.zst")
	if err != nil {
		t.Fatal(err)
	}
	zf, err := newZSTFile(f)
	if err != nil {
		t.Fatal(err)
	}
	defer zf.Close()
	// Seeking to the end reports the total decompressed size.
	if n, err := zf.Seek(0, io.SeekEnd); err != nil {
		t.Fatalf("Seek(0, SeekEnd) error: %v", err)
	} else if n != int64(len(plain)) {
		t.Errorf("Seek(0, SeekEnd) = %d, want %d", n, len(plain))
	}
	// Seeking back to the start rewinds the decompressed stream.
	if n, err := zf.Seek(0, io.SeekStart); err != nil {
		t.Fatalf("Seek(0, SeekStart) error: %v", err)
	} else if n != 0 {
		t.Errorf("Seek(0, SeekStart) = %d, want 0", n)
	}
	body, err := io.ReadAll(zf)
	if err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(body, plain) {
		t.Errorf("after Seek(0, SeekStart) + ReadAll: got %q, want %q", body, plain)
	}
}
func TestZstFile_SeekCurrent(t *testing.T) {
	tfs := fstest.MapFS{
		"test.zst": &fstest.MapFile{Data: compressZstd(t, []byte("hello world"))},
	}
	f, err := tfs.Open("test.zst")
	if err != nil {
		t.Fatal(err)
	}
	zf, err := newZSTFile(f)
	if err != nil {
		t.Fatal(err)
	}
	defer zf.Close()
	// A forward relative seek discards the skipped bytes.
	n, err := zf.Seek(6, io.SeekCurrent)
	if err != nil {
		t.Fatalf("Seek(6, SeekCurrent) error: %v", err)
	}
	if n != 6 {
		t.Errorf("Seek(6, SeekCurrent) = %d, want 6", n)
	}
	rest, err := io.ReadAll(zf)
	if err != nil {
		t.Fatal(err)
	}
	if string(rest) != "world" {
		t.Errorf("after Seek(6, SeekCurrent) + ReadAll: got %q, want %q", rest, "world")
	}
}
func TestZstFile_SeekNegativeCurrentErrors(t *testing.T) {
	tfs := fstest.MapFS{
		"test.zst": &fstest.MapFile{Data: compressZstd(t, []byte("hello world"))},
	}
	f, err := tfs.Open("test.zst")
	if err != nil {
		t.Fatal(err)
	}
	zf, err := newZSTFile(f)
	if err != nil {
		t.Fatal(err)
	}
	defer zf.Close()
	// Backward relative seeks are unsupported by design.
	if _, err := zf.Seek(-1, io.SeekCurrent); err == nil {
		t.Error("Seek(-1, SeekCurrent) should return error")
	}
}
func TestZstFile_SeekEndNonZeroErrors(t *testing.T) {
	tfs := fstest.MapFS{
		"test.zst": &fstest.MapFile{Data: compressZstd(t, []byte("hello"))},
	}
	f, err := tfs.Open("test.zst")
	if err != nil {
		t.Fatal(err)
	}
	zf, err := newZSTFile(f)
	if err != nil {
		t.Fatal(err)
	}
	defer zf.Close()
	// Only offset 0 is supported for SeekEnd.
	if _, err := zf.Seek(-1, io.SeekEnd); err == nil {
		t.Error("Seek(-1, SeekEnd) should return error")
	}
}
func TestZstFile_ServeContent(t *testing.T) {
	// Integration test: http.ServeContent determines content length via
	// Seek, so this exercises the SeekEnd/SeekStart round trip end to end.
	plain := []byte("hello world, served via http.ServeContent")
	tfs := fstest.MapFS{
		"test.js.zst": &fstest.MapFile{Data: compressZstd(t, plain)},
	}
	handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		f, err := tfs.Open("test.js.zst")
		if err != nil {
			http.Error(w, err.Error(), 500)
			return
		}
		zf, err := newZSTFile(f)
		if err != nil {
			http.Error(w, err.Error(), 500)
			return
		}
		defer zf.Close()
		http.ServeContent(w, r, "test.js", time.Time{}, zf)
	})
	rec := httptest.NewRecorder()
	handler.ServeHTTP(rec, httptest.NewRequest("GET", "/test.js", nil))
	if rec.Code != 200 {
		t.Fatalf("status = %d, want 200; body: %s", rec.Code, rec.Body.String())
	}
	if !bytes.Equal(rec.Body.Bytes(), plain) {
		t.Errorf("body = %q, want %q", rec.Body.String(), plain)
	}
}
func TestNewZSTFile_CloseOnSuccess(t *testing.T) {
// Verify that newZSTFile produces a valid zstFile that, when closed,
// closes the underlying file.
original := []byte("hello")
compressed := compressZstd(t, original)
closed := false
f := &fakeFile{
data: compressed,
closeFn: func() error { closed = true; return nil },
}
zf, err := newZSTFile(f)
if err != nil {
t.Fatal(err)
}
got, err := io.ReadAll(zf)
if err != nil {
t.Fatal(err)
}
if !bytes.Equal(got, original) {
t.Errorf("got %q, want %q", got, original)
}
zf.Close()
if !closed {
t.Error("underlying file was not closed")
}
}
// fakeFile implements fs.File with controllable behavior for testing.
type fakeFile struct {
data []byte
offset int
closeFn func() error
}
func (f *fakeFile) Read(p []byte) (int, error) {
if f.offset >= len(f.data) {
return 0, io.EOF
}
n := copy(p, f.data[f.offset:])
f.offset += n
return n, nil
}
func (f *fakeFile) Close() error {
if f.closeFn != nil {
return f.closeFn()
}
return nil
}
func (f *fakeFile) Stat() (fs.FileInfo, error) {
return nil, fs.ErrInvalid
}

View File

@@ -73,8 +73,8 @@ func build(toolDir, appDir string) error {
if err := os.Remove(f); err != nil {
log.Printf("Failed to cleanup %q: %v", f, err)
}
// Removing intermediate ".br" version, we use ".gz" asset.
if err := os.Remove(f + ".br"); err != nil {
// Removing ".gz" version, we use the ".zst" asset.
if err := os.Remove(f + ".gz"); err != nil {
log.Printf("Failed to cleanup %q: %v", f+".gz", err)
}
}

View File

@@ -125,6 +125,7 @@ tailscale.com/cmd/derper dependencies: (generated by github.com/tailscale/depawa
tailscale.com/tstime/rate from tailscale.com/derp/derpserver
tailscale.com/tsweb from tailscale.com/cmd/derper+
tailscale.com/tsweb/promvarz from tailscale.com/cmd/derper
tailscale.com/tsweb/tswebutil from tailscale.com/tsweb
tailscale.com/tsweb/varz from tailscale.com/tsweb+
tailscale.com/types/appctype from tailscale.com/client/local
tailscale.com/types/dnstype from tailscale.com/tailcfg+

View File

@@ -147,7 +147,7 @@ tailscale.com/cmd/k8s-operator dependencies: (generated by github.com/tailscale/
github.com/klauspost/compress/internal/cpuinfo from github.com/klauspost/compress/huff0+
💣 github.com/klauspost/compress/internal/le from github.com/klauspost/compress/huff0+
github.com/klauspost/compress/internal/snapref from github.com/klauspost/compress/zstd
github.com/klauspost/compress/zstd from tailscale.com/util/zstdframe
github.com/klauspost/compress/zstd from tailscale.com/util/zstdframe+
github.com/klauspost/compress/zstd/internal/xxhash from github.com/klauspost/compress/zstd
github.com/mailru/easyjson/buffer from github.com/mailru/easyjson/jwriter
💣 github.com/mailru/easyjson/jlexer from github.com/go-openapi/swag
@@ -908,6 +908,7 @@ tailscale.com/cmd/k8s-operator dependencies: (generated by github.com/tailscale/
tailscale.com/tstime/mono from tailscale.com/net/tstun+
tailscale.com/tstime/rate from tailscale.com/wgengine/filter
tailscale.com/tsweb from tailscale.com/util/eventbus
tailscale.com/tsweb/tswebutil from tailscale.com/client/web+
tailscale.com/tsweb/varz from tailscale.com/util/usermetric+
tailscale.com/types/appctype from tailscale.com/ipn/ipnlocal+
tailscale.com/types/bools from tailscale.com/tsnet+

View File

@@ -62,6 +62,7 @@ tailscale.com/cmd/stund dependencies: (generated by github.com/tailscale/depawar
tailscale.com/tailcfg from tailscale.com/version+
tailscale.com/tsweb from tailscale.com/cmd/stund+
tailscale.com/tsweb/promvarz from tailscale.com/cmd/stund
tailscale.com/tsweb/tswebutil from tailscale.com/tsweb
tailscale.com/tsweb/varz from tailscale.com/tsweb+
tailscale.com/types/dnstype from tailscale.com/tailcfg
tailscale.com/types/ipproto from tailscale.com/tailcfg

View File

@@ -126,6 +126,14 @@ tailscale.com/cmd/tailscale dependencies: (generated by github.com/tailscale/dep
L 💣 github.com/jsimonetti/rtnetlink from tailscale.com/net/netmon
L github.com/jsimonetti/rtnetlink/internal/unix from github.com/jsimonetti/rtnetlink
github.com/kballard/go-shellquote from tailscale.com/cmd/tailscale/cli
github.com/klauspost/compress from github.com/klauspost/compress/zstd
github.com/klauspost/compress/fse from github.com/klauspost/compress/huff0
github.com/klauspost/compress/huff0 from github.com/klauspost/compress/zstd
github.com/klauspost/compress/internal/cpuinfo from github.com/klauspost/compress/huff0+
💣 github.com/klauspost/compress/internal/le from github.com/klauspost/compress/huff0+
github.com/klauspost/compress/internal/snapref from github.com/klauspost/compress/zstd
github.com/klauspost/compress/zstd from tailscale.com/client/web
github.com/klauspost/compress/zstd/internal/xxhash from github.com/klauspost/compress/zstd
💣 github.com/mattn/go-colorable from tailscale.com/cmd/tailscale/cli
💣 github.com/mattn/go-isatty from tailscale.com/cmd/tailscale/cli+
L 💣 github.com/mdlayher/netlink from github.com/jsimonetti/rtnetlink+
@@ -239,6 +247,7 @@ tailscale.com/cmd/tailscale dependencies: (generated by github.com/tailscale/dep
tailscale.com/tstime/mono from tailscale.com/tstime/rate
tailscale.com/tstime/rate from tailscale.com/cmd/tailscale/cli
tailscale.com/tsweb from tailscale.com/util/eventbus
tailscale.com/tsweb/tswebutil from tailscale.com/client/web+
tailscale.com/tsweb/varz from tailscale.com/util/usermetric+
tailscale.com/types/appctype from tailscale.com/client/local+
tailscale.com/types/dnstype from tailscale.com/tailcfg+

View File

@@ -153,7 +153,7 @@ tailscale.com/cmd/tailscaled dependencies: (generated by github.com/tailscale/de
github.com/klauspost/compress/internal/cpuinfo from github.com/klauspost/compress/zstd+
💣 github.com/klauspost/compress/internal/le from github.com/klauspost/compress/huff0+
github.com/klauspost/compress/internal/snapref from github.com/klauspost/compress/zstd
github.com/klauspost/compress/zstd from tailscale.com/util/zstdframe
github.com/klauspost/compress/zstd from tailscale.com/util/zstdframe+
github.com/klauspost/compress/zstd/internal/xxhash from github.com/klauspost/compress/zstd
github.com/kortschak/wol from tailscale.com/feature/wakeonlan
LD github.com/kr/fs from github.com/pkg/sftp
@@ -400,6 +400,7 @@ tailscale.com/cmd/tailscaled dependencies: (generated by github.com/tailscale/de
tailscale.com/tstime/mono from tailscale.com/net/tstun+
tailscale.com/tstime/rate from tailscale.com/wgengine/filter
tailscale.com/tsweb from tailscale.com/util/eventbus
tailscale.com/tsweb/tswebutil from tailscale.com/client/web+
tailscale.com/tsweb/varz from tailscale.com/cmd/tailscaled+
tailscale.com/types/appctype from tailscale.com/ipn/ipnlocal+
tailscale.com/types/bools from tailscale.com/wgengine/netlog

View File

@@ -120,7 +120,7 @@ tailscale.com/cmd/tsidp dependencies: (generated by github.com/tailscale/depawar
github.com/klauspost/compress/internal/cpuinfo from github.com/klauspost/compress/huff0+
💣 github.com/klauspost/compress/internal/le from github.com/klauspost/compress/huff0+
github.com/klauspost/compress/internal/snapref from github.com/klauspost/compress/zstd
github.com/klauspost/compress/zstd from tailscale.com/util/zstdframe
github.com/klauspost/compress/zstd from tailscale.com/util/zstdframe+
github.com/klauspost/compress/zstd/internal/xxhash from github.com/klauspost/compress/zstd
L 💣 github.com/mdlayher/netlink from github.com/jsimonetti/rtnetlink+
L 💣 github.com/mdlayher/netlink/nlenc from github.com/jsimonetti/rtnetlink+
@@ -309,6 +309,7 @@ tailscale.com/cmd/tsidp dependencies: (generated by github.com/tailscale/depawar
tailscale.com/tstime/mono from tailscale.com/net/tstun+
tailscale.com/tstime/rate from tailscale.com/wgengine/filter
tailscale.com/tsweb from tailscale.com/util/eventbus
tailscale.com/tsweb/tswebutil from tailscale.com/client/web+
tailscale.com/tsweb/varz from tailscale.com/tsweb+
tailscale.com/types/appctype from tailscale.com/ipn/ipnlocal+
tailscale.com/types/bools from tailscale.com/tsnet+

View File

@@ -151,4 +151,4 @@
});
};
}
# nix-direnv cache busting line: sha256-Lr+5B0LEFk66WahPczRcfzH8rSL5Cc2qvNJuW6B0Llc=
# nix-direnv cache busting line: sha256-nbh8U6vPFal6/m/c4p7rX6LU6uuxAXAdzv9oUhD4bVg=

3
go.mod
View File

@@ -8,7 +8,6 @@ require (
github.com/Kodeworks/golang-image-ico v0.0.0-20141118225523-73f0f4cfade9
github.com/akutz/memconn v0.1.0
github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa
github.com/andybalholm/brotli v1.1.0
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be
github.com/atotto/clipboard v0.1.4
github.com/aws/aws-sdk-go-v2 v1.41.0
@@ -98,7 +97,7 @@ require (
github.com/tailscale/netlink v1.1.1-0.20240822203006-4d49adab4de7
github.com/tailscale/peercred v0.0.0-20250107143737-35a0c7bd7edc
github.com/tailscale/setec v0.0.0-20251203133219-2ab774e4129a
github.com/tailscale/web-client-prebuilt v0.0.0-20250124233751-d4cd19a26976
github.com/tailscale/web-client-prebuilt v0.0.0-20251127225136-f19339b67368
github.com/tailscale/wf v0.0.0-20240214030419-6fbb0a674ee6
github.com/tailscale/wireguard-go v0.0.0-20250716170648-1d0488a3d7da
github.com/tailscale/xnet v0.0.0-20240729143630-8497ac4dab2e

View File

@@ -1 +1 @@
sha256-Lr+5B0LEFk66WahPczRcfzH8rSL5Cc2qvNJuW6B0Llc=
sha256-nbh8U6vPFal6/m/c4p7rX6LU6uuxAXAdzv9oUhD4bVg=

6
go.sum
View File

@@ -125,8 +125,6 @@ github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pO
github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE=
github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw=
github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I=
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
@@ -1148,8 +1146,8 @@ github.com/tailscale/peercred v0.0.0-20250107143737-35a0c7bd7edc h1:24heQPtnFR+y
github.com/tailscale/peercred v0.0.0-20250107143737-35a0c7bd7edc/go.mod h1:f93CXfllFsO9ZQVq+Zocb1Gp4G5Fz0b0rXHLOzt/Djc=
github.com/tailscale/setec v0.0.0-20251203133219-2ab774e4129a h1:TApskGPim53XY5WRt5hX4DnO8V6CmVoimSklryIoGMM=
github.com/tailscale/setec v0.0.0-20251203133219-2ab774e4129a/go.mod h1:+6WyG6kub5/5uPsMdYQuSti8i6F5WuKpFWLQnZt/Mms=
github.com/tailscale/web-client-prebuilt v0.0.0-20250124233751-d4cd19a26976 h1:UBPHPtv8+nEAy2PD8RyAhOYvau1ek0HDJqLS/Pysi14=
github.com/tailscale/web-client-prebuilt v0.0.0-20250124233751-d4cd19a26976/go.mod h1:agQPE6y6ldqCOui2gkIh7ZMztTkIQKH049tv8siLuNQ=
github.com/tailscale/web-client-prebuilt v0.0.0-20251127225136-f19339b67368 h1:0tpDdAj9sSfSZg4gMwNTdqMP592sBrq2Sm0w6ipnh7k=
github.com/tailscale/web-client-prebuilt v0.0.0-20251127225136-f19339b67368/go.mod h1:agQPE6y6ldqCOui2gkIh7ZMztTkIQKH049tv8siLuNQ=
github.com/tailscale/wf v0.0.0-20240214030419-6fbb0a674ee6 h1:l10Gi6w9jxvinoiq15g8OToDdASBni4CyJOdHY1Hr8M=
github.com/tailscale/wf v0.0.0-20240214030419-6fbb0a674ee6/go.mod h1:ZXRML051h7o4OcI0d3AaILDIad/Xw0IkXaHM17dic1Y=
github.com/tailscale/wireguard-go v0.0.0-20250716170648-1d0488a3d7da h1:jVRUZPRs9sqyKlYHHzHjAqKN+6e/Vog6NpHYeNPJqOw=

View File

@@ -16,4 +16,4 @@
) {
src = ./.;
}).shellNix
# nix-direnv cache busting line: sha256-Lr+5B0LEFk66WahPczRcfzH8rSL5Cc2qvNJuW6B0Llc=
# nix-direnv cache busting line: sha256-nbh8U6vPFal6/m/c4p7rX6LU6uuxAXAdzv9oUhD4bVg=

View File

@@ -120,7 +120,7 @@ tailscale.com/tsnet dependencies: (generated by github.com/tailscale/depaware)
github.com/klauspost/compress/internal/cpuinfo from github.com/klauspost/compress/huff0+
💣 github.com/klauspost/compress/internal/le from github.com/klauspost/compress/huff0+
github.com/klauspost/compress/internal/snapref from github.com/klauspost/compress/zstd
github.com/klauspost/compress/zstd from tailscale.com/util/zstdframe
github.com/klauspost/compress/zstd from tailscale.com/util/zstdframe+
github.com/klauspost/compress/zstd/internal/xxhash from github.com/klauspost/compress/zstd
L 💣 github.com/mdlayher/netlink from github.com/jsimonetti/rtnetlink+
L 💣 github.com/mdlayher/netlink/nlenc from github.com/jsimonetti/rtnetlink+
@@ -304,6 +304,7 @@ tailscale.com/tsnet dependencies: (generated by github.com/tailscale/depaware)
tailscale.com/tstime/mono from tailscale.com/net/tstun+
tailscale.com/tstime/rate from tailscale.com/wgengine/filter
LDW tailscale.com/tsweb from tailscale.com/util/eventbus
LDW tailscale.com/tsweb/tswebutil from tailscale.com/client/web+
tailscale.com/tsweb/varz from tailscale.com/tsweb+
tailscale.com/types/appctype from tailscale.com/ipn/ipnlocal+
tailscale.com/types/bools from tailscale.com/tsnet+

View File

@@ -26,10 +26,10 @@
"sync"
"time"
"go4.org/mem"
"tailscale.com/envknob"
"tailscale.com/metrics"
"tailscale.com/net/tsaddr"
"tailscale.com/tsweb/tswebutil"
"tailscale.com/tsweb/varz"
"tailscale.com/types/logger"
"tailscale.com/util/ctxkey"
@@ -93,26 +93,11 @@ func allowDebugAccessWithKey(r *http.Request) bool {
}
// AcceptsEncoding reports whether r accepts the named encoding
// ("gzip", "br", etc).
// ("gzip", "zstd", etc).
//
// Deprecated: use [tswebutil.AcceptsEncoding] instead.
func AcceptsEncoding(r *http.Request, enc string) bool {
h := r.Header.Get("Accept-Encoding")
if h == "" {
return false
}
if !strings.Contains(h, enc) && !mem.ContainsFold(mem.S(h), mem.S(enc)) {
return false
}
remain := h
for len(remain) > 0 {
var part string
part, remain, _ = strings.Cut(remain, ",")
part = strings.TrimSpace(part)
part, _, _ = strings.Cut(part, ";")
if part == enc {
return true
}
}
return false
return tswebutil.AcceptsEncoding(r, enc)
}
// Protected wraps a provided debug handler, h, returning a Handler

View File

@@ -0,0 +1,36 @@
// Copyright (c) Tailscale Inc & contributors
// SPDX-License-Identifier: BSD-3-Clause
// Package tswebutil contains helper code used in various Tailscale webservers,
// without the tsweb kitchen sink.
package tswebutil
import (
"net/http"
"strings"
"go4.org/mem"
)
// AcceptsEncoding reports whether r accepts the named content-coding
// ("gzip", "zstd", etc) in its Accept-Encoding request header.
//
// Coding names are matched case-insensitively, per RFC 9110 section
// 12.5.3. Quality values are not interpreted: a coding listed with
// "q=0" is still reported as accepted.
func AcceptsEncoding(r *http.Request, enc string) bool {
	h := r.Header.Get("Accept-Encoding")
	if h == "" {
		return false
	}
	// Fast path: bail out without parsing when enc appears nowhere in
	// the header value. (A case-sensitive strings.Contains check here
	// would be redundant; ContainsFold subsumes it.)
	if !mem.ContainsFold(mem.S(h), mem.S(enc)) {
		return false
	}
	remain := h
	for len(remain) > 0 {
		var part string
		part, remain, _ = strings.Cut(remain, ",")
		part = strings.TrimSpace(part)
		part, _, _ = strings.Cut(part, ";") // drop any q-value parameters
		if strings.EqualFold(part, enc) {
			return true
		}
	}
	return false
}

View File

@@ -0,0 +1,37 @@
// Copyright (c) Tailscale Inc & contributors
// SPDX-License-Identifier: BSD-3-Clause
package tswebutil
import (
"net/http"
"testing"
)
func TestAcceptsEncoding(t *testing.T) {
	cases := []struct {
		in, enc string
		want    bool
	}{
		{"", "gzip", false},
		{"gzip", "gzip", true},
		{"foo,gzip", "gzip", true},
		{"foo, gzip", "gzip", true},
		{"foo, gzip ", "gzip", true},
		{"gzip, foo ", "gzip", true},
		{"gzip, foo ", "br", false},
		{"gzip, foo ", "fo", false},
		{"gzip;q=1.2, foo ", "gzip", true},
		{" gzip;q=1.2, foo ", "gzip", true},
	}
	for i, tc := range cases {
		req := &http.Request{Header: make(http.Header)}
		if tc.in != "" {
			req.Header.Set("Accept-Encoding", tc.in)
		}
		if got := AcceptsEncoding(req, tc.enc); got != tc.want {
			t.Errorf("%d. got %v; want %v", i, got, tc.want)
		}
	}
}

View File

@@ -16,13 +16,13 @@
"path"
"path/filepath"
"github.com/andybalholm/brotli"
"github.com/klauspost/compress/zstd"
"golang.org/x/sync/errgroup"
"tailscale.com/tsweb"
"tailscale.com/tsweb/tswebutil"
)
// PrecompressDir compresses static assets in dirPath using Gzip and Brotli, so
// that they can be later served with OpenPrecompressedFile.
// PrecompressDir compresses static assets in dirPath using Gzip and Zstandard,
// so that they can be later served with OpenPrecompressedFile.
func PrecompressDir(dirPath string, options Options) error {
var eg errgroup.Group
err := fs.WalkDir(os.DirFS(dirPath), ".", func(p string, d fs.DirEntry, err error) error {
@@ -63,13 +63,13 @@ type Options struct {
// OpenPrecompressedFile opens a file from fs, preferring compressed versions
// generated by PrecompressDir if possible.
func OpenPrecompressedFile(w http.ResponseWriter, r *http.Request, path string, fs fs.FS) (fs.File, error) {
if tsweb.AcceptsEncoding(r, "br") {
if f, err := fs.Open(path + ".br"); err == nil {
w.Header().Set("Content-Encoding", "br")
if tswebutil.AcceptsEncoding(r, "zstd") {
if f, err := fs.Open(path + ".zst"); err == nil {
w.Header().Set("Content-Encoding", "zstd")
return f, nil
}
}
if tsweb.AcceptsEncoding(r, "gzip") {
if tswebutil.AcceptsEncoding(r, "gzip") {
if f, err := fs.Open(path + ".gz"); err == nil {
w.Header().Set("Content-Encoding", "gzip")
return f, nil
@@ -104,13 +104,14 @@ func Precompress(path string, options Options) error {
if err != nil {
return err
}
brotliLevel := brotli.BestCompression
zstdLevel := zstd.WithEncoderLevel(zstd.SpeedBestCompression)
if options.FastCompression {
brotliLevel = brotli.BestSpeed
zstdLevel = zstd.WithEncoderLevel(zstd.SpeedFastest)
}
return writeCompressed(contents, func(w io.Writer) (io.WriteCloser, error) {
return brotli.NewWriterLevel(w, brotliLevel), nil
}, path+".br", fi.Mode())
// Per RFC 8878, encoders should avoid window sizes larger than 8MB, which is the max that Chrome accepts.
return zstd.NewWriter(w, zstdLevel, zstd.WithWindowSize(8<<20))
}, path+".zst", fi.Mode())
}
func writeCompressed(contents []byte, compressedWriterCreator func(io.Writer) (io.WriteCloser, error), outputPath string, outputMode fs.FileMode) error {