🔥 Feature: Add support for zstd compression (#3041)

* Add support for zstd compression
* Update whats_new.md
* Add benchmarks for Compress middleware

Co-authored-by: RW <rene@gofiber.io>

parent: dd2625661d
commit: b9936a339d
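Before the per-file diffs, a minimal sketch of what this feature enables: a client that advertises `zstd` in `Accept-Encoding` now gets a zstd-encoded response from the compress middleware. The route, payload, and timeout below are illustrative, not part of the commit; the identifiers used are the ones that appear in the diff.

```go
package main

import (
	"fmt"
	"net/http/httptest"
	"strings"
	"time"

	"github.com/gofiber/fiber/v3"
	"github.com/gofiber/fiber/v3/middleware/compress"
)

func main() {
	app := fiber.New()
	app.Use(compress.New())

	// The body must exceed ~200 bytes, otherwise the middleware skips compression.
	app.Get("/", func(c fiber.Ctx) error {
		c.Set(fiber.HeaderContentType, fiber.MIMETextPlainCharsetUTF8)
		return c.SendString(strings.Repeat("fiber ", 100))
	})

	// Simulate a client that accepts zstd.
	req := httptest.NewRequest(fiber.MethodGet, "/", nil)
	req.Header.Set("Accept-Encoding", "zstd")

	resp, err := app.Test(req, 10*time.Second)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.Header.Get(fiber.HeaderContentEncoding)) // expected: zstd
}
```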
.github/README.md (vendored, 2 lines changed)

@@ -581,7 +581,7 @@ Here is a list of middleware that are included within the Fiber framework.
 | [adaptor](https://github.com/gofiber/fiber/tree/main/middleware/adaptor) | Converter for net/http handlers to/from Fiber request handlers. |
 | [basicauth](https://github.com/gofiber/fiber/tree/main/middleware/basicauth) | Provides HTTP basic authentication. It calls the next handler for valid credentials and 401 Unauthorized for missing or invalid credentials. |
 | [cache](https://github.com/gofiber/fiber/tree/main/middleware/cache) | Intercept and cache HTTP responses. |
-| [compress](https://github.com/gofiber/fiber/tree/main/middleware/compress) | Compression middleware for Fiber, with support for `deflate`, `gzip` and `brotli`. |
+| [compress](https://github.com/gofiber/fiber/tree/main/middleware/compress) | Compression middleware for Fiber, with support for `deflate`, `gzip`, `brotli` and `zstd`. |
 | [cors](https://github.com/gofiber/fiber/tree/main/middleware/cors) | Enable cross-origin resource sharing (CORS) with various options. |
 | [csrf](https://github.com/gofiber/fiber/tree/main/middleware/csrf) | Protect from CSRF exploits. |
 | [earlydata](https://github.com/gofiber/fiber/tree/main/middleware/earlydata) | Adds support for TLS 1.3's early data ("0-RTT") feature. |
.gitignore (vendored, 7 lines changed)

@@ -19,7 +19,14 @@
 # Misc
 *.fiber.gz
+*.fiber.zst
+*.fiber.br
 *.fasthttp.gz
+*.fasthttp.zst
+*.fasthttp.br
+*.test.gz
+*.test.zst
+*.test.br
 *.pprof
 *.workspace
Makefile (5 lines changed)

@@ -37,6 +37,11 @@ lint:
 test:
 	go run gotest.tools/gotestsum@latest -f testname -- ./... -race -count=1 -shuffle=on
 
+## longtest: 🚦 Execute all tests 10x
+.PHONY: longtest
+longtest:
+	go run gotest.tools/gotestsum@latest -f testname -- ./... -race -count=10 -shuffle=on
+
 ## tidy: 📌 Clean and tidy dependencies
 .PHONY: tidy
 tidy:
app.go (24 lines changed)

@@ -219,11 +219,11 @@ type Config struct {
 	// Default: 4096
 	WriteBufferSize int `json:"write_buffer_size"`
 
-	// CompressedFileSuffix adds suffix to the original file name and
+	// CompressedFileSuffixes adds suffix to the original file name and
 	// tries saving the resulting compressed file under the new file name.
 	//
-	// Default: ".fiber.gz"
-	CompressedFileSuffix string `json:"compressed_file_suffix"`
+	// Default: map[string]string{"gzip": ".fiber.gz", "br": ".fiber.br", "zstd": ".fiber.zst"}
+	CompressedFileSuffixes map[string]string `json:"compressed_file_suffixes"`
 
 	// ProxyHeader will enable c.IP() to return the value of the given header key
 	// By default c.IP() will return the Remote IP from the TCP connection

@@ -391,11 +391,10 @@ type RouteMessage struct {
 
 // Default Config values
 const (
-	DefaultBodyLimit            = 4 * 1024 * 1024
-	DefaultConcurrency          = 256 * 1024
-	DefaultReadBufferSize       = 4096
-	DefaultWriteBufferSize      = 4096
-	DefaultCompressedFileSuffix = ".fiber.gz"
+	DefaultBodyLimit       = 4 * 1024 * 1024
+	DefaultConcurrency     = 256 * 1024
+	DefaultReadBufferSize  = 4096
+	DefaultWriteBufferSize = 4096
 )
 
 // HTTP methods enabled by default

@@ -477,9 +476,14 @@ func New(config ...Config) *App {
 	if app.config.WriteBufferSize <= 0 {
 		app.config.WriteBufferSize = DefaultWriteBufferSize
 	}
-	if app.config.CompressedFileSuffix == "" {
-		app.config.CompressedFileSuffix = DefaultCompressedFileSuffix
+	if app.config.CompressedFileSuffixes == nil {
+		app.config.CompressedFileSuffixes = map[string]string{
+			"gzip": ".fiber.gz",
+			"br":   ".fiber.br",
+			"zstd": ".fiber.zst",
+		}
 	}
 
 	if app.config.Immutable {
 		app.getBytes, app.getString = getBytesImmutable, getStringImmutable
 	}
bind_test.go (130 lines changed)

@@ -824,35 +824,61 @@ func Benchmark_Bind_RespHeader_Map(b *testing.B) {
 	require.NoError(b, err)
 }
 
-// go test -run Test_Bind_Body
+// go test -run Test_Bind_Body_Compression
 func Test_Bind_Body(t *testing.T) {
 	t.Parallel()
 	app := New()
-	c := app.AcquireCtx(&fasthttp.RequestCtx{})
+	reqBody := []byte(`{"name":"john"}`)
 
 	type Demo struct {
 		Name string `json:"name" xml:"name" form:"name" query:"name"`
 	}
 
-	{
-		var gzipJSON bytes.Buffer
-		w := gzip.NewWriter(&gzipJSON)
-		_, err := w.Write([]byte(`{"name":"john"}`))
-		require.NoError(t, err)
-		err = w.Close()
-		require.NoError(t, err)
-
+	// Helper function to test compressed bodies
+	testCompressedBody := func(t *testing.T, compressedBody []byte, encoding string) {
+		t.Helper()
+		c := app.AcquireCtx(&fasthttp.RequestCtx{})
 		c.Request().Header.SetContentType(MIMEApplicationJSON)
-		c.Request().Header.Set(HeaderContentEncoding, "gzip")
-		c.Request().SetBody(gzipJSON.Bytes())
-		c.Request().Header.SetContentLength(len(gzipJSON.Bytes()))
+		c.Request().Header.Set(fasthttp.HeaderContentEncoding, encoding)
+		c.Request().SetBody(compressedBody)
+		c.Request().Header.SetContentLength(len(compressedBody))
 		d := new(Demo)
 		require.NoError(t, c.Bind().Body(d))
 		require.Equal(t, "john", d.Name)
-		c.Request().Header.Del(HeaderContentEncoding)
+		c.Request().Header.Del(fasthttp.HeaderContentEncoding)
 	}
 
-	testDecodeParser := func(contentType, body string) {
+	t.Run("Gzip", func(t *testing.T) {
+		t.Parallel()
+		compressedBody := fasthttp.AppendGzipBytes(nil, reqBody)
+		require.NotEqual(t, reqBody, compressedBody)
+		testCompressedBody(t, compressedBody, "gzip")
+	})
+
+	t.Run("Deflate", func(t *testing.T) {
+		t.Parallel()
+		compressedBody := fasthttp.AppendDeflateBytes(nil, reqBody)
+		require.NotEqual(t, reqBody, compressedBody)
+		testCompressedBody(t, compressedBody, "deflate")
+	})
+
+	t.Run("Brotli", func(t *testing.T) {
+		t.Parallel()
+		compressedBody := fasthttp.AppendBrotliBytes(nil, reqBody)
+		require.NotEqual(t, reqBody, compressedBody)
+		testCompressedBody(t, compressedBody, "br")
+	})
+
+	t.Run("Zstd", func(t *testing.T) {
+		t.Parallel()
+		compressedBody := fasthttp.AppendZstdBytes(nil, reqBody)
+		require.NotEqual(t, reqBody, compressedBody)
+		testCompressedBody(t, compressedBody, "zstd")
+	})
+
+	testDecodeParser := func(t *testing.T, contentType, body string) {
+		t.Helper()
+		c := app.AcquireCtx(&fasthttp.RequestCtx{})
 		c.Request().Header.SetContentType(contentType)
 		c.Request().SetBody([]byte(body))
 		c.Request().Header.SetContentLength(len(body))

@@ -861,44 +887,68 @@ func Test_Bind_Body(t *testing.T) {
 		require.Equal(t, "john", d.Name)
 	}
 
-	testDecodeParser(MIMEApplicationJSON, `{"name":"john"}`)
-	testDecodeParser(MIMEApplicationXML, `<Demo><name>john</name></Demo>`)
-	testDecodeParser(MIMEApplicationForm, "name=john")
-	testDecodeParser(MIMEMultipartForm+`;boundary="b"`, "--b\r\nContent-Disposition: form-data; name=\"name\"\r\n\r\njohn\r\n--b--")
+	t.Run("JSON", func(t *testing.T) {
+		testDecodeParser(t, MIMEApplicationJSON, `{"name":"john"}`)
+	})
+
+	t.Run("XML", func(t *testing.T) {
+		testDecodeParser(t, MIMEApplicationXML, `<Demo><name>john</name></Demo>`)
+	})
+
+	t.Run("Form", func(t *testing.T) {
+		testDecodeParser(t, MIMEApplicationForm, "name=john")
+	})
+
+	t.Run("MultipartForm", func(t *testing.T) {
+		testDecodeParser(t, MIMEMultipartForm+`;boundary="b"`, "--b\r\nContent-Disposition: form-data; name=\"name\"\r\n\r\njohn\r\n--b--")
+	})
 
-	testDecodeParserError := func(contentType, body string) {
+	testDecodeParserError := func(t *testing.T, contentType, body string) {
+		t.Helper()
+		c := app.AcquireCtx(&fasthttp.RequestCtx{})
 		c.Request().Header.SetContentType(contentType)
 		c.Request().SetBody([]byte(body))
 		c.Request().Header.SetContentLength(len(body))
 		require.Error(t, c.Bind().Body(nil))
 	}
 
-	testDecodeParserError("invalid-content-type", "")
-	testDecodeParserError(MIMEMultipartForm+`;boundary="b"`, "--b")
+	t.Run("ErrorInvalidContentType", func(t *testing.T) {
+		testDecodeParserError(t, "invalid-content-type", "")
+	})
+
+	t.Run("ErrorMalformedMultipart", func(t *testing.T) {
+		testDecodeParserError(t, MIMEMultipartForm+`;boundary="b"`, "--b")
+	})
 
 	type CollectionQuery struct {
 		Data []Demo `query:"data"`
 	}
 
-	c.Request().Reset()
-	c.Request().Header.SetContentType(MIMEApplicationForm)
-	c.Request().SetBody([]byte("data[0][name]=john&data[1][name]=doe"))
-	c.Request().Header.SetContentLength(len(c.Body()))
-	cq := new(CollectionQuery)
-	require.NoError(t, c.Bind().Body(cq))
-	require.Len(t, cq.Data, 2)
-	require.Equal(t, "john", cq.Data[0].Name)
-	require.Equal(t, "doe", cq.Data[1].Name)
+	t.Run("CollectionQuerySquareBrackets", func(t *testing.T) {
+		c := app.AcquireCtx(&fasthttp.RequestCtx{})
+		c.Request().Reset()
+		c.Request().Header.SetContentType(MIMEApplicationForm)
+		c.Request().SetBody([]byte("data[0][name]=john&data[1][name]=doe"))
+		c.Request().Header.SetContentLength(len(c.Body()))
+		cq := new(CollectionQuery)
+		require.NoError(t, c.Bind().Body(cq))
+		require.Len(t, cq.Data, 2)
+		require.Equal(t, "john", cq.Data[0].Name)
+		require.Equal(t, "doe", cq.Data[1].Name)
+	})
 
-	c.Request().Reset()
-	c.Request().Header.SetContentType(MIMEApplicationForm)
-	c.Request().SetBody([]byte("data.0.name=john&data.1.name=doe"))
-	c.Request().Header.SetContentLength(len(c.Body()))
-	cq = new(CollectionQuery)
-	require.NoError(t, c.Bind().Body(cq))
-	require.Len(t, cq.Data, 2)
-	require.Equal(t, "john", cq.Data[0].Name)
-	require.Equal(t, "doe", cq.Data[1].Name)
+	t.Run("CollectionQueryDotNotation", func(t *testing.T) {
+		c := app.AcquireCtx(&fasthttp.RequestCtx{})
+		c.Request().Reset()
+		c.Request().Header.SetContentType(MIMEApplicationForm)
+		c.Request().SetBody([]byte("data.0.name=john&data.1.name=doe"))
+		c.Request().Header.SetContentLength(len(c.Body()))
+		cq := new(CollectionQuery)
+		require.NoError(t, c.Bind().Body(cq))
+		require.Len(t, cq.Data, 2)
+		require.Equal(t, "john", cq.Data[0].Name)
+		require.Equal(t, "doe", cq.Data[1].Name)
+	})
 }
 
 // go test -run Test_Bind_Body_WithSetParserDecoder
@@ -300,6 +300,7 @@ const (
 	StrBr      = "br"
 	StrDeflate = "deflate"
 	StrBrotli  = "brotli"
+	StrZstd    = "zstd"
 )
 
 // Cookie SameSite
ctx.go (19 lines changed)

@@ -218,6 +218,8 @@ func (c *DefaultCtx) tryDecodeBodyInOrder(
 		body, err = c.fasthttp.Request.BodyUnbrotli()
 	case StrDeflate:
 		body, err = c.fasthttp.Request.BodyInflate()
+	case StrZstd:
+		body, err = c.fasthttp.Request.BodyUnzstd()
 	default:
 		decodesRealized--
 		if len(encodings) == 1 {

@@ -1429,14 +1431,15 @@ func (c *DefaultCtx) SendFile(file string, compress ...bool) error {
 	sendFileOnce.Do(func() {
 		const cacheDuration = 10 * time.Second
 		sendFileFS = &fasthttp.FS{
-			Root:                 "",
-			AllowEmptyRoot:       true,
-			GenerateIndexPages:   false,
-			AcceptByteRange:      true,
-			Compress:             true,
-			CompressedFileSuffix: c.app.config.CompressedFileSuffix,
-			CacheDuration:        cacheDuration,
-			IndexNames:           []string{"index.html"},
+			Root:                   "",
+			AllowEmptyRoot:         true,
+			GenerateIndexPages:     false,
+			AcceptByteRange:        true,
+			Compress:               true,
+			CompressBrotli:         true,
+			CompressedFileSuffixes: c.app.config.CompressedFileSuffixes,
+			CacheDuration:          cacheDuration,
+			IndexNames:             []string{"index.html"},
 			PathNotFound: func(ctx *fasthttp.RequestCtx) {
 				ctx.Response.SetStatusCode(StatusNotFound)
 			},
@@ -48,7 +48,7 @@ app := fiber.New(fiber.Config{
 | <Reference id="bodylimit">BodyLimit</Reference> | `int` | Sets the maximum allowed size for a request body, if the size exceeds the configured limit, it sends `413 - Request Entity Too Large` response. | `4 * 1024 * 1024` |
 | <Reference id="casesensitive">CaseSensitive</Reference> | `bool` | When enabled, `/Foo` and `/foo` are different routes. When disabled, `/Foo` and `/foo` are treated the same. | `false` |
 | <Reference id="colorscheme">ColorScheme</Reference> | [`Colors`](https://github.com/gofiber/fiber/blob/master/color.go) | You can define custom color scheme. They'll be used for startup message, route list and some middlewares. | [`DefaultColors`](https://github.com/gofiber/fiber/blob/master/color.go) |
-| <Reference id="compressedfilesuffix">CompressedFileSuffix</Reference> | `string` | Adds a suffix to the original file name and tries saving the resulting compressed file under the new file name. | `".fiber.gz"` |
+| <Reference id="compressedfilesuffixes">CompressedFileSuffixes</Reference> | `map[string]string` | Adds a suffix to the original file name and tries saving the resulting compressed file under the new file name. | `{"gzip": ".fiber.gz", "br": ".fiber.br", "zstd": ".fiber.zst"}` |
 | <Reference id="concurrency">Concurrency</Reference> | `int` | Maximum number of concurrent connections. | `256 * 1024` |
 | <Reference id="disabledefaultcontenttype">DisableDefaultContentType</Reference> | `bool` | When set to true, causes the default Content-Type header to be excluded from the Response. | `false` |
 | <Reference id="disabledefaultdate">DisableDefaultDate</Reference> | `bool` | When set to true causes the default date header to be excluded from the response. | `false` |
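For readers of the config table above, a sketch of how the new `CompressedFileSuffixes` option might be paired with `SendFile`'s compress flag. The map shown mirrors the documented defaults; the route and file path are illustrative assumptions, not from the commit.

```go
package main

import (
	"log"

	"github.com/gofiber/fiber/v3"
)

func main() {
	// Override the per-encoding suffixes (these values mirror the defaults).
	app := fiber.New(fiber.Config{
		CompressedFileSuffixes: map[string]string{
			"gzip": ".fiber.gz",
			"br":   ".fiber.br",
			"zstd": ".fiber.zst",
		},
	})

	// With compress=true, SendFile caches a compressed copy next to the
	// original file, named with the suffix for the negotiated encoding.
	app.Get("/report", func(c fiber.Ctx) error {
		return c.SendFile("./assets/report.html", true) // assumed example path
	})

	log.Fatal(app.Listen(":3000"))
}
```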
@@ -4,7 +4,7 @@ id: compress
 
 # Compress
 
-Compression middleware for [Fiber](https://github.com/gofiber/fiber) that will compress the response using `gzip`, `deflate` and `brotli` compression depending on the [Accept-Encoding](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding) header.
+Compression middleware for [Fiber](https://github.com/gofiber/fiber) that will compress the response using `gzip`, `deflate`, `brotli`, and `zstd` compression depending on the [Accept-Encoding](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding) header.
 
 :::note
 The compression middleware refrains from compressing bodies that are smaller than 200 bytes. This decision is based on the observation that, in such cases, the compressed size is likely to exceed the original size, making compression inefficient. [more](https://github.com/valyala/fasthttp/blob/497922a21ef4b314f393887e9c6147b8c3e3eda4/http.go#L1713-L1715)
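A short, hedged usage sketch of the middleware documented above. `Level` is part of the middleware's config as shown elsewhere in this commit; the `Next` skip rule for `/healthz` and the chosen level are assumptions for illustration only.

```go
package main

import (
	"log"

	"github.com/gofiber/fiber/v3"
	"github.com/gofiber/fiber/v3/middleware/compress"
)

func main() {
	app := fiber.New()

	// Negotiates gzip, deflate, brotli, or zstd from the Accept-Encoding header.
	app.Use(compress.New(compress.Config{
		Next:  func(c fiber.Ctx) bool { return c.Path() == "/healthz" }, // assumed skip rule
		Level: compress.LevelBestSpeed,
	}))

	app.Get("/", func(c fiber.Ctx) error {
		return c.SendString("responses larger than ~200 bytes are compressed for clients that ask for it")
	})

	log.Fatal(app.Listen(":3000"))
}
```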
@@ -148,7 +148,7 @@ To define static routes using `Get`, append the wildcard (`*`) operator at the end of the route path.
 |:-----------|:------------------------|:---------------------------------------------------------------------------------------------------------------------------|:-----------------------|
 | Next | `func(fiber.Ctx) bool` | Next defines a function to skip this middleware when returned true. | `nil` |
 | FS | `fs.FS` | FS is the file system to serve the static files from.<br /><br />You can use interfaces compatible with fs.FS like embed.FS, os.DirFS etc. | `nil` |
-| Compress | `bool` | When set to true, the server tries minimizing CPU usage by caching compressed files.<br /><br />This works differently than the github.com/gofiber/compression middleware. | `false` |
+| Compress | `bool` | When set to true, the server tries minimizing CPU usage by caching compressed files. The middleware will compress the response using `gzip`, `brotli`, or `zstd` compression depending on the [Accept-Encoding](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding) header. <br /><br />This works differently than the github.com/gofiber/compression middleware. | `false` |
 | ByteRange | `bool` | When set to true, enables byte range requests. | `false` |
 | Browse | `bool` | When set to true, enables directory browsing. | `false` |
 | Download | `bool` | When set to true, enables direct download. | `false` |
@@ -263,6 +263,10 @@ We've updated several fields from a single string (containing comma-separated values)
 - `Config.AllowHeaders`: Now accepts a slice of strings, each representing an allowed header.
 - `Config.ExposeHeaders`: Now accepts a slice of strings, each representing an exposed header.
 
+### Compression
+
+We've added support for `zstd` compression on top of `gzip`, `deflate`, and `brotli`.
+
 ### Session
 
 :::caution
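The zstd support announced in the Compression section above also covers request bodies: `Bind().Body` now inflates a `Content-Encoding: zstd` payload before decoding it, mirroring the new bind tests in this commit. A minimal sketch, with the route and payload chosen for illustration:

```go
package main

import (
	"bytes"
	"fmt"
	"net/http/httptest"
	"time"

	"github.com/gofiber/fiber/v3"
	"github.com/valyala/fasthttp"
)

type user struct {
	Name string `json:"name"`
}

func main() {
	app := fiber.New()

	app.Post("/users", func(c fiber.Ctx) error {
		u := new(user)
		if err := c.Bind().Body(u); err != nil { // inflates zstd, then decodes JSON
			return err
		}
		return c.SendString("hello " + u.Name)
	})

	// Compress the JSON payload with zstd, as a client would.
	payload := fasthttp.AppendZstdBytes(nil, []byte(`{"name":"john"}`))

	req := httptest.NewRequest(fiber.MethodPost, "/users", bytes.NewReader(payload))
	req.Header.Set(fiber.HeaderContentType, fiber.MIMEApplicationJSON)
	req.Header.Set(fiber.HeaderContentEncoding, "zstd")

	resp, err := app.Test(req, 10*time.Second)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.StatusCode) // expected: 200
}
```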
@@ -316,11 +316,11 @@ func Test_Utils_GetSplicedStrList(t *testing.T) {
 func Benchmark_Utils_GetSplicedStrList(b *testing.B) {
 	destination := make([]string, 5)
 	result := destination
-	const input = `deflate, gzip,br,brotli`
+	const input = `deflate, gzip,br,brotli,zstd`
 	for n := 0; n < b.N; n++ {
 		result = getSplicedStrList(input, destination)
 	}
-	require.Equal(b, []string{"deflate", "gzip", "br", "brotli"}, result)
+	require.Equal(b, []string{"deflate", "gzip", "br", "brotli", "zstd"}, result)
 }
 
 func Test_Utils_SortAcceptedTypes(t *testing.T) {
@@ -11,6 +11,7 @@ import (
 
 	"github.com/gofiber/fiber/v3"
 	"github.com/stretchr/testify/require"
+	"github.com/valyala/fasthttp"
 )
 
 var filedata []byte
@@ -38,7 +39,7 @@ func Test_Compress_Gzip(t *testing.T) {
 	req := httptest.NewRequest(fiber.MethodGet, "/", nil)
 	req.Header.Set("Accept-Encoding", "gzip")
 
-	resp, err := app.Test(req)
+	resp, err := app.Test(req, 10*time.Second)
 	require.NoError(t, err, "app.Test(req)")
 	require.Equal(t, 200, resp.StatusCode, "Status code")
 	require.Equal(t, "gzip", resp.Header.Get(fiber.HeaderContentEncoding))
@@ -52,33 +53,38 @@ func Test_Compress_Gzip(t *testing.T) {
 // go test -run Test_Compress_Different_Level
 func Test_Compress_Different_Level(t *testing.T) {
 	t.Parallel()
-	levels := []Level{LevelBestSpeed, LevelBestCompression}
-	for _, level := range levels {
-		level := level
-		t.Run(fmt.Sprintf("level %d", level), func(t *testing.T) {
-			t.Parallel()
-			app := fiber.New()
-
-			app.Use(New(Config{Level: level}))
-
-			app.Get("/", func(c fiber.Ctx) error {
-				c.Set(fiber.HeaderContentType, fiber.MIMETextPlainCharsetUTF8)
-				return c.Send(filedata)
-			})
-
-			req := httptest.NewRequest(fiber.MethodGet, "/", nil)
-			req.Header.Set("Accept-Encoding", "gzip")
-
-			resp, err := app.Test(req)
-			require.NoError(t, err, "app.Test(req)")
-			require.Equal(t, 200, resp.StatusCode, "Status code")
-			require.Equal(t, "gzip", resp.Header.Get(fiber.HeaderContentEncoding))
-
-			// Validate that the file size has shrunk
-			body, err := io.ReadAll(resp.Body)
-			require.NoError(t, err)
-			require.Less(t, len(body), len(filedata))
-		})
+	levels := []Level{LevelDefault, LevelBestSpeed, LevelBestCompression}
+	algorithms := []string{"gzip", "deflate", "br", "zstd"}
+
+	for _, algo := range algorithms {
+		algo := algo
+		for _, level := range levels {
+			level := level
+			t.Run(fmt.Sprintf("%s_level %d", algo, level), func(t *testing.T) {
+				t.Parallel()
+				app := fiber.New()
+
+				app.Use(New(Config{Level: level}))
+
+				app.Get("/", func(c fiber.Ctx) error {
+					c.Set(fiber.HeaderContentType, fiber.MIMETextPlainCharsetUTF8)
+					return c.Send(filedata)
+				})
+
+				req := httptest.NewRequest(fiber.MethodGet, "/", nil)
+				req.Header.Set("Accept-Encoding", algo)
+
+				resp, err := app.Test(req, 10*time.Second)
+				require.NoError(t, err, "app.Test(req)")
+				require.Equal(t, 200, resp.StatusCode, "Status code")
+				require.Equal(t, algo, resp.Header.Get(fiber.HeaderContentEncoding))
+
+				// Validate that the file size has shrunk
+				body, err := io.ReadAll(resp.Body)
+				require.NoError(t, err)
+				require.Less(t, len(body), len(filedata))
+			})
+		}
 	}
 }
@@ -95,7 +101,7 @@ func Test_Compress_Deflate(t *testing.T) {
 	req := httptest.NewRequest(fiber.MethodGet, "/", nil)
 	req.Header.Set("Accept-Encoding", "deflate")
 
-	resp, err := app.Test(req)
+	resp, err := app.Test(req, 10*time.Second)
 	require.NoError(t, err, "app.Test(req)")
 	require.Equal(t, 200, resp.StatusCode, "Status code")
 	require.Equal(t, "deflate", resp.Header.Get(fiber.HeaderContentEncoding))
@@ -130,6 +136,30 @@ func Test_Compress_Brotli(t *testing.T) {
 	require.Less(t, len(body), len(filedata))
 }
 
+func Test_Compress_Zstd(t *testing.T) {
+	t.Parallel()
+	app := fiber.New()
+
+	app.Use(New())
+
+	app.Get("/", func(c fiber.Ctx) error {
+		return c.Send(filedata)
+	})
+
+	req := httptest.NewRequest(fiber.MethodGet, "/", nil)
+	req.Header.Set("Accept-Encoding", "zstd")
+
+	resp, err := app.Test(req, 10*time.Second)
+	require.NoError(t, err, "app.Test(req)")
+	require.Equal(t, 200, resp.StatusCode, "Status code")
+	require.Equal(t, "zstd", resp.Header.Get(fiber.HeaderContentEncoding))
+
+	// Validate that the file size has shrunk
+	body, err := io.ReadAll(resp.Body)
+	require.NoError(t, err)
+	require.Less(t, len(body), len(filedata))
+}
+
 func Test_Compress_Disabled(t *testing.T) {
 	t.Parallel()
 	app := fiber.New()
@@ -143,7 +173,7 @@ func Test_Compress_Disabled(t *testing.T) {
 	req := httptest.NewRequest(fiber.MethodGet, "/", nil)
 	req.Header.Set("Accept-Encoding", "br")
 
-	resp, err := app.Test(req)
+	resp, err := app.Test(req, 10*time.Second)
 	require.NoError(t, err, "app.Test(req)")
 	require.Equal(t, 200, resp.StatusCode, "Status code")
 	require.Equal(t, "", resp.Header.Get(fiber.HeaderContentEncoding))
@@ -191,3 +221,193 @@ func Test_Compress_Next(t *testing.T) {
 	require.NoError(t, err)
 	require.Equal(t, fiber.StatusNotFound, resp.StatusCode)
 }
+
+// go test -bench=Benchmark_Compress
+func Benchmark_Compress(b *testing.B) {
+	tests := []struct {
+		name           string
+		acceptEncoding string
+	}{
+		{"Gzip", "gzip"},
+		{"Deflate", "deflate"},
+		{"Brotli", "br"},
+		{"Zstd", "zstd"},
+	}
+
+	for _, tt := range tests {
+		b.Run(tt.name, func(b *testing.B) {
+			app := fiber.New()
+			app.Use(New())
+			app.Get("/", func(c fiber.Ctx) error {
+				c.Set(fiber.HeaderContentType, fiber.MIMETextPlainCharsetUTF8)
+				return c.Send(filedata)
+			})
+
+			h := app.Handler()
+			fctx := &fasthttp.RequestCtx{}
+			fctx.Request.Header.SetMethod(fiber.MethodGet)
+			fctx.Request.SetRequestURI("/")
+
+			if tt.acceptEncoding != "" {
+				fctx.Request.Header.Set("Accept-Encoding", tt.acceptEncoding)
+			}
+
+			b.ReportAllocs()
+			b.ResetTimer()
+
+			for i := 0; i < b.N; i++ {
+				h(fctx)
+			}
+		})
+	}
+}
+
+// go test -bench=Benchmark_Compress_Levels
+func Benchmark_Compress_Levels(b *testing.B) {
+	tests := []struct {
+		name           string
+		acceptEncoding string
+	}{
+		{"Gzip", "gzip"},
+		{"Deflate", "deflate"},
+		{"Brotli", "br"},
+		{"Zstd", "zstd"},
+	}
+
+	levels := []struct {
+		name  string
+		level Level
+	}{
+		{"LevelDisabled", LevelDisabled},
+		{"LevelDefault", LevelDefault},
+		{"LevelBestSpeed", LevelBestSpeed},
+		{"LevelBestCompression", LevelBestCompression},
+	}
+
+	for _, tt := range tests {
+		for _, lvl := range levels {
+			b.Run(tt.name+"_"+lvl.name, func(b *testing.B) {
+				app := fiber.New()
+				app.Use(New(Config{Level: lvl.level}))
+				app.Get("/", func(c fiber.Ctx) error {
+					c.Set(fiber.HeaderContentType, fiber.MIMETextPlainCharsetUTF8)
+					return c.Send(filedata)
+				})
+
+				h := app.Handler()
+				fctx := &fasthttp.RequestCtx{}
+				fctx.Request.Header.SetMethod(fiber.MethodGet)
+				fctx.Request.SetRequestURI("/")
+
+				if tt.acceptEncoding != "" {
+					fctx.Request.Header.Set("Accept-Encoding", tt.acceptEncoding)
+				}
+
+				b.ReportAllocs()
+				b.ResetTimer()
+
+				for i := 0; i < b.N; i++ {
+					h(fctx)
+				}
+			})
+		}
+	}
+}
+
+// go test -bench=Benchmark_Compress_Parallel
+func Benchmark_Compress_Parallel(b *testing.B) {
+	tests := []struct {
+		name           string
+		acceptEncoding string
+	}{
+		{"Gzip", "gzip"},
+		{"Deflate", "deflate"},
+		{"Brotli", "br"},
+		{"Zstd", "zstd"},
+	}
+
+	for _, tt := range tests {
+		b.Run(tt.name, func(b *testing.B) {
+			app := fiber.New()
+			app.Use(New())
+			app.Get("/", func(c fiber.Ctx) error {
+				c.Set(fiber.HeaderContentType, fiber.MIMETextPlainCharsetUTF8)
+				return c.Send(filedata)
+			})
+
+			h := app.Handler()
+
+			b.ReportAllocs()
+			b.ResetTimer()
+
+			b.RunParallel(func(pb *testing.PB) {
+				fctx := &fasthttp.RequestCtx{}
+				fctx.Request.Header.SetMethod(fiber.MethodGet)
+				fctx.Request.SetRequestURI("/")
+
+				if tt.acceptEncoding != "" {
+					fctx.Request.Header.Set("Accept-Encoding", tt.acceptEncoding)
+				}
+
+				for pb.Next() {
+					h(fctx)
+				}
+			})
+		})
+	}
+}
+
+// go test -bench=Benchmark_Compress_Levels_Parallel
+func Benchmark_Compress_Levels_Parallel(b *testing.B) {
+	tests := []struct {
+		name           string
+		acceptEncoding string
+	}{
+		{"Gzip", "gzip"},
+		{"Deflate", "deflate"},
+		{"Brotli", "br"},
+		{"Zstd", "zstd"},
+	}
+
+	levels := []struct {
+		name  string
+		level Level
+	}{
+		{"LevelDisabled", LevelDisabled},
+		{"LevelDefault", LevelDefault},
+		{"LevelBestSpeed", LevelBestSpeed},
+		{"LevelBestCompression", LevelBestCompression},
+	}
+
+	for _, tt := range tests {
+		for _, lvl := range levels {
+			b.Run(tt.name+"_"+lvl.name, func(b *testing.B) {
+				app := fiber.New()
+				app.Use(New(Config{Level: lvl.level}))
+				app.Get("/", func(c fiber.Ctx) error {
+					c.Set(fiber.HeaderContentType, fiber.MIMETextPlainCharsetUTF8)
+					return c.Send(filedata)
+				})
+
+				h := app.Handler()
+
+				b.ReportAllocs()
+				b.ResetTimer()
+
+				b.RunParallel(func(pb *testing.PB) {
+					fctx := &fasthttp.RequestCtx{}
+					fctx.Request.Header.SetMethod(fiber.MethodGet)
+					fctx.Request.SetRequestURI("/")
+
+					if tt.acceptEncoding != "" {
+						fctx.Request.Header.Set("Accept-Encoding", tt.acceptEncoding)
+					}
+
+					for pb.Next() {
+						h(fctx)
+					}
+				})
+			})
+		}
+	}
+}
@@ -59,16 +59,17 @@ func New(root string, cfg ...Config) fiber.Handler {
 	}
 
 	fs := &fasthttp.FS{
-		Root:                 root,
-		FS:                   config.FS,
-		AllowEmptyRoot:       true,
-		GenerateIndexPages:   config.Browse,
-		AcceptByteRange:      config.ByteRange,
-		Compress:             config.Compress,
-		CompressedFileSuffix: c.App().Config().CompressedFileSuffix,
-		CacheDuration:        config.CacheDuration,
-		SkipCache:            config.CacheDuration < 0,
-		IndexNames:           config.IndexNames,
+		Root:                   root,
+		FS:                     config.FS,
+		AllowEmptyRoot:         true,
+		GenerateIndexPages:     config.Browse,
+		AcceptByteRange:        config.ByteRange,
+		Compress:               config.Compress,
+		CompressBrotli:         config.Compress, // Brotli compression won't work without this
+		CompressedFileSuffixes: c.App().Config().CompressedFileSuffixes,
+		CacheDuration:          config.CacheDuration,
+		SkipCache:              config.CacheDuration < 0,
+		IndexNames:             config.IndexNames,
 		PathNotFound: func(fctx *fasthttp.RequestCtx) {
 			fctx.Response.SetStatusCode(fiber.StatusNotFound)
 		},

@@ -122,6 +123,7 @@ func New(root string, cfg ...Config) fiber.Handler {
 
 	// Return request if found and not forbidden
 	status := c.Context().Response.StatusCode()
+
 	if status != fiber.StatusNotFound && status != fiber.StatusForbidden {
 		if len(cacheControlValue) > 0 {
 			c.Context().Response.Header.Set(fiber.HeaderCacheControl, cacheControlValue)
@@ -9,6 +9,7 @@ import (
 	"runtime"
 	"strings"
 	"testing"
+	"time"
 
 	"github.com/gofiber/fiber/v3"
 	"github.com/stretchr/testify/require"

@@ -719,3 +720,129 @@ func Test_isFile(t *testing.T) {
 	})
 }
 
+func Test_Static_Compress(t *testing.T) {
+	t.Parallel()
+	dir := "../../.github/testdata/fs"
+	app := fiber.New()
+	app.Get("/*", New(dir, Config{
+		Compress: true,
+	}))
+
+	// Note: deflate is not supported by fasthttp.FS
+	algorithms := []string{"zstd", "gzip", "br"}
+
+	for _, algo := range algorithms {
+		algo := algo
+		t.Run(algo+"_compression", func(t *testing.T) {
+			t.Parallel()
+			// request non-compressible file (less than 200 bytes), Content-Length will remain the same
+			req := httptest.NewRequest(fiber.MethodGet, "/css/style.css", nil)
+			req.Header.Set("Accept-Encoding", algo)
+			resp, err := app.Test(req, 10*time.Second)
+
+			require.NoError(t, err, "app.Test(req)")
+			require.Equal(t, 200, resp.StatusCode, "Status code")
+			require.Equal(t, "", resp.Header.Get(fiber.HeaderContentEncoding))
+			require.Equal(t, "46", resp.Header.Get(fiber.HeaderContentLength))
+
+			// request compressible file, Content-Length will change
+			req = httptest.NewRequest(fiber.MethodGet, "/index.html", nil)
+			req.Header.Set("Accept-Encoding", algo)
+			resp, err = app.Test(req, 10*time.Second)
+
+			require.NoError(t, err, "app.Test(req)")
+			require.Equal(t, 200, resp.StatusCode, "Status code")
+			require.Equal(t, algo, resp.Header.Get(fiber.HeaderContentEncoding))
+			require.Greater(t, "299", resp.Header.Get(fiber.HeaderContentLength))
+		})
+	}
+}
+
+func Test_Static_Compress_WithoutEncoding(t *testing.T) {
+	t.Parallel()
+	dir := "../../.github/testdata/fs"
+	app := fiber.New()
+	app.Get("/*", New(dir, Config{
+		Compress:      true,
+		CacheDuration: 1 * time.Second,
+	}))
+
+	// request compressible file without encoding
+	req := httptest.NewRequest(fiber.MethodGet, "/index.html", nil)
+	resp, err := app.Test(req, 10*time.Second)
+
+	require.NoError(t, err, "app.Test(req)")
+	require.Equal(t, 200, resp.StatusCode, "Status code")
+	require.Equal(t, "", resp.Header.Get(fiber.HeaderContentEncoding))
+	require.Equal(t, "299", resp.Header.Get(fiber.HeaderContentLength))
+
+	// request compressible file with different encodings
+	algorithms := []string{"zstd", "gzip", "br"}
+	fileSuffixes := map[string]string{
+		"gzip": ".fiber.gz",
+		"br":   ".fiber.br",
+		"zstd": ".fiber.zst",
+	}
+
+	for _, algo := range algorithms {
+		// Wait for cache to expire
+		time.Sleep(2 * time.Second)
+		fileName := "index.html"
+		compressedFileName := dir + "/index.html" + fileSuffixes[algo]
+
+		req = httptest.NewRequest(fiber.MethodGet, "/"+fileName, nil)
+		req.Header.Set("Accept-Encoding", algo)
+		resp, err = app.Test(req, 10*time.Second)
+
+		require.NoError(t, err, "app.Test(req)")
+		require.Equal(t, 200, resp.StatusCode, "Status code")
+		require.Equal(t, algo, resp.Header.Get(fiber.HeaderContentEncoding))
+		require.Greater(t, "299", resp.Header.Get(fiber.HeaderContentLength))
+
+		// verify suffixed file was created
+		_, err := os.Stat(compressedFileName)
+		require.NoError(t, err, "File should exist")
+	}
+}
+
+func Test_Static_Compress_WithFileSuffixes(t *testing.T) {
+	t.Parallel()
+	dir := "../../.github/testdata/fs"
+	fileSuffixes := map[string]string{
+		"gzip": ".test.gz",
+		"br":   ".test.br",
+		"zstd": ".test.zst",
+	}
+
+	app := fiber.New(fiber.Config{
+		CompressedFileSuffixes: fileSuffixes,
+	})
+	app.Get("/*", New(dir, Config{
+		Compress:      true,
+		CacheDuration: 1 * time.Second,
+	}))
+
+	// request compressible file with different encodings
+	algorithms := []string{"zstd", "gzip", "br"}
+
+	for _, algo := range algorithms {
+		// Wait for cache to expire
+		time.Sleep(2 * time.Second)
+		fileName := "index.html"
+		compressedFileName := dir + "/index.html" + fileSuffixes[algo]
+
+		req := httptest.NewRequest(fiber.MethodGet, "/"+fileName, nil)
+		req.Header.Set("Accept-Encoding", algo)
+		resp, err := app.Test(req, 10*time.Second)
+
+		require.NoError(t, err, "app.Test(req)")
+		require.Equal(t, 200, resp.StatusCode, "Status code")
+		require.Equal(t, algo, resp.Header.Get(fiber.HeaderContentEncoding))
+		require.Greater(t, "299", resp.Header.Get(fiber.HeaderContentLength))
+
+		// verify suffixed file was created
+		_, err = os.Stat(compressedFileName)
+		require.NoError(t, err, "File should exist")
+	}
+}