Add gzip compression

This commit is contained in:
video-prize-ranch 2024-03-27 17:01:45 -04:00
parent dd6d8bd60f
commit 6f7e694fd4
No known key found for this signature in database
2 changed files with 41 additions and 28 deletions

View File

@ -2,6 +2,7 @@ package gitea
import (
"bytes"
"compress/gzip"
"errors"
"fmt"
"io"
@ -9,7 +10,6 @@ import (
"net/http"
"net/url"
"path"
"strconv"
"strings"
"time"
@ -101,7 +101,7 @@ func (client *Client) ContentWebLink(targetOwner, targetRepo, branch, resource s
}
func (client *Client) GiteaRawContent(targetOwner, targetRepo, ref, resource string) ([]byte, error) {
reader, _, _, err := client.ServeRawContent(targetOwner, targetRepo, ref, resource)
reader, _, _, err := client.ServeRawContent(targetOwner, targetRepo, ref, resource, false)
if err != nil {
return nil, err
}
@ -109,7 +109,7 @@ func (client *Client) GiteaRawContent(targetOwner, targetRepo, ref, resource str
return io.ReadAll(reader)
}
func (client *Client) ServeRawContent(targetOwner, targetRepo, ref, resource string) (io.ReadCloser, http.Header, int, error) {
func (client *Client) ServeRawContent(targetOwner, targetRepo, ref, resource string, acceptsGzip bool) (io.ReadCloser, http.Header, int, error) {
cacheKey := fmt.Sprintf("%s/%s/%s|%s|%s", rawContentCacheKeyPrefix, targetOwner, targetRepo, ref, resource)
log := log.With().Str("cache_key", cacheKey).Logger()
log.Trace().Msg("try file in cache")
@ -123,7 +123,7 @@ func (client *Client) ServeRawContent(targetOwner, targetRepo, ref, resource str
if cache.IsSymlink {
linkDest := string(cache.Body)
log.Debug().Msgf("[cache] follow symlink from %q to %q", resource, linkDest)
return client.ServeRawContent(targetOwner, targetRepo, ref, linkDest)
return client.ServeRawContent(targetOwner, targetRepo, ref, linkDest, acceptsGzip)
} else if !cache.IsEmpty() {
log.Debug().Msgf("[cache] return %d bytes", len(cache.Body))
return io.NopCloser(bytes.NewReader(cache.Body)), cachedHeader, cachedStatusCode, nil
@ -168,7 +168,7 @@ func (client *Client) ServeRawContent(targetOwner, targetRepo, ref, resource str
}
log.Debug().Msgf("follow symlink from %q to %q", resource, linkDest)
return client.ServeRawContent(targetOwner, targetRepo, ref, linkDest)
return client.ServeRawContent(targetOwner, targetRepo, ref, linkDest, acceptsGzip)
}
}
@ -176,7 +176,22 @@ func (client *Client) ServeRawContent(targetOwner, targetRepo, ref, resource str
mimeType := client.getMimeTypeByExtension(resource)
resp.Response.Header.Set(ContentTypeHeader, mimeType)
if !shouldRespBeSavedToCache(resp.Response) {
// Compress file response
buf := new(bytes.Buffer)
gw := gzip.NewWriter(buf)
_, err := io.Copy(gw, reader)
gw.Close()
if err != nil {
log.Error().Err(err).Msg("error on response compression")
}
// If the gzipped response exceeds fileCacheSizeLimit, return it uncached;
// if the client does not support gzip, return the uncompressed response instead
len := int64(buf.Len());
shouldRespBeSavedToCache := len < fileCacheSizeLimit
if !shouldRespBeSavedToCache {
return io.NopCloser(buf), resp.Response.Header, resp.StatusCode, err
} else if !shouldRespBeSavedToCache && !acceptsGzip {
return reader, resp.Response.Header, resp.StatusCode, err
}
@ -186,7 +201,7 @@ func (client *Client) ServeRawContent(targetOwner, targetRepo, ref, resource str
ETag: resp.Header.Get(ETagHeader),
MimeType: mimeType,
}
return fileResp.CreateCacheReader(reader, client.responseCache, cacheKey), resp.Response.Header, resp.StatusCode, nil
return fileResp.CreateCacheReader(io.NopCloser(buf), client.responseCache, cacheKey), resp.Response.Header, resp.StatusCode, nil
case http.StatusNotFound:
if err := client.responseCache.Set(cacheKey, FileResponse{
@ -275,22 +290,3 @@ func (client *Client) getMimeTypeByExtension(resource string) string {
log.Trace().Msgf("probe mime of %q is %q", resource, mimeType)
return mimeType
}
func shouldRespBeSavedToCache(resp *http.Response) bool {
if resp == nil {
return false
}
contentLengthRaw := resp.Header.Get(ContentLengthHeader)
if contentLengthRaw == "" {
return false
}
contentLength, err := strconv.ParseInt(contentLengthRaw, 10, 64)
if err != nil {
log.Error().Err(err).Msg("could not parse content length")
}
// if the content is too big or its size could not be determined, we do not cache it
return contentLength > 0 && contentLength < fileCacheSizeLimit
}

View File

@ -1,6 +1,7 @@
package upstream
import (
"compress/gzip"
"errors"
"fmt"
"io"
@ -97,7 +98,10 @@ func (o *Options) Upstream(ctx *context.Context, giteaClient *gitea.Client, redi
log.Debug().Msg("Preparing")
reader, header, statusCode, err := giteaClient.ServeRawContent(o.TargetOwner, o.TargetRepo, o.TargetBranch, o.TargetPath)
// Check if gzip is supported
acceptsGzip := strings.Contains(ctx.Req.Header.Get("Accept-Encoding"), "gzip")
reader, header, statusCode, err := giteaClient.ServeRawContent(o.TargetOwner, o.TargetRepo, o.TargetBranch, o.TargetPath, acceptsGzip)
if reader != nil {
defer reader.Close()
}
@ -197,6 +201,19 @@ func (o *Options) Upstream(ctx *context.Context, giteaClient *gitea.Client, redi
return true
}
// Decompress response if gzip is not supported
r := reader
if !acceptsGzip {
r, err = gzip.NewReader(reader)
if err != nil {
log.Error().Err(err).Msgf("Couldn't decompress for %q", o.TargetPath)
html.ReturnErrorPage(ctx, "", http.StatusInternalServerError)
return true
}
} else {
ctx.RespWriter.Header().Set("Content-Encoding", "gzip")
}
// Set ETag & MIME
o.setHeader(ctx, header)
@ -206,7 +223,7 @@ func (o *Options) Upstream(ctx *context.Context, giteaClient *gitea.Client, redi
// Write the response body to the original request
if reader != nil {
_, err := io.Copy(ctx.RespWriter, reader)
_, err := io.Copy(ctx.RespWriter, r)
if err != nil {
log.Error().Err(err).Msgf("Couldn't write body for %q", o.TargetPath)
html.ReturnErrorPage(ctx, "", http.StatusInternalServerError)