Browse Source

Introduced imagestreamer, headerwriter

Viktor Sokolov 2 months ago
parent
commit
f69c328425

+ 53 - 0
handlererr/err.go

@@ -0,0 +1,53 @@
+// Package handlererr exposes helper functions for error handling in request handlers
+// (like streaming or processing).
+package handlererr
+
+import (
+	"context"
+	"errors"
+	"net/http"
+
+	"github.com/imgproxy/imgproxy/v3/ierrors"
+	"github.com/imgproxy/imgproxy/v3/metrics"
+)
+
+// Error types for categorizing errors in the error collector
+const (
+	ErrTypeTimeout         = "timeout"
+	ErrTypeStreaming       = "streaming"
+	ErrTypeDownload        = "download"
+	ErrTypeSvgProcessing   = "svg_processing"
+	ErrTypeProcessing      = "processing"
+	ErrTypePathParsing     = "path_parsing"
+	ErrTypeSecurity        = "security"
+	ErrTypeQueue           = "queue"
+	ErrTypeIO              = "IO"
+	// NOTE(review): same value as ErrTypeDownload. If download timeouts are meant
+	// to be a distinct metrics category this was likely intended to be
+	// "download_timeout"; otherwise the duplicate constant can be dropped — confirm.
+	ErrTypeDownloadTimeout = "download"
+)
+
+// Send sends an error to the error collector if the error is not a 499 (client closed request)
+func Send(ctx context.Context, errType string, err error) {
+	if ierr, ok := err.(*ierrors.Error); ok {
+		switch ierr.StatusCode() {
+		case http.StatusServiceUnavailable:
+			errType = ErrTypeTimeout
+		case 499:
+			return // no need to report request closed by the client
+		}
+	}
+
+	metrics.SendError(ctx, errType, err)
+}
+
+// SendAndPanic sends an error to the error collector (see Send) and then
+// panics with that error.
+// NOTE(review): callers are presumably expected to recover this panic
+// upstream in the request handler chain — confirm.
+func SendAndPanic(ctx context.Context, errType string, err error) {
+	Send(ctx, errType, err)
+	panic(err)
+}
+
+// Check reports err via SendAndPanic when it is non-nil; a nil error is a no-op.
+func Check(ctx context.Context, errType string, err error) {
+	if err != nil {
+		SendAndPanic(ctx, errType, err)
+	}
+}

+ 44 - 0
headerwriter/config.go

@@ -0,0 +1,44 @@
+package headerwriter
+
+import (
+	"github.com/imgproxy/imgproxy/v3/config"
+)
+
+// Config is the package-local configuration
+type Config struct {
+	// SetCanonicalHeader indicates whether to set the canonical header
+	SetCanonicalHeader bool
+
+	// DefaultTTL is the default Cache-Control max-age= value (in seconds) for cached images
+	DefaultTTL int
+
+	// CacheControlPassthrough indicates whether to passthrough the Cache-Control header
+	// from the original response
+	CacheControlPassthrough bool
+
+	// LastModifiedEnabled indicates whether to set the Last-Modified header
+	LastModifiedEnabled bool
+
+	// EnableClientHints indicates whether to enable Client Hints in the Vary header
+	EnableClientHints bool
+
+	// SetVaryAccept indicates that the Vary header should include Accept
+	SetVaryAccept bool
+}
+
+// NewConfigFromEnv builds a Config from the current global imgproxy configuration.
+func NewConfigFromEnv() *Config {
+	// Accept must vary whenever any automatic/forced format negotiation is on.
+	varyAccept := config.AutoWebp ||
+		config.EnforceWebp ||
+		config.AutoAvif ||
+		config.EnforceAvif ||
+		config.AutoJxl ||
+		config.EnforceJxl
+
+	return &Config{
+		SetCanonicalHeader:      config.SetCanonicalHeader,
+		DefaultTTL:              config.TTL,
+		CacheControlPassthrough: config.CacheControlPassthrough,
+		LastModifiedEnabled:     config.LastModifiedEnabled,
+		EnableClientHints:       config.EnableClientHints,
+		SetVaryAccept:           varyAccept,
+	}
+}

+ 19 - 0
headerwriter/factory.go

@@ -0,0 +1,19 @@
+package headerwriter
+
+import "net/http"
+
+// Factory creates Writer instances that share a single configuration
+type Factory struct {
+	config *Config
+}
+
+// NewFactory creates a new Factory instance with the provided configuration
+func NewFactory(config *Config) *Factory {
+	return &Factory{config: config}
+}
+
+// NewHeaderWriter creates a new Writer instance for the provided original
+// response headers and request URL.
+// TODO(review): consider accepting a typed *url.URL instead of a string here.
+func (f *Factory) NewHeaderWriter(originalResponseHeaders http.Header, url string) *Writer {
+	return newWriter(f.config, originalResponseHeaders, url)
+}

+ 240 - 0
headerwriter/writer.go

@@ -0,0 +1,240 @@
+// Package headerwriter writes response HTTP headers.
+package headerwriter
+
+import (
+	"fmt"
+	"net/http"
+	"strconv"
+	"strings"
+	"time"
+
+	"go.withmatt.com/httpheaders"
+)
+
+const (
+	// contentDispositionFmt is the Content-Disposition header format:
+	// `<disposition>; filename="<name><ext>"`
+	contentDispositionFmt = "%s; filename=\"%s%s\""
+)
+
+// Writer is a struct that builds HTTP response headers.
+type Writer struct {
+	config                  *Config
+	originalResponseHeaders http.Header // Original response headers
+	res                     http.Header // Headers to be written to the response
+	maxAge                  int         // Current max age for Cache-Control header (-1 means unset)
+	url                     string      // URL of the request, used for canonical header
+}
+
+// newWriter creates a new Writer instance with the provided config, original
+// response headers, and request URL. maxAge starts at -1 (unset).
+func newWriter(config *Config, originalResponseHeaders http.Header, url string) *Writer {
+	return &Writer{
+		config:                  config,
+		originalResponseHeaders: originalResponseHeaders,
+		url:                     url,
+		res:                     make(http.Header),
+		maxAge:                  -1,
+	}
+}
+
+// SetMaxAge sets the max-age for the Cache-Control header, overriding any
+// existing max-age value. Non-positive values are ignored.
+func (w *Writer) SetMaxAge(maxAge int) {
+	if maxAge > 0 {
+		w.maxAge = maxAge
+	}
+}
+
+// WriteIsFallbackImage sets the Fallback-Image header to "1" to
+// indicate that the fallback image was used instead of the requested one.
+func (w *Writer) WriteIsFallbackImage() {
+	w.res.Set("Fallback-Image", "1")
+}
+
+// SetMaxAgeFromExpires sets the max-age for the Cache-Control header based
+// on the expiration time provided. If the expiration time is earlier than the
+// currently scheduled max-age expiry (or max-age is unset), maxAge is
+// corrected to the remaining TTL. A nil expires is a no-op.
+func (w *Writer) SetMaxAgeFromExpires(expires *time.Time) {
+	if expires == nil {
+		return
+	}
+
+	// Convert current maxAge to the absolute instant it would expire at
+	currentMaxAgeTime := time.Now().Add(time.Duration(w.maxAge) * time.Second)
+
+	// If the expires time is in the past compared to the current maxAge time,
+	// or if maxAge is not set, we will use the expires time to set the maxAge.
+	if w.maxAge < 0 || expires.Before(currentMaxAgeTime) {
+		// Get the TTL from the expires time (must not be in the past)
+		expiresTTL := max(0, int(time.Until(*expires).Seconds()))
+
+		// An already-expired time yields 0 and leaves maxAge unchanged
+		if expiresTTL > 0 {
+			w.maxAge = expiresTTL
+		}
+	}
+}
+
+// WriteLastModified copies the Last-Modified header from the original
+// response, but only when last-modified support is enabled in the config.
+func (w *Writer) WriteLastModified() {
+	if !w.config.LastModifiedEnabled {
+		return
+	}
+
+	if val := w.originalResponseHeaders.Get(httpheaders.LastModified); val != "" {
+		w.res.Set(httpheaders.LastModified, val)
+	}
+}
+
+// WriteVary sets the Vary header
+func (w *Writer) WriteVary() {
+	vary := make([]string, 0)
+
+	if w.config.SetVaryAccept {
+		vary = append(vary, "Accept")
+	}
+
+	if w.config.EnableClientHints {
+		vary = append(vary, "Sec-CH-DPR", "DPR", "Sec-CH-Width", "Width")
+	}
+
+	varyValue := strings.Join(vary, ", ")
+
+	if varyValue != "" {
+		w.res.Set(httpheaders.Vary, varyValue)
+	}
+}
+
+// Copy copies the specified headers from the original response headers to the
+// response headers. Values are appended, so previously written values for the
+// same keys are preserved.
+func (w *Writer) Copy(only []string) {
+	// Delegate to CopyFrom to avoid duplicating the copy loop.
+	w.CopyFrom(w.originalResponseHeaders, only)
+}
+
+// CopyFrom copies the specified headers from the given headers object into
+// the response headers. Values are appended (http.Header.Add), so existing
+// values for the same keys are preserved; later Set calls may still overwrite them.
+func (w *Writer) CopyFrom(headers http.Header, only []string) {
+	for _, key := range only {
+		values := headers.Values(key)
+
+		for _, value := range values {
+			w.res.Add(key, value)
+		}
+	}
+}
+
+// WriteContentLength sets the Content-Length header.
+// Non-positive lengths are ignored (e.g. unknown length reported as -1).
+func (w *Writer) WriteContentLength(contentLength int) {
+	if contentLength > 0 {
+		w.res.Set(httpheaders.ContentLength, strconv.Itoa(contentLength))
+	}
+}
+
+// WriteContentDisposition sets the Content-Disposition header. Double quotes
+// in the filename are replaced with %22 so the quoted-string stays intact.
+// NOTE(review): non-ASCII filenames are not percent-encoded (no RFC 6266
+// filename* fallback) — confirm callers only pass ASCII-safe names.
+func (w *Writer) WriteContentDisposition(filename, ext string, returnAttachment bool) {
+	disposition := "inline"
+
+	if returnAttachment {
+		disposition = "attachment"
+	}
+
+	value := fmt.Sprintf(contentDispositionFmt, disposition, strings.ReplaceAll(filename, `"`, "%22"), ext)
+
+	w.res.Set(httpheaders.ContentDisposition, value)
+}
+
+// WriteContentType sets the Content-Type header to the given MIME type.
+func (w *Writer) WriteContentType(mime string) {
+	w.res.Set(httpheaders.ContentType, mime)
+}
+
+// WriteCanonical sets the Link header with the canonical URL when enabled in
+// the configuration. Only http:// and https:// URLs are eligible.
+// The receiver is named w for consistency with every other Writer method.
+func (w *Writer) WriteCanonical() {
+	if !w.config.SetCanonicalHeader {
+		return
+	}
+
+	if strings.HasPrefix(w.url, "https://") || strings.HasPrefix(w.url, "http://") {
+		w.res.Set(httpheaders.Link, fmt.Sprintf(`<%s>; rel="canonical"`, w.url))
+	}
+}
+
+// writeCacheControlNoCache sets the Cache-Control header to no-cache.
+// This is the default that Write starts from; passthrough or max-age
+// resolution may overwrite it afterwards.
+func (w *Writer) writeCacheControlNoCache() {
+	w.res.Set(httpheaders.CacheControl, "no-cache")
+}
+
+// writeCacheControlMaxAge sets the Cache-Control header with max-age, falling
+// back to the configured default TTL when no explicit max-age was set.
+// Writes nothing when both values are non-positive.
+func (w *Writer) writeCacheControlMaxAge() {
+	maxAge := w.maxAge
+
+	if maxAge <= 0 {
+		maxAge = w.config.DefaultTTL
+	}
+
+	if maxAge > 0 {
+		w.res.Set(httpheaders.CacheControl, fmt.Sprintf("max-age=%d, public", maxAge))
+	}
+}
+
+// writeCacheControlPassthrough handles Cache-Control/Expires passthrough from
+// the original response. It returns true only when the original Cache-Control
+// header was copied verbatim, telling the caller not to write its own
+// Cache-Control. When only an Expires header is present, it is converted into
+// a TTL stored in w.maxAge and false is returned so writeCacheControlMaxAge
+// emits the corresponding max-age value. Passthrough is skipped entirely when
+// disabled in the config or when an explicit max-age was already set.
+func (w *Writer) writeCacheControlPassthrough() bool {
+	if !w.config.CacheControlPassthrough || w.maxAge > 0 {
+		return false
+	}
+
+	if val := w.originalResponseHeaders.Get(httpheaders.CacheControl); val != "" {
+		w.res.Set(httpheaders.CacheControl, val)
+		return true
+	}
+
+	// No Cache-Control: derive maxAge from Expires (clamped at 0 if in the past)
+	if val := w.originalResponseHeaders.Get(httpheaders.Expires); val != "" {
+		if t, err := time.Parse(http.TimeFormat, val); err == nil {
+			w.maxAge = max(0, int(time.Until(t).Seconds()))
+		}
+	}
+
+	return false
+}
+
+// writeCSP sets the Content-Security-Policy header to prevent script
+// execution in case the served payload is opened as a document.
+func (w *Writer) writeCSP() {
+	w.res.Set("Content-Security-Policy", "script-src 'none'")
+}
+
+// Write writes the collected headers to the response writer. Cache-Control is
+// resolved first: the no-cache default is written, then passthrough or
+// max-age may overwrite it; the CSP header is always set. The accumulated
+// headers are then added (not set) on rw, so any values already present on rw
+// are preserved.
+func (w *Writer) Write(rw http.ResponseWriter) {
+	w.writeCacheControlNoCache()
+
+	if !w.writeCacheControlPassthrough() {
+		w.writeCacheControlMaxAge()
+	}
+
+	w.writeCSP()
+
+	for key, values := range w.res {
+		for _, value := range values {
+			rw.Header().Add(key, value)
+		}
+	}
+}
+
+// NOTE: WIP
+
+// func (w *HeaderBuilder) SetDebugHeaders() {
+// 	if config.EnableDebugHeaders {
+// 		rw.Header().Set("X-Origin-Content-Length", strconv.Itoa(len(originData.Data)))
+// 		rw.Header().Set("X-Origin-Width", resultData.Headers["X-Origin-Width"])
+// 		rw.Header().Set("X-Origin-Height", resultData.Headers["X-Origin-Height"])
+// 		rw.Header().Set("X-Result-Width", resultData.Headers["X-Result-Width"])
+// 		rw.Header().Set("X-Result-Height", resultData.Headers["X-Result-Height"])
+// 	}
+// }

+ 361 - 0
headerwriter/writer_test.go

@@ -0,0 +1,361 @@
+package headerwriter
+
+import (
+	"fmt"
+	"math"
+	"net/http"
+	"net/http/httptest"
+	"strconv"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/suite"
+	"go.withmatt.com/httpheaders"
+)
+
+// HeaderWriterSuite is the test suite for the headerwriter package.
+type HeaderWriterSuite struct {
+	suite.Suite
+}
+
+// writerTestCase describes one table-driven case: a writer is built from
+// config, req (the original response headers), and url; fn optionally mutates
+// the writer before Write; res is the full set of headers expected on the
+// recorded response.
+type writerTestCase struct {
+	name   string
+	url    string
+	req    http.Header
+	res    http.Header
+	config Config
+	fn     func(*Writer)
+}
+
+// TestHeaderCases is a table-driven check of the Writer operations: each case
+// builds a writer, optionally mutates it via fn, calls Write, and compares the
+// full recorded header set against res.
+func (s *HeaderWriterSuite) TestHeaderCases() {
+	expires := time.Date(2030, 8, 1, 0, 0, 0, 0, time.UTC)
+	// NOTE(review): this expected value is computed now, while the writer calls
+	// time.Until again later; if a second boundary passes in between, max-age
+	// differs by one and the test flakes. Consider injecting a clock or
+	// tolerating ±1s.
+	expiresSeconds := strconv.Itoa(int(time.Until(expires).Seconds()))
+
+	tt := []writerTestCase{
+		{
+			name: "MinimalHeaders",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				SetCanonicalHeader:      false,
+				DefaultTTL:              0,
+				CacheControlPassthrough: false,
+				LastModifiedEnabled:     false,
+				EnableClientHints:       false,
+				SetVaryAccept:           false,
+			},
+		},
+		{
+			name: "PassthroughCacheControl",
+			req: http.Header{
+				httpheaders.CacheControl: []string{"no-cache, no-store, must-revalidate"},
+			},
+			res: http.Header{
+				httpheaders.CacheControl:          []string{"no-cache, no-store, must-revalidate"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				CacheControlPassthrough: true,
+				DefaultTTL:              3600,
+			},
+		},
+		{
+			name: "PassthroughCacheControlExpires",
+			req: http.Header{
+				httpheaders.Expires: []string{expires.Format(http.TimeFormat)},
+			},
+			res: http.Header{
+				httpheaders.CacheControl:          []string{fmt.Sprintf("max-age=%s, public", expiresSeconds)},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				CacheControlPassthrough: true,
+				DefaultTTL:              3600,
+			},
+		},
+		{
+			name: "Canonical_ValidURL",
+			req:  http.Header{},
+			url:  "https://example.com/image.jpg",
+			res: http.Header{
+				httpheaders.Link:                  []string{"<https://example.com/image.jpg>; rel=\"canonical\""},
+				httpheaders.CacheControl:          []string{"max-age=3600, public"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				SetCanonicalHeader: true,
+				DefaultTTL:         3600,
+			},
+			fn: func(w *Writer) {
+				w.WriteCanonical()
+			},
+		},
+		{
+			name: "Canonical_InvalidURL",
+			url:  "ftp://example.com/image.jpg",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.CacheControl:          []string{"max-age=3600, public"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				SetCanonicalHeader: true,
+				DefaultTTL:         3600,
+			},
+		},
+		{
+			name: "WriteCanonical_Disabled",
+			req:  http.Header{},
+			url:  "https://example.com/image.jpg",
+			res: http.Header{
+				httpheaders.CacheControl:          []string{"max-age=3600, public"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				SetCanonicalHeader: false,
+				DefaultTTL:         3600,
+			},
+			fn: func(w *Writer) {
+				w.WriteCanonical()
+			},
+		},
+		{
+			name: "LastModified",
+			req: http.Header{
+				httpheaders.LastModified: []string{expires.Format(http.TimeFormat)},
+			},
+			res: http.Header{
+				httpheaders.LastModified:          []string{expires.Format(http.TimeFormat)},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+				httpheaders.CacheControl:          []string{"max-age=3600, public"},
+			},
+			config: Config{
+				LastModifiedEnabled: true,
+				DefaultTTL:          3600,
+			},
+			fn: func(w *Writer) {
+				w.WriteLastModified()
+			},
+		},
+		{
+			name: "SetMaxAgeExplicit",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.CacheControl:          []string{"max-age=1, public"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				DefaultTTL: 3600,
+			},
+			fn: func(w *Writer) {
+				w.SetMaxAge(1)
+			},
+		},
+		{
+			name: "SetMaxAgeFromTime",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.CacheControl:          []string{fmt.Sprintf("max-age=%s, public", expiresSeconds)},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				DefaultTTL: math.MaxInt32,
+			},
+			fn: func(w *Writer) {
+				w.SetMaxAgeFromExpires(&expires)
+			},
+		},
+		{
+			name: "SetVaryHeader",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.Vary:                  []string{"Accept, Sec-CH-DPR, DPR, Sec-CH-Width, Width"},
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				EnableClientHints: true,
+				SetVaryAccept:     true,
+			},
+			fn: func(w *Writer) {
+				w.WriteVary()
+			},
+		},
+		{
+			name: "CopyHeaders",
+			req: http.Header{
+				"X-Test": []string{"foo", "bar"},
+			},
+			res: http.Header{
+				"X-Test":                          []string{"foo", "bar"},
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{},
+			fn: func(w *Writer) {
+				w.Copy([]string{"X-Test"})
+			},
+		},
+		{
+			name: "CopyFromHeaders",
+			req:  http.Header{},
+			res: http.Header{
+				"X-From":                          []string{"baz"},
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{},
+			fn: func(w *Writer) {
+				h := http.Header{}
+				h.Set("X-From", "baz")
+				w.CopyFrom(h, []string{"X-From"})
+			},
+		},
+		{
+			name: "WriteContentLength",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.ContentLength:         []string{"123"},
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{},
+			fn: func(w *Writer) {
+				w.WriteContentLength(123)
+			},
+		},
+		{
+			name: "WriteContentDispositionInline",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.ContentDisposition:    []string{"inline; filename=\"file.txt\""},
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{},
+			fn: func(w *Writer) {
+				w.WriteContentDisposition("file", ".txt", false)
+			},
+		},
+		{
+			name: "WriteContentDispositionAttachment",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.ContentDisposition:    []string{"attachment; filename=\"file.txt\""},
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{},
+			fn: func(w *Writer) {
+				w.WriteContentDisposition("file", ".txt", true)
+			},
+		},
+		{
+			name: "WriteContentType",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.ContentType:           []string{"image/png"},
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{},
+			fn: func(w *Writer) {
+				w.WriteContentType("image/png")
+			},
+		},
+		{
+			name: "WriteIsFallbackImage",
+			req:  http.Header{},
+			res: http.Header{
+				"Fallback-Image":                  []string{"1"},
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{},
+			fn: func(w *Writer) {
+				w.WriteIsFallbackImage()
+			},
+		},
+		{
+			name: "SetMaxAgeZeroOrNegative",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.CacheControl:          []string{"max-age=3600, public"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				DefaultTTL: 3600,
+			},
+			fn: func(w *Writer) {
+				w.SetMaxAge(0)
+				w.SetMaxAge(-10)
+			},
+		},
+		{
+			name: "SetMaxAgeFromExpiresNil",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.CacheControl:          []string{"max-age=3600, public"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				DefaultTTL: 3600,
+			},
+			fn: func(w *Writer) {
+				w.SetMaxAgeFromExpires(nil)
+			},
+		},
+		{
+			name: "WriteVaryAcceptOnly",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.Vary:                  []string{"Accept"},
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				SetVaryAccept: true,
+			},
+			fn: func(w *Writer) {
+				w.WriteVary()
+			},
+		},
+		{
+			name: "WriteVaryClientHintsOnly",
+			req:  http.Header{},
+			res: http.Header{
+				httpheaders.Vary:                  []string{"Sec-CH-DPR, DPR, Sec-CH-Width, Width"},
+				httpheaders.CacheControl:          []string{"no-cache"},
+				httpheaders.ContentSecurityPolicy: []string{"script-src 'none'"},
+			},
+			config: Config{
+				EnableClientHints: true,
+			},
+			fn: func(w *Writer) {
+				w.WriteVary()
+			},
+		},
+	}
+
+	for _, tc := range tt {
+		s.Run(tc.name, func() {
+			f := NewFactory(&tc.config)
+			writer := f.NewHeaderWriter(tc.req, tc.url)
+
+			if tc.fn != nil {
+				tc.fn(writer)
+			}
+
+			r := httptest.NewRecorder()
+			writer.Write(r)
+
+			s.Require().Equal(tc.res, r.Header())
+		})
+	}
+}
+
+// TestHeaderWriter runs the HeaderWriterSuite.
+func TestHeaderWriter(t *testing.T) {
+	suite.Run(t, new(HeaderWriterSuite))
+}

+ 7 - 8
imagedata/download.go

@@ -5,7 +5,6 @@ import (
 	"net/http"
 	"slices"
 
-	"github.com/imgproxy/imgproxy/v3/config"
 	"github.com/imgproxy/imgproxy/v3/ierrors"
 	"github.com/imgproxy/imgproxy/v3/imagefetcher"
 	"github.com/imgproxy/imgproxy/v3/security"
@@ -43,7 +42,7 @@ func initDownloading() error {
 		return err
 	}
 
-	Fetcher, err = imagefetcher.NewFetcher(ts, config.MaxRedirects)
+	Fetcher, err = imagefetcher.NewFetcher(ts, imagefetcher.NewConfigFromEnv())
 	if err != nil {
 		return ierrors.Wrap(err, 0, ierrors.WithPrefix("can't create image fetcher"))
 	}
@@ -51,7 +50,7 @@ func initDownloading() error {
 	return nil
 }
 
-func download(ctx context.Context, imageURL string, opts DownloadOptions, secopts security.Options) (*ImageData, error) {
+func download(ctx context.Context, imageURL string, opts DownloadOptions, secopts security.Options) (*ImageData, http.Header, error) {
 	// We use this for testing
 	if len(redirectAllRequestsTo) > 0 {
 		imageURL = redirectAllRequestsTo
@@ -59,7 +58,7 @@ func download(ctx context.Context, imageURL string, opts DownloadOptions, secopt
 
 	req, err := Fetcher.BuildRequest(ctx, imageURL, opts.Header, opts.CookieJar)
 	if err != nil {
-		return nil, err
+		return nil, nil, err
 	}
 	defer req.Cancel()
 
@@ -68,7 +67,7 @@ func download(ctx context.Context, imageURL string, opts DownloadOptions, secopt
 		if res != nil {
 			res.Body.Close()
 		}
-		return nil, err
+		return nil, nil, err
 	}
 
 	res, err = security.LimitResponseSize(res, secopts)
@@ -76,12 +75,12 @@ func download(ctx context.Context, imageURL string, opts DownloadOptions, secopt
 		defer res.Body.Close()
 	}
 	if err != nil {
-		return nil, err
+		return nil, nil, err
 	}
 
 	imgdata, err := readAndCheckImage(res.Body, int(res.ContentLength), secopts)
 	if err != nil {
-		return nil, ierrors.Wrap(err, 0)
+		return nil, nil, ierrors.Wrap(err, 0)
 	}
 
 	h := make(map[string]string)
@@ -100,7 +99,7 @@ func download(ctx context.Context, imageURL string, opts DownloadOptions, secopt
 
 	imgdata.Headers = h
 
-	return imgdata, nil
+	return imgdata, res.Header, nil
 }
 
 func RedirectAllRequestsTo(u string) {

+ 7 - 6
imagedata/image_data.go

@@ -4,6 +4,7 @@ import (
 	"context"
 	"encoding/base64"
 	"fmt"
+	"net/http"
 	"os"
 	"strings"
 	"sync"
@@ -70,7 +71,7 @@ func loadWatermark() (err error) {
 	}
 
 	if len(config.WatermarkURL) > 0 {
-		Watermark, err = Download(context.Background(), config.WatermarkURL, "watermark", DownloadOptions{Header: nil, CookieJar: nil}, security.DefaultOptions())
+		Watermark, _, err = Download(context.Background(), config.WatermarkURL, "watermark", DownloadOptions{Header: nil, CookieJar: nil}, security.DefaultOptions())
 		return
 	}
 
@@ -84,7 +85,7 @@ func loadFallbackImage() (err error) {
 	case len(config.FallbackImagePath) > 0:
 		FallbackImage, err = FromFile(config.FallbackImagePath, "fallback image", security.DefaultOptions())
 	case len(config.FallbackImageURL) > 0:
-		FallbackImage, err = Download(context.Background(), config.FallbackImageURL, "fallback image", DownloadOptions{Header: nil, CookieJar: nil}, security.DefaultOptions())
+		FallbackImage, _, err = Download(context.Background(), config.FallbackImageURL, "fallback image", DownloadOptions{Header: nil, CookieJar: nil}, security.DefaultOptions())
 	default:
 		FallbackImage, err = nil, nil
 	}
@@ -130,14 +131,14 @@ func FromFile(path, desc string, secopts security.Options) (*ImageData, error) {
 	return imgdata, nil
 }
 
-func Download(ctx context.Context, imageURL, desc string, opts DownloadOptions, secopts security.Options) (*ImageData, error) {
-	imgdata, err := download(ctx, imageURL, opts, secopts)
+func Download(ctx context.Context, imageURL, desc string, opts DownloadOptions, secopts security.Options) (*ImageData, http.Header, error) {
+	imgdata, h, err := download(ctx, imageURL, opts, secopts)
 	if err != nil {
-		return nil, ierrors.Wrap(
+		return nil, nil, ierrors.Wrap(
 			err, 0,
 			ierrors.WithPrefix(fmt.Sprintf("Can't download %s", desc)),
 		)
 	}
 
-	return imgdata, nil
+	return imgdata, h, nil
 }

+ 11 - 11
imagedata/image_data_test.go

@@ -90,7 +90,7 @@ func (s *ImageDataTestSuite) SetupTest() {
 }
 
 func (s *ImageDataTestSuite) TestDownloadStatusOK() {
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, _, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 	s.Require().NoError(err)
 	s.Require().NotNil(imgdata)
@@ -157,7 +157,7 @@ func (s *ImageDataTestSuite) TestDownloadStatusPartialContent() {
 		s.Run(tc.name, func() {
 			s.header.Set("Content-Range", tc.contentRange)
 
-			imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+			imgdata, _, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 			if tc.expectErr {
 				s.Require().Error(err)
@@ -177,7 +177,7 @@ func (s *ImageDataTestSuite) TestDownloadStatusNotFound() {
 	s.data = []byte("Not Found")
 	s.header.Set("Content-Type", "text/plain")
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, _, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(404, ierrors.Wrap(err, 0).StatusCode())
@@ -189,7 +189,7 @@ func (s *ImageDataTestSuite) TestDownloadStatusForbidden() {
 	s.data = []byte("Forbidden")
 	s.header.Set("Content-Type", "text/plain")
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, _, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(404, ierrors.Wrap(err, 0).StatusCode())
@@ -201,7 +201,7 @@ func (s *ImageDataTestSuite) TestDownloadStatusInternalServerError() {
 	s.data = []byte("Internal Server Error")
 	s.header.Set("Content-Type", "text/plain")
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, _, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(500, ierrors.Wrap(err, 0).StatusCode())
@@ -215,7 +215,7 @@ func (s *ImageDataTestSuite) TestDownloadUnreachable() {
 
 	serverURL := fmt.Sprintf("http://%s", l.Addr().String())
 
-	imgdata, err := Download(context.Background(), serverURL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, _, err := Download(context.Background(), serverURL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(500, ierrors.Wrap(err, 0).StatusCode())
@@ -225,7 +225,7 @@ func (s *ImageDataTestSuite) TestDownloadUnreachable() {
 func (s *ImageDataTestSuite) TestDownloadInvalidImage() {
 	s.data = []byte("invalid")
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, _, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(422, ierrors.Wrap(err, 0).StatusCode())
@@ -235,7 +235,7 @@ func (s *ImageDataTestSuite) TestDownloadInvalidImage() {
 func (s *ImageDataTestSuite) TestDownloadSourceAddressNotAllowed() {
 	config.AllowLoopbackSourceAddresses = false
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, _, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(404, ierrors.Wrap(err, 0).StatusCode())
@@ -245,7 +245,7 @@ func (s *ImageDataTestSuite) TestDownloadSourceAddressNotAllowed() {
 func (s *ImageDataTestSuite) TestDownloadImageTooLarge() {
 	config.MaxSrcResolution = 1
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, _, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(422, ierrors.Wrap(err, 0).StatusCode())
@@ -255,7 +255,7 @@ func (s *ImageDataTestSuite) TestDownloadImageTooLarge() {
 func (s *ImageDataTestSuite) TestDownloadImageFileTooLarge() {
 	config.MaxSrcFileSize = 1
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, _, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(422, ierrors.Wrap(err, 0).StatusCode())
@@ -274,7 +274,7 @@ func (s *ImageDataTestSuite) TestDownloadGzip() {
 	s.data = buf.Bytes()
 	s.header.Set("Content-Encoding", "gzip")
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, _, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
 
 	s.Require().NoError(err)
 	s.Require().NotNil(imgdata)

+ 22 - 0
imagefetcher/config.go

@@ -0,0 +1,22 @@
+package imagefetcher
+
+import "github.com/imgproxy/imgproxy/v3/config"
+
+// Config is the package-local configuration for the Fetcher.
+type Config struct {
+	// MaxRedirects is the maximum number of redirects to follow
+	MaxRedirects int
+
+	// UserAgent is the user agent string to use for requests
+	UserAgent string
+
+	// DownloadTimeout is the timeout for downloading images in seconds
+	DownloadTimeout int
+}
+
+// NewConfigFromEnv creates a new Config instance from the current global configuration.
+func NewConfigFromEnv() *Config {
+	return &Config{
+		MaxRedirects:    config.MaxRedirects,
+		UserAgent:       config.UserAgent,
+		DownloadTimeout: config.DownloadTimeout,
+	}
+}

+ 7 - 8
imagefetcher/fetcher.go

@@ -7,7 +7,6 @@ import (
 	"net/http"
 	"time"
 
-	"github.com/imgproxy/imgproxy/v3/config"
 	"github.com/imgproxy/imgproxy/v3/transport"
 	"github.com/imgproxy/imgproxy/v3/transport/common"
 	"go.withmatt.com/httpheaders"
@@ -20,19 +19,19 @@ const (
 
 // Fetcher is a struct that holds the HTTP client and transport for fetching images
 type Fetcher struct {
-	transport    *transport.Transport // Transport used for making HTTP requests
-	maxRedirects int                  // Maximum number of redirects allowed
+	transport *transport.Transport // Transport used for making HTTP requests
+	config    *Config              // Configuration for the fetcher
 }
 
 // NewFetcher creates a new ImageFetcher with the provided transport
-func NewFetcher(transport *transport.Transport, maxRedirects int) (*Fetcher, error) {
-	return &Fetcher{transport, maxRedirects}, nil
+func NewFetcher(transport *transport.Transport, config *Config) (*Fetcher, error) {
+	return &Fetcher{transport, config}, nil
 }
 
 // checkRedirect is a method that checks if the number of redirects exceeds the maximum allowed
 func (f *Fetcher) checkRedirect(req *http.Request, via []*http.Request) error {
 	redirects := len(via)
-	if redirects >= f.maxRedirects {
+	if redirects >= f.config.MaxRedirects {
 		return newImageTooManyRedirectsError(redirects)
 	}
 	return nil
@@ -51,7 +50,7 @@ func (f *Fetcher) BuildRequest(ctx context.Context, url string, header http.Head
 	url = common.EscapeURL(url)
 
 	// Set request timeout and get cancel function
-	ctx, cancel := context.WithTimeout(ctx, time.Duration(config.DownloadTimeout)*time.Second)
+	ctx, cancel := context.WithTimeout(ctx, time.Duration(f.config.DownloadTimeout)*time.Second)
 
 	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
 	if err != nil {
@@ -73,7 +72,7 @@ func (f *Fetcher) BuildRequest(ctx context.Context, url string, header http.Head
 	}
 
 	// Set user agent header
-	req.Header.Set(httpheaders.UserAgent, config.UserAgent)
+	req.Header.Set(httpheaders.UserAgent, f.config.UserAgent)
 
 	// Set headers
 	for k, v := range header {

+ 38 - 0
imagestreamer/config.go

@@ -0,0 +1,38 @@
+package imagestreamer
+
+import (
+	"github.com/imgproxy/imgproxy/v3/config"
+	"go.withmatt.com/httpheaders"
+)
+
+// Config represents the configuration for the image streamer
+type Config struct {
+	// CookiePassthrough indicates whether cookies should be passed through to the image response
+	CookiePassthrough bool
+
+	// PassthroughRequestHeaders specifies the request headers to include in the passthrough response
+	PassthroughRequestHeaders []string
+
+	// KeepResponseHeaders specifies the response headers to copy from the response
+	KeepResponseHeaders []string
+}
+
+// NewConfigFromEnv creates a new Config instance from environment variables
+func NewConfigFromEnv() *Config {
+	return &Config{
+		CookiePassthrough: config.CookiePassthrough,
+		PassthroughRequestHeaders: []string{
+			httpheaders.IfNoneMatch,
+			httpheaders.AcceptEncoding,
+			// Use the httpheaders constant for consistency with the other entries
+			httpheaders.Range,
+		},
+		KeepResponseHeaders: []string{
+			httpheaders.ContentType,
+			httpheaders.ContentEncoding,
+			httpheaders.ContentRange,
+			httpheaders.AcceptRanges,
+			httpheaders.LastModified,
+			httpheaders.Etag,
+		},
+	}
+}

+ 44 - 0
imagestreamer/service.go

@@ -0,0 +1,44 @@
+package imagestreamer
+
+import (
+	"context"
+	"net/http"
+
+	"github.com/imgproxy/imgproxy/v3/headerwriter"
+	"github.com/imgproxy/imgproxy/v3/imagefetcher"
+	"github.com/imgproxy/imgproxy/v3/options"
+)
+
+// Service is a struct which stores dependencies for the image streaming service
+type Service struct {
+	config              *Config
+	fetcher             *imagefetcher.Fetcher
+	headerWriterFactory *headerwriter.Factory
+}
+
+// Request holds parameters for the image streaming options
+// NOTE: This struct looks like it could be reused in the processing handler as a request context
+type Request struct {
+	UserRequest       *http.Request              // Original user request to imgproxy
+	ImageURL          string                     // URL of the image to be streamed
+	ReqID             string                     // Unique identifier for the request
+	ProcessingOptions *options.ProcessingOptions // Processing options for the image
+	Rw                http.ResponseWriter        // Response writer to write the streamed image
+}
+
+// NewService creates a new service instance with the provided configuration
+func NewService(config *Config, fetcher *imagefetcher.Fetcher, headerWriterFactory *headerwriter.Factory) *Service {
+	return &Service{config: config, fetcher: fetcher, headerWriterFactory: headerWriterFactory}
+}
+
+// Stream streams the image based on the provided request
+func (svc *Service) Stream(ctx context.Context, r *Request) {
+	str := &streamer{
+		config:              svc.config,
+		fetcher:             svc.fetcher,
+		headerWriterFactory: svc.headerWriterFactory,
+		p:                   r,
+	}
+
+	str.Stream(ctx)
+}

+ 142 - 0
imagestreamer/streamer.go

@@ -0,0 +1,142 @@
+// imagestreamer is responsible for handling image passthrough streaming
+package imagestreamer
+
+import (
+	"context"
+	"io"
+	"net/http"
+	"sync"
+
+	"github.com/imgproxy/imgproxy/v3/cookies"
+	"github.com/imgproxy/imgproxy/v3/handlererr"
+	"github.com/imgproxy/imgproxy/v3/headerwriter"
+	"github.com/imgproxy/imgproxy/v3/imagefetcher"
+	"github.com/imgproxy/imgproxy/v3/metrics"
+	"github.com/imgproxy/imgproxy/v3/metrics/stats"
+	"github.com/imgproxy/imgproxy/v3/router"
+	"github.com/imgproxy/imgproxy/v3/stemext"
+	log "github.com/sirupsen/logrus"
+)
+
+const (
+	streamBufferSize = 4096 // Size of the buffer used for streaming
+)
+
+var (
+	// streamBufPool is a sync.Pool for reusing byte slices used for streaming.
+	// Pointers to slices are pooled (rather than slice values) so that
+	// Get/Put do not allocate when boxing the value into interface{}.
+	streamBufPool = sync.Pool{
+		New: func() interface{} {
+			buf := make([]byte, streamBufferSize)
+			return &buf
+		},
+	}
+)
+
+// streamer handles image passthrough requests, allowing images to be streamed directly
+type streamer struct {
+	fetcher             *imagefetcher.Fetcher // Fetcher instance to handle image fetching
+	headerWriterFactory *headerwriter.Factory // Factory for creating header writers
+	config              *Config               // Configuration for the streamer
+	p                   *Request              // Streaming request
+}
+
+// Stream handles the image passthrough request, streaming the image directly to the response writer
+func (s *streamer) Stream(ctx context.Context) {
+	stats.IncImagesInProgress()
+	defer stats.DecImagesInProgress()
+	defer metrics.StartStreamingSegment(ctx)()
+
+	// Passthrough request headers from the original request
+	requestHeaders := s.getPassthroughRequestHeaders()
+	cookieJar := s.getCookieJar(ctx)
+
+	// Build the request to fetch the image.
+	// Only defer Cancel on a non-nil request: if BuildRequest fails it may
+	// return a nil request, and a deferred nil.Cancel() would panic with a
+	// nil pointer dereference during unwinding, masking the actual error.
+	r, err := s.fetcher.BuildRequest(ctx, s.p.ImageURL, requestHeaders, cookieJar)
+	if r != nil {
+		defer r.Cancel()
+	}
+	handlererr.Check(ctx, handlererr.ErrTypeStreaming, err)
+
+	// Send the request to fetch the image
+	res, err := r.Send()
+	if res != nil {
+		defer res.Body.Close()
+	}
+	handlererr.Check(ctx, handlererr.ErrTypeStreaming, err)
+
+	s.writeHeaders(r, res)
+	s.sendData(res)
+}
+
+// getCookieJar returns a non-empty cookie jar if cookie passthrough is
+// enabled, nil otherwise. On failure it reports the error and panics via
+// handlererr.Check.
+func (s *streamer) getCookieJar(ctx context.Context) http.CookieJar {
+	if !s.config.CookiePassthrough {
+		return nil
+	}
+
+	cookieJar, err := cookies.JarFromRequest(s.p.UserRequest)
+	handlererr.Check(ctx, handlererr.ErrTypeStreaming, err)
+
+	return cookieJar
+}
+
+// getPassthroughRequestHeaders builds an http.Header holding only those
+// user-request headers that are listed in the streamer configuration.
+func (s *streamer) getPassthroughRequestHeaders() http.Header {
+	h := http.Header{}
+
+	for _, name := range s.config.PassthroughRequestHeaders {
+		for _, v := range s.p.UserRequest.Header.Values(name) {
+			h.Add(name, v)
+		}
+	}
+
+	return h
+}
+
+// writeHeaders writes the headers to the response writer
+func (s *streamer) writeHeaders(r *imagefetcher.Request, res *http.Response) {
+	hw := s.headerWriterFactory.NewHeaderWriter(s.p.Rw.Header(), r.URL().String())
+
+	// Copy the response headers to the header writer
+	hw.Copy(s.config.KeepResponseHeaders)
+
+	// Set the Content-Length header only when the upstream length is known:
+	// res.ContentLength is -1 for unknown length (e.g. chunked responses),
+	// and the previous implementation only wrote it for non-negative values
+	if res.ContentLength >= 0 {
+		hw.WriteContentLength(int(res.ContentLength))
+	}
+
+	// Try to set correct Content-Disposition file name and extension
+	if res.StatusCode >= 200 && res.StatusCode < 300 {
+		ct := res.Header.Get("Content-Type")
+
+		// Try to best guess the file name and extension
+		stem, ext := stemext.FromURL(r.URL()).
+			SetExtFromContentTypeIfEmpty(ct).
+			OverrideStem(s.p.ProcessingOptions.Filename).
+			StemExt()
+
+		// Write the Content-Disposition header
+		hw.WriteContentDisposition(stem, ext, s.p.ProcessingOptions.ReturnAttachment)
+	}
+
+	hw.Write(s.p.Rw)
+}
+
+// sendData copies the image data from the response body to the response writer,
+// using a pooled buffer to avoid a fresh allocation inside io.CopyBuffer per request.
+func (s *streamer) sendData(res *http.Response) {
+	buf := streamBufPool.Get().(*[]byte)
+	defer streamBufPool.Put(buf)
+
+	_, copyerr := io.CopyBuffer(s.p.Rw, res.Body, *buf)
+
+	// The response is logged before the copy error is checked: by this point
+	// headers (and possibly part of the body) have already been sent, so the
+	// request is logged regardless of how the copy ended.
+	router.LogResponse(
+		s.p.ReqID, s.p.UserRequest, res.StatusCode, nil,
+		log.Fields{
+			"image_url":          s.p.ImageURL,
+			"processing_options": s.p.ProcessingOptions,
+		},
+	)
+
+	if copyerr != nil {
+		// Headers are already written, so the only way to signal failure is to
+		// abort the connection; net/http treats ErrAbortHandler as a sentinel
+		// and suppresses the usual panic stack trace.
+		panic(http.ErrAbortHandler)
+	}
+}

+ 98 - 0
imagestreamer/streamer_test.go

@@ -0,0 +1,98 @@
+package imagestreamer
+
+import (
+	"context"
+	"net/http"
+	"net/http/httptest"
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/imgproxy/imgproxy/v3/config"
+	"github.com/imgproxy/imgproxy/v3/headerwriter"
+	"github.com/imgproxy/imgproxy/v3/imagefetcher"
+	"github.com/imgproxy/imgproxy/v3/options"
+	"github.com/imgproxy/imgproxy/v3/transport"
+	"github.com/stretchr/testify/suite"
+	"go.withmatt.com/httpheaders"
+)
+
+const (
+	testDataPath = "../testdata"
+)
+
+type StreamerTestSuite struct {
+	suite.Suite
+	ts      *httptest.Server
+	service *Service
+}
+
+func (s *StreamerTestSuite) SetupSuite() {
+	config.Reset()
+	config.AllowLoopbackSourceAddresses = true
+
+	s.ts = httptest.NewServer(http.FileServer(http.Dir(testDataPath)))
+}
+
+func (s *StreamerTestSuite) TearDownSuite() {
+	config.Reset()
+	s.ts.Close()
+}
+
+func (s *StreamerTestSuite) SetupTest() {
+	tr, err := transport.NewTransport()
+	s.Require().NoError(err)
+
+	fetcher, err := imagefetcher.NewFetcher(tr, imagefetcher.NewConfigFromEnv())
+	s.Require().NoError(err)
+
+	hwf := headerwriter.NewFactory(headerwriter.NewConfigFromEnv())
+
+	s.service = NewService(NewConfigFromEnv(), fetcher, hwf)
+}
+
+func (s *StreamerTestSuite) TestStreamer() {
+	const testFilePath = "/test1.jpg"
+
+	// Read expected output from test data
+	expected, err := os.ReadFile(filepath.Join(testDataPath, testFilePath))
+	s.Require().NoError(err)
+
+	// Prepare HTTP request and response recorder
+	req := httptest.NewRequest("GET", testFilePath, nil)
+	req.Header.Set(httpheaders.AcceptEncoding, "gzip")
+
+	// Override the test server handler to assert Accept-Encoding header
+	s.ts.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		// Check that the Accept-Encoding header is passed through from original request
+		s.Equal("gzip", r.Header.Get(httpheaders.AcceptEncoding))
+		http.ServeFile(w, r, filepath.Join(testDataPath, r.URL.Path))
+	})
+
+	po := &options.ProcessingOptions{
+		Filename: "xxx", // Override Content-Disposition
+	}
+
+	rr := httptest.NewRecorder()
+
+	p := Request{
+		UserRequest:       req,
+		ImageURL:          s.ts.URL + testFilePath,
+		ReqID:             "test-req-id",
+		ProcessingOptions: po,
+		Rw:                rr,
+	}
+
+	s.service.Stream(context.Background(), &p)
+
+	// Check response body
+	respBody := rr.Body.Bytes()
+	s.Require().Equal(expected, respBody)
+
+	// Check that Content-Disposition header is set correctly
+	s.Require().Equal("inline; filename=\"xxx.jpg\"", rr.Header().Get("Content-Disposition"))
+}
+
+func TestStreamer(t *testing.T) {
+	suite.Run(t, new(StreamerTestSuite))
+}

+ 74 - 150
processing_handler.go

@@ -1,15 +1,12 @@
 package main
 
 import (
-	"context"
 	"errors"
-	"fmt"
 	"net/http"
 	"net/url"
 	"slices"
 	"strconv"
 	"strings"
-	"time"
 
 	log "github.com/sirupsen/logrus"
 	"golang.org/x/sync/semaphore"
@@ -18,17 +15,20 @@ import (
 	"github.com/imgproxy/imgproxy/v3/cookies"
 	"github.com/imgproxy/imgproxy/v3/errorreport"
 	"github.com/imgproxy/imgproxy/v3/etag"
+	"github.com/imgproxy/imgproxy/v3/handlererr"
+	"github.com/imgproxy/imgproxy/v3/headerwriter"
 	"github.com/imgproxy/imgproxy/v3/ierrors"
 	"github.com/imgproxy/imgproxy/v3/imagedata"
 	"github.com/imgproxy/imgproxy/v3/imagefetcher"
+	"github.com/imgproxy/imgproxy/v3/imagestreamer"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
-	"github.com/imgproxy/imgproxy/v3/imath"
 	"github.com/imgproxy/imgproxy/v3/metrics"
 	"github.com/imgproxy/imgproxy/v3/metrics/stats"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/processing"
 	"github.com/imgproxy/imgproxy/v3/router"
 	"github.com/imgproxy/imgproxy/v3/security"
+	"github.com/imgproxy/imgproxy/v3/stemext"
 	"github.com/imgproxy/imgproxy/v3/svg"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
@@ -36,8 +36,6 @@ import (
 var (
 	queueSem      *semaphore.Weighted
 	processingSem *semaphore.Weighted
-
-	headerVaryValue string
 )
 
 func initProcessingHandler() {
@@ -46,94 +44,24 @@ func initProcessingHandler() {
 	}
 
 	processingSem = semaphore.NewWeighted(int64(config.Workers))
-
-	vary := make([]string, 0)
-
-	if config.AutoWebp || config.EnforceWebp || config.AutoAvif || config.EnforceAvif {
-		vary = append(vary, "Accept")
-	}
-
-	if config.EnableClientHints {
-		vary = append(vary, "Sec-CH-DPR", "DPR", "Sec-CH-Width", "Width")
-	}
-
-	headerVaryValue = strings.Join(vary, ", ")
 }
 
-func setCacheControl(rw http.ResponseWriter, force *time.Time, originHeaders map[string]string) {
-	ttl := -1
-
-	if _, ok := originHeaders["Fallback-Image"]; ok && config.FallbackImageTTL > 0 {
-		ttl = config.FallbackImageTTL
-	}
-
-	if force != nil && (ttl < 0 || force.Before(time.Now().Add(time.Duration(ttl)*time.Second))) {
-		ttl = imath.Min(config.TTL, imath.Max(0, int(time.Until(*force).Seconds())))
-	}
+func respondWithImage(hw *headerwriter.Writer, reqID string, r *http.Request, rw http.ResponseWriter, statusCode int, resultData *imagedata.ImageData, po *options.ProcessingOptions, originURL string, originData *imagedata.ImageData) {
+	url, err := url.Parse(originURL)
+	handlererr.Check(r.Context(), handlererr.ErrTypePathParsing, err)
 
-	if config.CacheControlPassthrough && ttl < 0 && originHeaders != nil {
-		if val, ok := originHeaders["Cache-Control"]; ok && len(val) > 0 {
-			rw.Header().Set("Cache-Control", val)
-			return
-		}
+	stem, ext := stemext.FromURL(url).
+		OverrideStem(po.Filename).
+		OverrideExt(resultData.Type.Ext()).
+		StemExtWithFallback()
 
-		if val, ok := originHeaders["Expires"]; ok && len(val) > 0 {
-			if t, err := time.Parse(http.TimeFormat, val); err == nil {
-				ttl = imath.Max(0, int(time.Until(t).Seconds()))
-			}
-		}
-	}
-
-	if ttl < 0 {
-		ttl = config.TTL
-	}
-
-	if ttl > 0 {
-		rw.Header().Set("Cache-Control", fmt.Sprintf("max-age=%d, public", ttl))
-	} else {
-		rw.Header().Set("Cache-Control", "no-cache")
-	}
-}
-
-func setLastModified(rw http.ResponseWriter, originHeaders map[string]string) {
-	if config.LastModifiedEnabled {
-		if val, ok := originHeaders["Last-Modified"]; ok && len(val) != 0 {
-			rw.Header().Set("Last-Modified", val)
-		}
-	}
-}
-
-func setVary(rw http.ResponseWriter) {
-	if len(headerVaryValue) > 0 {
-		rw.Header().Set("Vary", headerVaryValue)
-	}
-}
-
-func setCanonical(rw http.ResponseWriter, originURL string) {
-	if config.SetCanonicalHeader {
-		if strings.HasPrefix(originURL, "https://") || strings.HasPrefix(originURL, "http://") {
-			linkHeader := fmt.Sprintf(`<%s>; rel="canonical"`, originURL)
-			rw.Header().Set("Link", linkHeader)
-		}
-	}
-}
-
-func respondWithImage(reqID string, r *http.Request, rw http.ResponseWriter, statusCode int, resultData *imagedata.ImageData, po *options.ProcessingOptions, originURL string, originData *imagedata.ImageData) {
-	var contentDisposition string
-	if len(po.Filename) > 0 {
-		contentDisposition = resultData.Type.ContentDisposition(po.Filename, po.ReturnAttachment)
-	} else {
-		contentDisposition = resultData.Type.ContentDispositionFromURL(originURL, po.ReturnAttachment)
-	}
-
-	rw.Header().Set("Content-Type", resultData.Type.Mime())
-	rw.Header().Set("Content-Disposition", contentDisposition)
-
-	setCacheControl(rw, po.Expires, originData.Headers)
-	setLastModified(rw, originData.Headers)
-	setVary(rw)
-	setCanonical(rw, originURL)
+	hw.SetMaxAgeFromExpires(po.Expires)
+	hw.WriteContentDisposition(stem, ext, po.ReturnAttachment)
+	hw.WriteContentType(resultData.Type.Mime())
+	hw.WriteLastModified()
+	hw.WriteVary()
 
+	// TODO: think about moving this to the headerwriter
 	if config.EnableDebugHeaders {
 		rw.Header().Set("X-Origin-Content-Length", strconv.Itoa(len(originData.Data)))
 		rw.Header().Set("X-Origin-Width", resultData.Headers["X-Origin-Width"])
@@ -142,18 +70,20 @@ func respondWithImage(reqID string, r *http.Request, rw http.ResponseWriter, sta
 		rw.Header().Set("X-Result-Height", resultData.Headers["X-Result-Height"])
 	}
 
-	rw.Header().Set("Content-Security-Policy", "script-src 'none'")
+	hw.WriteContentLength(len(resultData.Data))
+	hw.WriteCanonical()
+	hw.Write(rw)
 
-	rw.Header().Set("Content-Length", strconv.Itoa(len(resultData.Data)))
 	rw.WriteHeader(statusCode)
-	_, err := rw.Write(resultData.Data)
+
+	_, err = rw.Write(resultData.Data)
 
 	var ierr *ierrors.Error
 	if err != nil {
 		ierr = newResponseWriteError(err)
 
 		if config.ReportIOErrors {
-			sendErr(r.Context(), "IO", ierr)
+			handlererr.Send(r.Context(), handlererr.ErrTypeIO, ierr)
 			errorreport.Report(ierr, r)
 		}
 	}
@@ -167,9 +97,10 @@ func respondWithImage(reqID string, r *http.Request, rw http.ResponseWriter, sta
 	)
 }
 
-func respondWithNotModified(reqID string, r *http.Request, rw http.ResponseWriter, po *options.ProcessingOptions, originURL string, originHeaders map[string]string) {
-	setCacheControl(rw, po.Expires, originHeaders)
-	setVary(rw)
+func respondWithNotModified(hw *headerwriter.Writer, reqID string, r *http.Request, rw http.ResponseWriter, po *options.ProcessingOptions, originURL string, originHeaders map[string]string) {
+	hw.SetMaxAgeFromExpires(po.Expires)
+	hw.WriteVary()
+	hw.Write(rw)
 
 	rw.WriteHeader(304)
 	router.LogResponse(
@@ -181,36 +112,6 @@ func respondWithNotModified(reqID string, r *http.Request, rw http.ResponseWrite
 	)
 }
 
-func sendErr(ctx context.Context, errType string, err error) {
-	send := true
-
-	if ierr, ok := err.(*ierrors.Error); ok {
-		switch ierr.StatusCode() {
-		case http.StatusServiceUnavailable:
-			errType = "timeout"
-		case 499:
-			// Don't need to send a "request cancelled" error
-			send = false
-		}
-	}
-
-	if send {
-		metrics.SendError(ctx, errType, err)
-	}
-}
-
-func sendErrAndPanic(ctx context.Context, errType string, err error) {
-	sendErr(ctx, errType, err)
-	panic(err)
-}
-
-func checkErr(ctx context.Context, errType string, err error) {
-	if err == nil {
-		return
-	}
-	sendErrAndPanic(ctx, errType, err)
-}
-
 func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 	stats.IncRequestsInProgress()
 	defer stats.DecRequestsInProgress()
@@ -233,7 +134,7 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 		signature = path[:signatureEnd]
 		path = path[signatureEnd:]
 	} else {
-		sendErrAndPanic(ctx, "path_parsing", newInvalidURLErrorf(
+		handlererr.SendAndPanic(ctx, handlererr.ErrTypePathParsing, newInvalidURLErrorf(
 			http.StatusNotFound, "Invalid path: %s", path),
 		)
 	}
@@ -241,11 +142,11 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 	path = fixPath(path)
 
 	if err := security.VerifySignature(signature, path); err != nil {
-		sendErrAndPanic(ctx, "security", err)
+		handlererr.SendAndPanic(ctx, handlererr.ErrTypeSecurity, err)
 	}
 
 	po, imageURL, err := options.ParsePath(path, r.Header)
-	checkErr(ctx, "path_parsing", err)
+	handlererr.Check(ctx, handlererr.ErrTypePathParsing, err)
 
 	var imageOrigin any
 	if u, uerr := url.Parse(imageURL); uerr == nil {
@@ -265,16 +166,30 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 	metrics.SetMetadata(ctx, metricsMeta)
 
 	err = security.VerifySourceURL(imageURL)
-	checkErr(ctx, "security", err)
+	handlererr.Check(ctx, handlererr.ErrTypeSecurity, err)
 
 	if po.Raw {
-		streamOriginImage(ctx, reqID, r, rw, po, imageURL)
+		sf := imagestreamer.NewService(
+			imagestreamer.NewConfigFromEnv(),
+			imagedata.Fetcher,
+			headerwriter.NewFactory(headerwriter.NewConfigFromEnv()),
+		)
+
+		p := imagestreamer.Request{
+			UserRequest:       r,
+			ImageURL:          imageURL,
+			ReqID:             reqID,
+			ProcessingOptions: po,
+			Rw:                rw,
+		}
+
+		sf.Stream(ctx, &p)
 		return
 	}
 
 	// SVG is a special case. Though saving to svg is not supported, SVG->SVG is.
 	if !vips.SupportsSave(po.Format) && po.Format != imagetype.Unknown && po.Format != imagetype.SVG {
-		sendErrAndPanic(ctx, "path_parsing", newInvalidURLErrorf(
+		handlererr.SendAndPanic(ctx, handlererr.ErrTypePathParsing, newInvalidURLErrorf(
 			http.StatusUnprocessableEntity,
 			"Resulting image format is not supported: %s", po.Format,
 		))
@@ -317,10 +232,10 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 			// We don't actually need to check timeout here,
 			// but it's an easy way to check if this is an actual timeout
 			// or the request was canceled
-			checkErr(ctx, "queue", router.CheckTimeout(ctx))
+			handlererr.Check(ctx, handlererr.ErrTypeQueue, router.CheckTimeout(ctx))
 			// We should never reach this line as err could be only ctx.Err()
			// and we've already checked for it. But better safe than sorry
-			sendErrAndPanic(ctx, "queue", err)
+			handlererr.SendAndPanic(ctx, handlererr.ErrTypeQueue, err)
 		}
 	}()
 	defer processingSem.Release(1)
@@ -330,7 +245,7 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 
 	statusCode := http.StatusOK
 
-	originData, err := func() (*imagedata.ImageData, error) {
+	originData, originResponseHeaders, err := func() (*imagedata.ImageData, http.Header, error) {
 		defer metrics.StartDownloadingSegment(ctx, metrics.Meta{
 			metrics.MetaSourceImageURL:    metricsMeta[metrics.MetaSourceImageURL],
 			metrics.MetaSourceImageOrigin: metricsMeta[metrics.MetaSourceImageOrigin],
@@ -343,12 +258,15 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 
 		if config.CookiePassthrough {
 			downloadOpts.CookieJar, err = cookies.JarFromRequest(r)
-			checkErr(ctx, "download", err)
+			handlererr.Check(ctx, handlererr.ErrTypeDownload, err)
 		}
 
 		return imagedata.Download(ctx, imageURL, "source image", downloadOpts, po.SecurityOptions)
 	}()
 
+	hwf := headerwriter.NewFactory(headerwriter.NewConfigFromEnv())
+	hw := hwf.NewHeaderWriter(originResponseHeaders, imageURL)
+
 	var nmErr imagefetcher.NotModifiedError
 
 	switch {
@@ -365,20 +283,20 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 			h[k] = nmErr.Headers().Get(k)
 		}
 
-		respondWithNotModified(reqID, r, rw, po, imageURL, h)
+		respondWithNotModified(hw, reqID, r, rw, po, imageURL, h)
 		return
 
 	default:
 		// This may be a request timeout error or a request cancelled error.
 		// Check it before moving further
-		checkErr(ctx, "timeout", router.CheckTimeout(ctx))
+		handlererr.Check(ctx, handlererr.ErrTypeTimeout, router.CheckTimeout(ctx))
 
 		ierr := ierrors.Wrap(err, 0)
 		if config.ReportDownloadingErrors {
 			ierr = ierrors.Wrap(ierr, 0, ierrors.WithShouldReport(true))
 		}
 
-		sendErr(ctx, "download", ierr)
+		handlererr.Send(ctx, handlererr.ErrTypeDownload, ierr)
 
 		if imagedata.FallbackImage == nil {
 			panic(ierr)
@@ -398,10 +316,16 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 			statusCode = ierr.StatusCode()
 		}
 
+		hw.SetMaxAge(config.FallbackImageTTL)
+
+		if config.FallbackImageTTL > 0 {
+			hw.WriteIsFallbackImage()
+		}
+
 		originData = imagedata.FallbackImage
 	}
 
-	checkErr(ctx, "timeout", router.CheckTimeout(ctx))
+	handlererr.Check(ctx, handlererr.ErrTypeTimeout, router.CheckTimeout(ctx))
 
 	if config.ETagEnabled && statusCode == http.StatusOK {
 		imgDataMatch := etagHandler.SetActualImageData(originData)
@@ -409,12 +333,12 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 		rw.Header().Set("ETag", etagHandler.GenerateActualETag())
 
 		if imgDataMatch && etagHandler.ProcessingOptionsMatch() {
-			respondWithNotModified(reqID, r, rw, po, imageURL, originData.Headers)
+			respondWithNotModified(hw, reqID, r, rw, po, imageURL, originData.Headers)
 			return
 		}
 	}
 
-	checkErr(ctx, "timeout", router.CheckTimeout(ctx))
+	handlererr.Check(ctx, handlererr.ErrTypeTimeout, router.CheckTimeout(ctx))
 
 	// Skip processing svg with unknown or the same destination imageType
 	// if it's not forced by AlwaysRasterizeSvg option
@@ -426,20 +350,20 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 	if shouldSkipProcessing {
 		if originData.Type == imagetype.SVG && config.SanitizeSvg {
 			sanitized, svgErr := svg.Sanitize(originData)
-			checkErr(ctx, "svg_processing", svgErr)
+			handlererr.Check(ctx, handlererr.ErrTypeSvgProcessing, svgErr)
 
 			defer sanitized.Close()
 
-			respondWithImage(reqID, r, rw, statusCode, sanitized, po, imageURL, originData)
+			respondWithImage(hw, reqID, r, rw, statusCode, sanitized, po, imageURL, originData)
 			return
 		}
 
-		respondWithImage(reqID, r, rw, statusCode, originData, po, imageURL, originData)
+		respondWithImage(hw, reqID, r, rw, statusCode, originData, po, imageURL, originData)
 		return
 	}
 
 	if !vips.SupportsLoad(originData.Type) {
-		sendErrAndPanic(ctx, "processing", newInvalidURLErrorf(
+		handlererr.SendAndPanic(ctx, handlererr.ErrTypeProcessing, newInvalidURLErrorf(
 			http.StatusUnprocessableEntity,
 			"Source image format is not supported: %s", originData.Type,
 		))
@@ -447,7 +371,7 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 
 	// At this point we can't allow requested format to be SVG as we can't save SVGs
 	if po.Format == imagetype.SVG {
-		sendErrAndPanic(ctx, "processing", newInvalidURLErrorf(
+		handlererr.SendAndPanic(ctx, handlererr.ErrTypeProcessing, newInvalidURLErrorf(
 			http.StatusUnprocessableEntity,
 			"Resulting image format is not supported: svg",
 		))
@@ -459,11 +383,11 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 		})()
 		return processing.ProcessImage(ctx, originData, po)
 	}()
-	checkErr(ctx, "processing", err)
+	handlererr.Check(ctx, handlererr.ErrTypeProcessing, err)
 
 	defer resultData.Close()
 
-	checkErr(ctx, "timeout", router.CheckTimeout(ctx))
+	handlererr.Check(ctx, handlererr.ErrTypeTimeout, router.CheckTimeout(ctx))
 
-	respondWithImage(reqID, r, rw, statusCode, resultData, po, imageURL, originData)
+	respondWithImage(hw, reqID, r, rw, statusCode, resultData, po, imageURL, originData)
 }

+ 78 - 0
stemext/stem_ext.go

@@ -0,0 +1,78 @@
+// stemext package provides methods which help to detect correct stem and ext
+// for the content-disposition header.
+package stemext
+
+import (
+	"mime"
+	"net/url"
+	"path/filepath"
+	"strings"
+)
+
+const (
+	// fallbackStem is used when the stem cannot be determined from the URL.
+	fallbackStem = "image"
+)
+
+// stemExt helps to detect correct stem and ext for content-disposition header.
+type stemExt struct {
+	stem string
+	ext  string
+}
+
+// FromURL creates a new stemExt instance from the provided URL by splitting
+// the last path segment into its stem and extension (the extension keeps its
+// leading dot, e.g. ".jpg").
+func FromURL(url *url.URL) *stemExt {
+	_, filename := filepath.Split(url.Path)
+	ext := filepath.Ext(filename)
+	filename = strings.TrimSuffix(filename, ext)
+
+	return &stemExt{
+		stem: filename,
+		ext:  ext,
+	}
+}
+
+// SetExtFromContentTypeIfEmpty sets the ext field based on the provided content type.
+func (cd *stemExt) SetExtFromContentTypeIfEmpty(contentType string) *stemExt {
+	if len(contentType) == 0 || len(cd.ext) > 0 {
+		return cd
+	}
+
+	if exts, err := mime.ExtensionsByType(contentType); err == nil && len(exts) != 0 {
+		cd.ext = exts[0]
+	}
+
+	return cd
+}
+
+// OverrideExt sets the ext field if the provided ext is not empty.
+func (cd *stemExt) OverrideExt(ext string) *stemExt {
+	if len(ext) > 0 {
+		cd.ext = ext
+	}
+
+	return cd
+}
+
+// OverrideStem sets the stem field if the provided stem is not empty.
+func (cd *stemExt) OverrideStem(stem string) *stemExt {
+	if len(stem) > 0 {
+		cd.stem = stem
+	}
+
+	return cd
+}
+
+// StemExtWithFallback returns stem and ext, but if stem is empty, it uses a fallback value.
+func (cd *stemExt) StemExtWithFallback() (string, string) {
+	if len(cd.stem) == 0 {
+		cd.stem = fallbackStem
+	}
+
+	return cd.stem, cd.ext
+}
+
+// StemExt returns the tuple of stem and ext.
+func (cd *stemExt) StemExt() (string, string) {
+	return cd.stem, cd.ext
+}

+ 105 - 0
stemext/stem_ext_test.go

@@ -0,0 +1,105 @@
+package stemext
+
+import (
+	"net/url"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestStemExt(t *testing.T) {
+	// Test cases for stem and ext detection.
+	// Subtest names must be unique: t.Run silently disambiguates duplicates
+	// with a #01 suffix, which makes failure reports ambiguous.
+	tests := []struct {
+		name string
+		url  string
+		stem string
+		ext  string
+		fn   func(*stemExt) (string, string)
+	}{
+		{
+			name: "BasicURL",
+			url:  "http://example.com/test.jpg",
+			stem: "test",
+			ext:  ".jpg",
+			fn: func(se *stemExt) (string, string) {
+				return se.StemExt()
+			},
+		},
+		{
+			name: "EmptyFilename",
+			url:  "http://example.com/path/to/",
+			stem: "",
+			ext:  "",
+			fn: func(se *stemExt) (string, string) {
+				return se.StemExt()
+			},
+		},
+		{
+			name: "EmptyFilenameWithContentType",
+			url:  "http://example.com/path/to/",
+			stem: "",
+			ext:  ".png",
+			fn: func(se *stemExt) (string, string) {
+				return se.SetExtFromContentTypeIfEmpty("image/png").StemExt()
+			},
+		},
+		{
+			name: "EmptyFilenameWithContentTypeAndOverride",
+			url:  "http://example.com/path/to/",
+			stem: "example",
+			ext:  ".png",
+			fn: func(se *stemExt) (string, string) {
+				return se.OverrideStem("example").SetExtFromContentTypeIfEmpty("image/png").StemExt()
+			},
+		},
+		{
+			name: "EmptyFilenameWithOverride",
+			url:  "http://example.com/path/to/",
+			stem: "example",
+			ext:  ".jpg",
+			fn: func(se *stemExt) (string, string) {
+				return se.OverrideStem("example").OverrideExt(".jpg").StemExt()
+			},
+		},
+		{
+			name: "PresentFilenameWithOverride",
+			url:  "http://example.com/path/to/face",
+			stem: "face",
+			ext:  ".jpg",
+			fn: func(se *stemExt) (string, string) {
+				return se.OverrideExt(".jpg").StemExt()
+			},
+		},
+		{
+			name: "PresentFilenameWithStemAndExtOverride",
+			url:  "http://example.com/path/to/123",
+			stem: "face",
+			ext:  ".jpg",
+			fn: func(se *stemExt) (string, string) {
+				return se.OverrideStem("face").OverrideExt(".jpg").StemExt()
+			},
+		},
+		{
+			name: "EmptyFilenameWithFallback",
+			url:  "http://example.com/path/to/",
+			stem: "image",
+			ext:  ".png",
+			fn: func(se *stemExt) (string, string) {
+				return se.SetExtFromContentTypeIfEmpty("image/png").StemExtWithFallback()
+			},
+		},
+	}
+
+	for _, tc := range tests {
+		t.Run(tc.name, func(t *testing.T) {
+			u, err := url.Parse(tc.url)
+			require.NoError(t, err)
+
+			se := FromURL(u)
+			stem, ext := tc.fn(se)
+
+			require.Equal(t, tc.stem, stem)
+			require.Equal(t, tc.ext, ext)
+		})
+	}
+}

+ 0 - 141
stream.go

@@ -1,141 +0,0 @@
-package main
-
-import (
-	"context"
-	"io"
-	"mime"
-	"net/http"
-	"path/filepath"
-	"strconv"
-	"sync"
-
-	log "github.com/sirupsen/logrus"
-
-	"github.com/imgproxy/imgproxy/v3/config"
-	"github.com/imgproxy/imgproxy/v3/cookies"
-	"github.com/imgproxy/imgproxy/v3/imagedata"
-	"github.com/imgproxy/imgproxy/v3/imagetype"
-	"github.com/imgproxy/imgproxy/v3/metrics"
-	"github.com/imgproxy/imgproxy/v3/metrics/stats"
-	"github.com/imgproxy/imgproxy/v3/options"
-	"github.com/imgproxy/imgproxy/v3/router"
-)
-
var (
	// streamReqHeaders lists the client request headers that are forwarded
	// to the origin when streaming an image without processing (conditional
	// and range requests pass through).
	streamReqHeaders = []string{
		"If-None-Match",
		"Accept-Encoding",
		"Range",
	}

	// streamRespHeaders lists the origin response headers that are copied
	// back to the client response during streaming.
	streamRespHeaders = []string{
		"ETag",
		"Content-Type",
		"Content-Encoding",
		"Content-Range",
		"Accept-Ranges",
		"Last-Modified",
	}

	// streamBufPool reuses 4 KiB copy buffers across streaming requests so
	// io.CopyBuffer doesn't allocate a fresh buffer per request. Pointers to
	// slices are pooled to avoid an extra allocation on Put.
	streamBufPool = sync.Pool{
		New: func() interface{} {
			buf := make([]byte, 4096)
			return &buf
		},
	}
)
-
-func streamOriginImage(ctx context.Context, reqID string, r *http.Request, rw http.ResponseWriter, po *options.ProcessingOptions, imageURL string) {
-	stats.IncImagesInProgress()
-	defer stats.DecImagesInProgress()
-
-	defer metrics.StartStreamingSegment(ctx)()
-
-	var (
-		cookieJar http.CookieJar
-		err       error
-	)
-
-	imgRequestHeader := make(http.Header)
-
-	for _, k := range streamReqHeaders {
-		if v := r.Header.Get(k); len(v) != 0 {
-			imgRequestHeader.Set(k, v)
-		}
-	}
-
-	if config.CookiePassthrough {
-		cookieJar, err = cookies.JarFromRequest(r)
-		checkErr(ctx, "streaming", err)
-	}
-
-	req, err := imagedata.Fetcher.BuildRequest(r.Context(), imageURL, imgRequestHeader, cookieJar)
-	defer req.Cancel()
-	checkErr(ctx, "streaming", err)
-
-	res, err := req.Send()
-	if res != nil {
-		defer res.Body.Close()
-	}
-	checkErr(ctx, "streaming", err)
-
-	for _, k := range streamRespHeaders {
-		vv := res.Header.Values(k)
-		for _, v := range vv {
-			rw.Header().Set(k, v)
-		}
-	}
-
-	if res.ContentLength >= 0 {
-		rw.Header().Set("Content-Length", strconv.Itoa(int(res.ContentLength)))
-	}
-
-	if res.StatusCode < 300 {
-		var filename, ext, mimetype string
-
-		_, filename = filepath.Split(req.URL().Path)
-		ext = filepath.Ext(filename)
-
-		if len(po.Filename) > 0 {
-			filename = po.Filename
-		} else {
-			filename = filename[:len(filename)-len(ext)]
-		}
-
-		mimetype = rw.Header().Get("Content-Type")
-
-		if len(ext) == 0 && len(mimetype) > 0 {
-			if exts, err := mime.ExtensionsByType(mimetype); err == nil && len(exts) != 0 {
-				ext = exts[0]
-			}
-		}
-
-		rw.Header().Set("Content-Disposition", imagetype.ContentDisposition(filename, ext, po.ReturnAttachment))
-	}
-
-	setCacheControl(rw, po.Expires, map[string]string{
-		"Cache-Control": res.Header.Get("Cache-Control"),
-		"Expires":       res.Header.Get("Expires"),
-	})
-	setCanonical(rw, imageURL)
-	rw.Header().Set("Content-Security-Policy", "script-src 'none'")
-
-	rw.WriteHeader(res.StatusCode)
-
-	buf := streamBufPool.Get().(*[]byte)
-	defer streamBufPool.Put(buf)
-
-	_, copyerr := io.CopyBuffer(rw, res.Body, *buf)
-
-	router.LogResponse(
-		reqID, r, res.StatusCode, nil,
-		log.Fields{
-			"image_url":          imageURL,
-			"processing_options": po,
-		},
-	)
-
-	if copyerr != nil {
-		panic(http.ErrAbortHandler)
-	}
-}