Jelajahi Sumber

Introduced new image data

Viktor Sokolov 2 bulan lalu
induk
melakukan
67629a5117
46 mengubah file dengan 1172 tambahan dan 460 penghapusan
  1. 1 1
      .devcontainer/oss/README.md
  2. 1 1
      .devcontainer/oss/devcontainer.json
  3. 9 19
      asyncbuffer/buffer.go
  4. 13 0
      asyncbuffer/buffer_test.go
  5. 34 0
      auximageprovider/aux_image_provider.go
  6. 236 0
      auximageprovider/aux_image_provider_test.go
  7. 69 0
      auximageprovider/factory.go
  8. 1 1
      etag/etag.go
  9. 2 2
      etag/etag_test.go
  10. 44 106
      imagedata/download.go
  11. 16 110
      imagedata/image_data.go
  12. 57 0
      imagedata/legacy.go
  13. 0 64
      imagedata/read.go
  14. 46 0
      imagedatanew/download.go
  15. 111 0
      imagedatanew/factory.go
  16. 92 0
      imagedatanew/image_data.go
  17. 71 29
      imagedatanew/image_data_test.go
  18. 122 0
      imagedownloader/downloader.go
  19. 14 0
      imagefetcher/config.go
  20. 5 5
      imagefetcher/fetcher.go
  21. 2 2
      main.go
  22. 2 2
      processing/apply_filters.go
  23. 3 3
      processing/crop.go
  24. 2 2
      processing/export_color_profile.go
  25. 3 3
      processing/extend.go
  26. 2 2
      processing/fix_size.go
  27. 2 2
      processing/flatten.go
  28. 2 2
      processing/import_color_profile.go
  29. 2 2
      processing/padding.go
  30. 3 3
      processing/pipeline.go
  31. 4 4
      processing/prepare.go
  32. 48 13
      processing/processing.go
  33. 5 3
      processing/processing_test.go
  34. 2 2
      processing/rotate_and_flip.go
  35. 2 2
      processing/scale.go
  36. 11 10
      processing/scale_on_load.go
  37. 2 2
      processing/strip_metadata.go
  38. 4 3
      processing/trim.go
  39. 13 7
      processing/watermark.go
  40. 45 29
      processing_handler.go
  41. 16 15
      processing_handler_test.go
  42. 23 0
      security/limit.go
  43. 7 0
      security/meta_dimensions.go
  44. 8 6
      stream.go
  45. 13 1
      svg/svg.go
  46. 2 2
      svg/svg_test.go

+ 1 - 1
.devcontainer/oss/README.md

@@ -23,6 +23,6 @@ Port `8080` is forwared to the host.
 
 # Test images
 
-[test images repo](https://github.com/imgproxy/test-images.git) will be automatically cloned or pulled to `.devcontainer/images` folder before the container starts.
+[test images repo](https://github.com/imgproxy/test-images.git) is cloned as a submodule in `testdata/test-images`
 
 [Try it](http://localhost:8080/insecure/rs:fit:300:200/plain/local:///kitten.jpg@png). -->

+ 1 - 1
.devcontainer/oss/devcontainer.json

@@ -16,7 +16,7 @@
     },
     "mounts": [
         {
-            "source": "${localWorkspaceFolder}/.devcontainer/images",
+            "source": "${localWorkspaceFolder}/testdata/test-images",
             "target": "/images",
             "type": "bind"
         }

+ 9 - 19
asyncbuffer/buffer.go

@@ -269,7 +269,7 @@ func (ab *AsyncBuffer) Error() error {
 // Chunk must be available when this method is called.
 // Returns the number of bytes copied to the slice or 0 if chunk has no data
 // (eg. offset is beyond the end of the stream).
-func (ab *AsyncBuffer) readChunkAt(p []byte, off, rem int64) int {
+func (ab *AsyncBuffer) readChunkAt(p []byte, off int64) int {
 	// If the chunk is not available, we return 0
 	if off >= ab.len.Load() {
 		return 0
@@ -286,17 +286,9 @@ func (ab *AsyncBuffer) readChunkAt(p []byte, off, rem int64) int {
 		return 0
 	}
 
-	// How many bytes we could read from the chunk. No more than:
-	// - left to read totally
-	// - chunk size minus the start offset
-	// - chunk has
-	size := min(rem, ChunkSize-startOffset, int64(len(chunk.data)))
-
-	if size == 0 {
-		return 0
-	}
-
-	return copy(p, chunk.data[startOffset:startOffset+size])
+	// Copy data to the target slice. The number of bytes to copy is limited by the
+	// size of the target slice and the size of the data in the chunk.
+	return copy(p, chunk.data[startOffset:])
 }
 
 // readAt reads data from the AsyncBuffer at the given offset.
@@ -333,7 +325,7 @@ func (ab *AsyncBuffer) readAt(p []byte, off int64) (int, error) {
 	}
 
 	// Read data from the first chunk
-	n := ab.readChunkAt(p, off, size)
+	n := ab.readChunkAt(p, off)
 	if n == 0 {
 		return 0, io.EOF // Failed to read any data: means we tried to read beyond the end of the stream
 	}
@@ -350,7 +342,7 @@ func (ab *AsyncBuffer) readAt(p []byte, off int64) (int, error) {
 		}
 
 		// Read data from the next chunk
-		nX := ab.readChunkAt(p[n:], off, size)
+		nX := ab.readChunkAt(p[n:], off)
 		n += nX
 		size -= int64(nX)
 		off += int64(nX)
@@ -402,13 +394,11 @@ func (ab *AsyncBuffer) Reader() *Reader {
 // Read reads data from the AsyncBuffer.
 func (r *Reader) Read(p []byte) (int, error) {
 	n, err := r.ab.readAt(p, r.pos)
-	if err != nil {
-		return n, err
+	if err == nil {
+		r.pos += int64(n)
 	}
 
-	r.pos += int64(n)
-
-	return n, nil
+	return n, err
 }
 
 // Seek sets the position of the reader to the given offset and returns the new position

+ 13 - 0
asyncbuffer/buffer_test.go

@@ -5,6 +5,7 @@ import (
 	"crypto/rand"
 	"errors"
 	"io"
+	"os"
 	"sync"
 	"sync/atomic"
 	"testing"
@@ -346,3 +347,15 @@ func TestAsyncBufferReadAsync(t *testing.T) {
 	require.ErrorIs(t, io.EOF, err)
 	assert.Equal(t, 0, n)
 }
+
+// TestAsyncBufferReadAllCompatibility tests that io.ReadAll works as expected
+// on an AsyncBuffer reader: it must return exactly as many bytes as the source.
+func TestAsyncBufferReadAllCompatibility(t *testing.T) {
+	source, err := os.ReadFile("../testdata/test1.jpg")
+	require.NoError(t, err)
+	asyncBuffer := FromReader(bytes.NewReader(source))
+	defer asyncBuffer.Close()
+
+	b, err := io.ReadAll(asyncBuffer.Reader())
+	require.NoError(t, err)
+	require.Len(t, b, len(source))
+}

+ 34 - 0
auximageprovider/aux_image_provider.go

@@ -0,0 +1,34 @@
+// Package auximageprovider exposes an interface for retrieving auxiliary images
+// such as watermarks and fallbacks. The default implementation stores those in memory.
+package auximageprovider
+
+import (
+	"context"
+	"net/http"
+
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
+	"github.com/imgproxy/imgproxy/v3/options"
+)
+
+// AuxImageProvider is an interface that provides image data and headers based
+// on processing options. It is used to retrieve WatermarkImage and FallbackImage.
+type AuxImageProvider interface {
+	Get(context.Context, *options.ProcessingOptions) (imagedatanew.ImageData, http.Header, error)
+}
+
+// memoryAuxImageProvider is a simple implementation of AuxImageProvider, which returns
+// a static saved image data and headers.
+type memoryAuxImageProvider struct {
+	data    imagedatanew.ImageData
+	headers http.Header
+}
+
+// newStaticAuxImageProvider creates a new memoryAuxImageProvider with the given image data and headers.
+func newStaticAuxImageProvider(data imagedatanew.ImageData, headers http.Header) AuxImageProvider {
+	return &memoryAuxImageProvider{data: data, headers: headers}
+}
+
+// Get returns the static image data and headers stored in the provider.
+// The context and processing options are ignored: the same data is always returned.
+func (s *memoryAuxImageProvider) Get(_ context.Context, po *options.ProcessingOptions) (imagedatanew.ImageData, http.Header, error) {
+	return s.data, s.headers, nil
+}

+ 236 - 0
auximageprovider/aux_image_provider_test.go

@@ -0,0 +1,236 @@
+package auximageprovider
+
+import (
+	"context"
+	"encoding/base64"
+	"io"
+	"net/http"
+	"net/http/httptest"
+	"os"
+	"strconv"
+	"testing"
+
+	"github.com/stretchr/testify/suite"
+
+	"github.com/imgproxy/imgproxy/v3/config"
+	"github.com/imgproxy/imgproxy/v3/imagedownloader"
+	"github.com/imgproxy/imgproxy/v3/imagefetcher"
+	"github.com/imgproxy/imgproxy/v3/imagetype"
+	"github.com/imgproxy/imgproxy/v3/options"
+	"github.com/imgproxy/imgproxy/v3/transport"
+)
+
+// ImageProviderTestSuite exercises the Factory constructors and the static
+// provider's Get method against a local httptest server.
+type ImageProviderTestSuite struct {
+	suite.Suite
+
+	server      *httptest.Server
+	factory     *Factory
+	downloader  *imagedownloader.Downloader
+	testData    []byte
+	testDataB64 string
+
+	// Server state (mutated per test, served by the handler below)
+	status int
+	data   []byte
+	header http.Header
+}
+
+// SetupSuite loads the fixture image, builds the downloader/factory pair,
+// and starts a test HTTP server that serves the configurable suite state.
+func (s *ImageProviderTestSuite) SetupSuite() {
+	config.Reset()
+	config.AllowLoopbackSourceAddresses = true
+
+	// Load test image data
+	f, err := os.Open("../testdata/test1.jpg")
+	s.Require().NoError(err)
+	defer f.Close()
+
+	data, err := io.ReadAll(f)
+	s.Require().NoError(err)
+
+	s.testData = data
+	s.testDataB64 = base64.StdEncoding.EncodeToString(data)
+
+	// Create downloader
+	tr, err := transport.NewTransport()
+	s.Require().NoError(err)
+
+	fetcher, err := imagefetcher.NewFetcher(tr, imagefetcher.NewConfigFromEnv())
+	s.Require().NoError(err)
+
+	s.downloader = imagedownloader.NewDownloader(fetcher)
+	s.factory = NewFactory(s.downloader)
+
+	// Create test server; it echoes the suite's headers/status and falls back
+	// to the fixture image when no explicit body is configured
+	s.server = httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+		for k, vv := range s.header {
+			for _, v := range vv {
+				rw.Header().Add(k, v)
+			}
+		}
+
+		data := s.data
+		if data == nil {
+			data = s.testData
+		}
+
+		rw.Header().Set("Content-Length", strconv.Itoa(len(data)))
+		rw.WriteHeader(s.status)
+		rw.Write(data)
+	}))
+}
+
+// TearDownSuite stops the test HTTP server.
+func (s *ImageProviderTestSuite) TearDownSuite() {
+	s.server.Close()
+}
+
+// SetupTest resets the server state to a successful JPEG response.
+func (s *ImageProviderTestSuite) SetupTest() {
+	s.status = http.StatusOK
+	s.data = nil
+	s.header = http.Header{}
+	s.header.Set("Content-Type", "image/jpeg")
+}
+
+// readImageData fetches the provider's image via Get and returns its raw bytes,
+// failing the test on any error.
+func (s *ImageProviderTestSuite) readImageData(provider AuxImageProvider) []byte {
+	imgData, _, err := provider.Get(context.Background(), &options.ProcessingOptions{})
+	s.Require().NoError(err)
+	s.Require().NotNil(imgData)
+	defer imgData.Close()
+
+	reader := imgData.Reader()
+	data, err := io.ReadAll(reader)
+	s.Require().NoError(err)
+	return data
+}
+
+// NOTE(review): the verification steps below duplicate readImageData; consider
+// reusing the helper here and in the base64/URL variants.
+func (s *ImageProviderTestSuite) TestNewFromFile() {
+	provider, err := s.factory.NewMemoryFromFile("../testdata/test1.jpg")
+	s.Require().NoError(err)
+	s.Require().NotNil(provider)
+
+	// Test Get method
+	imgData, headers, err := provider.Get(context.Background(), &options.ProcessingOptions{})
+	s.Require().NoError(err)
+	s.Require().NotNil(imgData)
+	s.Require().NotNil(headers)
+	defer imgData.Close()
+
+	// Verify image data
+	reader := imgData.Reader()
+	data, err := io.ReadAll(reader)
+	s.Require().NoError(err)
+	s.Equal(s.testData, data)
+
+	// Verify image format
+	s.Equal(imagetype.JPEG, imgData.Format())
+}
+
+// A missing file must produce an error and a nil provider.
+func (s *ImageProviderTestSuite) TestNewFromFileNonExistent() {
+	provider, err := s.factory.NewMemoryFromFile("../testdata/non-existent.jpg")
+	s.Require().Error(err)
+	s.Require().Nil(provider)
+}
+
+func (s *ImageProviderTestSuite) TestNewFromBase64() {
+	provider, err := s.factory.NewMemoryFromBase64(s.testDataB64)
+	s.Require().NoError(err)
+	s.Require().NotNil(provider)
+
+	// Test Get method
+	imgData, headers, err := provider.Get(context.Background(), &options.ProcessingOptions{})
+	s.Require().NoError(err)
+	s.Require().NotNil(imgData)
+	s.Require().NotNil(headers)
+	defer imgData.Close()
+
+	// Verify image data
+	reader := imgData.Reader()
+	data, err := io.ReadAll(reader)
+	s.Require().NoError(err)
+	s.Equal(s.testData, data)
+
+	// Verify image format
+	s.Equal(imagetype.JPEG, imgData.Format())
+}
+
+// Malformed base64 input must produce an error and a nil provider.
+func (s *ImageProviderTestSuite) TestNewFromBase64Invalid() {
+	provider, err := s.factory.NewMemoryFromBase64("invalid-base64")
+	s.Require().Error(err)
+	s.Require().Nil(provider)
+}
+
+// Valid base64 that does not decode to an image must also fail.
+func (s *ImageProviderTestSuite) TestNewFromBase64InvalidImage() {
+	invalidB64 := base64.StdEncoding.EncodeToString([]byte("not an image"))
+	provider, err := s.factory.NewMemoryFromBase64(invalidB64)
+	s.Require().Error(err)
+	s.Require().Nil(provider)
+}
+
+func (s *ImageProviderTestSuite) TestNewFromURL() {
+	provider, err := s.factory.NewMemoryURL(context.Background(), s.server.URL)
+	s.Require().NoError(err)
+	s.Require().NotNil(provider)
+
+	// Test Get method
+	imgData, headers, err := provider.Get(context.Background(), &options.ProcessingOptions{})
+	s.Require().NoError(err)
+	s.Require().NotNil(imgData)
+	s.Require().NotNil(headers)
+	defer imgData.Close()
+
+	// Verify image data
+	reader := imgData.Reader()
+	data, err := io.ReadAll(reader)
+	s.Require().NoError(err)
+	s.Equal(s.testData, data)
+
+	// Verify image format
+	s.Equal(imagetype.JPEG, imgData.Format())
+}
+
+// A 404 response must produce an error and a nil provider.
+func (s *ImageProviderTestSuite) TestNewFromURLNotFound() {
+	s.status = http.StatusNotFound
+	s.data = []byte("Not Found")
+	s.header.Set("Content-Type", "text/plain")
+
+	provider, err := s.factory.NewMemoryURL(context.Background(), s.server.URL)
+	s.Require().Error(err)
+	s.Require().Nil(provider)
+}
+
+// A 200 response whose body is not an image must also fail.
+func (s *ImageProviderTestSuite) TestNewFromURLInvalidImage() {
+	s.data = []byte("not an image")
+
+	provider, err := s.factory.NewMemoryURL(context.Background(), s.server.URL)
+	s.Require().Error(err)
+	s.Require().Nil(provider)
+}
+
+// TestNewFromTriple verifies the base64 > file > URL precedence of NewMemoryTriple.
+func (s *ImageProviderTestSuite) TestNewFromTriple() {
+	// Test with base64 (should prefer base64)
+	provider, err := s.factory.NewMemoryTriple(s.testDataB64, "../testdata/test1.jpg", s.server.URL)
+	s.Require().NoError(err)
+	s.Require().NotNil(provider)
+	s.Equal(s.testData, s.readImageData(provider))
+
+	// Test with file path (no base64)
+	provider, err = s.factory.NewMemoryTriple("", "../testdata/test1.jpg", s.server.URL)
+	s.Require().NoError(err)
+	s.Require().NotNil(provider)
+	s.Equal(s.testData, s.readImageData(provider))
+
+	// Test with URL (no base64 or file)
+	provider, err = s.factory.NewMemoryTriple("", "", s.server.URL)
+	s.Require().NoError(err)
+	s.Require().NotNil(provider)
+	s.Equal(s.testData, s.readImageData(provider))
+
+	// Test with no inputs: no error, but no provider either
+	provider, err = s.factory.NewMemoryTriple("", "", "")
+	s.Require().NoError(err)
+	s.Nil(provider)
+}
+
+// TestImageProvider runs the suite.
+func TestImageProvider(t *testing.T) {
+	suite.Run(t, new(ImageProviderTestSuite))
+}

+ 69 - 0
auximageprovider/factory.go

@@ -0,0 +1,69 @@
+package auximageprovider
+
+import (
+	"context"
+	"net/http"
+
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
+	"github.com/imgproxy/imgproxy/v3/imagedownloader"
+	"github.com/imgproxy/imgproxy/v3/security"
+)
+
+// Factory is a struct that provides methods to create AuxImageProvider instances.
+type Factory struct {
+	downloader *imagedownloader.Downloader
+}
+
+// NewFactory creates a new Factory instance with the given downloader.
+func NewFactory(downloader *imagedownloader.Downloader) *Factory {
+	return &Factory{
+		downloader: downloader,
+	}
+}
+
+// NewMemoryFromBase64 creates a new AuxImageProvider from a base64 encoded string.
+// It stores the decoded image in memory; the resulting provider has empty headers.
+func (f *Factory) NewMemoryFromBase64(b64 string) (AuxImageProvider, error) {
+	img, err := imagedatanew.NewFromBase64(b64, make(http.Header), security.DefaultOptions())
+	if err != nil {
+		return nil, err
+	}
+
+	return newStaticAuxImageProvider(img, make(http.Header)), nil
+}
+
+// NewMemoryFromFile creates a new AuxImageProvider from a local file path.
+// It stores the image in memory; the resulting provider has empty headers.
+func (f *Factory) NewMemoryFromFile(path string) (AuxImageProvider, error) {
+	img, err := imagedatanew.NewFromFile(path, make(http.Header), security.DefaultOptions())
+	if err != nil {
+		return nil, err
+	}
+
+	return newStaticAuxImageProvider(img, make(http.Header)), nil
+}
+
+// NewMemoryURL creates a new AuxImageProvider by downloading the image from a URL.
+// The downloaded response headers are kept on the provider.
+func (f *Factory) NewMemoryURL(ctx context.Context, url string) (AuxImageProvider, error) {
+	img, err := f.downloader.DownloadWithDesc(ctx, url, "ImageProvider", imagedownloader.DownloadOptions{}, security.DefaultOptions())
+	if err != nil {
+		return nil, err
+	}
+
+	// img.Headers() is deprecated; silenced until the provider stops needing them
+	//nolint:staticcheck
+	return newStaticAuxImageProvider(img, img.Headers()), nil
+}
+
+// NewMemoryTriple creates a new AuxImageProvider from either a base64 string, a file
+// path, or a URL, preferred in that order. When all three are empty it returns
+// (nil, nil) — callers must handle the nil provider.
+func (f *Factory) NewMemoryTriple(b64 string, path string, url string) (AuxImageProvider, error) {
+	switch {
+	case len(b64) > 0:
+		return f.NewMemoryFromBase64(b64)
+	case len(path) > 0:
+		return f.NewMemoryFromFile(path)
+	case len(url) > 0:
+		return f.NewMemoryURL(context.Background(), url)
+	}
+
+	return nil, nil
+}

+ 1 - 1
etag/etag.go

@@ -107,7 +107,7 @@ func (h *Handler) ImageEtagExpected() string {
 
 func (h *Handler) SetActualImageData(imgdata *imagedata.ImageData) bool {
 	var haveActualImgETag bool
-	h.imgEtagActual, haveActualImgETag = imgdata.Headers["ETag"]
+	h.imgEtagActual, haveActualImgETag = imgdata.Headers["Etag"]
 	haveActualImgETag = haveActualImgETag && len(h.imgEtagActual) > 0
 
 	// Just in case server didn't check ETag properly and returned the same one

+ 2 - 2
etag/etag_test.go

@@ -19,7 +19,7 @@ var (
 
 	imgWithETag = imagedata.ImageData{
 		Data:    []byte("Hello Test"),
-		Headers: map[string]string{"ETag": `"loremipsumdolor"`},
+		Headers: map[string]string{"Etag": `"loremipsumdolor"`},
 	}
 	imgWithoutETag = imagedata.ImageData{
 		Data: []byte("Hello Test"),
@@ -93,7 +93,7 @@ func (s *EtagTestSuite) TestImageETagExpectedPresent() {
 	s.h.ParseExpectedETag(etagReq)
 
 	//nolint:testifylint // False-positive expected-actual
-	s.Require().Equal(imgWithETag.Headers["ETag"], s.h.ImageEtagExpected())
+	s.Require().Equal(imgWithETag.Headers["Etag"], s.h.ImageEtagExpected())
 }
 
 func (s *EtagTestSuite) TestImageETagExpectedBlank() {

+ 44 - 106
imagedata/download.go

@@ -1,112 +1,50 @@
 package imagedata
 
-import (
-	"context"
-	"net/http"
-	"slices"
-
-	"github.com/imgproxy/imgproxy/v3/config"
-	"github.com/imgproxy/imgproxy/v3/ierrors"
-	"github.com/imgproxy/imgproxy/v3/imagefetcher"
-	"github.com/imgproxy/imgproxy/v3/security"
-	"github.com/imgproxy/imgproxy/v3/transport"
-	"go.withmatt.com/httpheaders"
-)
-
 var (
-	Fetcher *imagefetcher.Fetcher
+// Fetcher *imagefetcher.Fetcher
+// downloader *imagedownloader.Downloader
 
-	// For tests
-	redirectAllRequestsTo string
-
-	// keepResponseHeaders is a list of HTTP headers that should be preserved in the response
-	keepResponseHeaders = []string{
-		httpheaders.CacheControl,
-		httpheaders.Expires,
-		httpheaders.LastModified,
-		// NOTE:
-		// httpheaders.Etag == "Etag".
-		// Http header names are case-insensitive, but we rely on the case in most cases.
-		// We must migrate to http.Headers and the subsequent methods everywhere.
-		httpheaders.Etag,
-	}
+// For tests
+// redirectAllRequestsTo string
 )
 
-type DownloadOptions struct {
-	Header    http.Header
-	CookieJar http.CookieJar
-}
-
-func initDownloading() error {
-	ts, err := transport.NewTransport()
-	if err != nil {
-		return err
-	}
-
-	Fetcher, err = imagefetcher.NewFetcher(ts, config.MaxRedirects)
-	if err != nil {
-		return ierrors.Wrap(err, 0, ierrors.WithPrefix("can't create image fetcher"))
-	}
-
-	return nil
-}
-
-func download(ctx context.Context, imageURL string, opts DownloadOptions, secopts security.Options) (*ImageData, error) {
-	// We use this for testing
-	if len(redirectAllRequestsTo) > 0 {
-		imageURL = redirectAllRequestsTo
-	}
-
-	req, err := Fetcher.BuildRequest(ctx, imageURL, opts.Header, opts.CookieJar)
-	if err != nil {
-		return nil, err
-	}
-	defer req.Cancel()
-
-	res, err := req.FetchImage()
-	if err != nil {
-		if res != nil {
-			res.Body.Close()
-		}
-		return nil, err
-	}
-
-	res, err = security.LimitResponseSize(res, secopts)
-	if res != nil {
-		defer res.Body.Close()
-	}
-	if err != nil {
-		return nil, err
-	}
-
-	imgdata, err := readAndCheckImage(res.Body, int(res.ContentLength), secopts)
-	if err != nil {
-		return nil, ierrors.Wrap(err, 0)
-	}
-
-	h := make(map[string]string)
-	for k := range res.Header {
-		if !slices.Contains(keepResponseHeaders, k) {
-			continue
-		}
-
-		// TODO: Fix Etag/ETag inconsistency
-		if k == "Etag" {
-			h["ETag"] = res.Header.Get(k)
-		} else {
-			h[k] = res.Header.Get(k)
-		}
-	}
-
-	imgdata.Headers = h
-
-	return imgdata, nil
-}
-
-func RedirectAllRequestsTo(u string) {
-	redirectAllRequestsTo = u
-}
-
-func StopRedirectingRequests() {
-	redirectAllRequestsTo = ""
-}
+// type DownloadOptions struct {
+// 	Header    http.Header
+// 	CookieJar http.CookieJar
+// }
+
+// func initDownloading() error {
+// 	ts, err := transport.NewTransport()
+// 	if err != nil {
+// 		return err
+// 	}
+
+// 	Fetcher, err = imagefetcher.NewFetcher(ts, imagefetcher.NewConfigFromEnv())
+// 	if err != nil {
+// 		return ierrors.Wrap(err, 0, ierrors.WithPrefix("can't create image fetcher"))
+// 	}
+
+// 	downloader = imagedownloader.NewDownloader(Fetcher)
+
+// 	return nil
+// }
+
+// func download(ctx context.Context, imageURL string, opts DownloadOptions, secopts security.Options) (imagedatanew.ImageData, error) {
+// 	// We use this for testing
+// 	if len(redirectAllRequestsTo) > 0 {
+// 		imageURL = redirectAllRequestsTo
+// 	}
+
+// 	return downloader.Download(ctx, imageURL, imagedownloader.DownloadOptions{
+// 		Header:    opts.Header,
+// 		CookieJar: opts.CookieJar,
+// 	}, secopts)
+// }
+
+// func RedirectAllRequestsTo(u string) {
+// 	redirectAllRequestsTo = u
+// }
+
+// func StopRedirectingRequests() {
+// 	redirectAllRequestsTo = ""
+// }

+ 16 - 110
imagedata/image_data.go

@@ -2,21 +2,9 @@ package imagedata
 
 import (
 	"context"
-	"encoding/base64"
-	"fmt"
-	"os"
-	"strings"
 	"sync"
 
-	"github.com/imgproxy/imgproxy/v3/config"
-	"github.com/imgproxy/imgproxy/v3/ierrors"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
-	"github.com/imgproxy/imgproxy/v3/security"
-)
-
-var (
-	Watermark     *ImageData
-	FallbackImage *ImageData
 )
 
 type ImageData struct {
@@ -40,104 +28,22 @@ func (d *ImageData) SetCancel(cancel context.CancelFunc) {
 	d.cancel = cancel
 }
 
-func Init() error {
-	initRead()
-
-	if err := initDownloading(); err != nil {
-		return err
-	}
-
-	if err := loadWatermark(); err != nil {
-		return err
-	}
-
-	if err := loadFallbackImage(); err != nil {
-		return err
-	}
-
-	return nil
-}
-
-func loadWatermark() (err error) {
-	if len(config.WatermarkData) > 0 {
-		Watermark, err = FromBase64(config.WatermarkData, "watermark", security.DefaultOptions())
-		return
-	}
-
-	if len(config.WatermarkPath) > 0 {
-		Watermark, err = FromFile(config.WatermarkPath, "watermark", security.DefaultOptions())
-		return
-	}
-
-	if len(config.WatermarkURL) > 0 {
-		Watermark, err = Download(context.Background(), config.WatermarkURL, "watermark", DownloadOptions{Header: nil, CookieJar: nil}, security.DefaultOptions())
-		return
-	}
-
-	return nil
-}
-
-func loadFallbackImage() (err error) {
-	switch {
-	case len(config.FallbackImageData) > 0:
-		FallbackImage, err = FromBase64(config.FallbackImageData, "fallback image", security.DefaultOptions())
-	case len(config.FallbackImagePath) > 0:
-		FallbackImage, err = FromFile(config.FallbackImagePath, "fallback image", security.DefaultOptions())
-	case len(config.FallbackImageURL) > 0:
-		FallbackImage, err = Download(context.Background(), config.FallbackImageURL, "fallback image", DownloadOptions{Header: nil, CookieJar: nil}, security.DefaultOptions())
-	default:
-		FallbackImage, err = nil, nil
-	}
+// func Init() error {
+// 	if err := initDownloading(); err != nil {
+// 		return err
+// 	}
 
-	if FallbackImage != nil && err == nil && config.FallbackImageTTL > 0 {
-		if FallbackImage.Headers == nil {
-			FallbackImage.Headers = make(map[string]string)
-		}
-		FallbackImage.Headers["Fallback-Image"] = "1"
-	}
-
-	return err
-}
-
-func FromBase64(encoded, desc string, secopts security.Options) (*ImageData, error) {
-	dec := base64.NewDecoder(base64.StdEncoding, strings.NewReader(encoded))
-	size := 4 * (len(encoded)/3 + 1)
-
-	imgdata, err := readAndCheckImage(dec, size, secopts)
-	if err != nil {
-		return nil, fmt.Errorf("Can't decode %s: %s", desc, err)
-	}
-
-	return imgdata, nil
-}
+// 	return nil
+// }
 
-func FromFile(path, desc string, secopts security.Options) (*ImageData, error) {
-	f, err := os.Open(path)
-	if err != nil {
-		return nil, fmt.Errorf("Can't read %s: %s", desc, err)
-	}
+// func Download(ctx context.Context, imageURL, desc string, opts DownloadOptions, secopts security.Options) (*ImageData, error) {
+// 	imgdata, err := download(ctx, imageURL, opts, secopts)
+// 	if err != nil {
+// 		return nil, ierrors.Wrap(
+// 			err, 0,
+// 			ierrors.WithPrefix(fmt.Sprintf("Can't download %s", desc)),
+// 		)
+// 	}
 
-	fi, err := f.Stat()
-	if err != nil {
-		return nil, fmt.Errorf("Can't read %s: %s", desc, err)
-	}
-
-	imgdata, err := readAndCheckImage(f, int(fi.Size()), secopts)
-	if err != nil {
-		return nil, fmt.Errorf("Can't read %s: %s", desc, err)
-	}
-
-	return imgdata, nil
-}
-
-func Download(ctx context.Context, imageURL, desc string, opts DownloadOptions, secopts security.Options) (*ImageData, error) {
-	imgdata, err := download(ctx, imageURL, opts, secopts)
-	if err != nil {
-		return nil, ierrors.Wrap(
-			err, 0,
-			ierrors.WithPrefix(fmt.Sprintf("Can't download %s", desc)),
-		)
-	}
-
-	return imgdata, nil
-}
+// 	return From(imgdata), nil
+// }

+ 57 - 0
imagedata/legacy.go

@@ -0,0 +1,57 @@
+// Package imagedata: temporary shims converting the old ImageData to the new
+// imagedatanew.ImageData and vice versa. To be removed once the migration is done.
+package imagedata
+
+import (
+	"io"
+	"net/http"
+
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
+	"github.com/imgproxy/imgproxy/v3/security"
+)
+
+// To converts an old *ImageData to a new imagedatanew.ImageData.
+// A nil input yields a nil result.
+func To(old *ImageData) imagedatanew.ImageData {
+	if old == nil {
+		return nil
+	}
+
+	// Old headers are a flat map[string]string; lift them into http.Header
+	headers := make(http.Header)
+	for k, v := range old.Headers {
+		headers.Add(k, v)
+	}
+
+	d, err := imagedatanew.NewFromBytes(
+		old.Data, headers, security.DefaultOptions(),
+	)
+
+	if err != nil {
+		panic(err) // temporary shim: treat conversion failure as fatal
+	}
+
+	return d
+}
+
+// From converts a new imagedatanew.ImageData to an old *ImageData.
+// A nil input yields a nil result. The whole image is read into memory, and
+// only the first value of each header key survives the flattening.
+func From(n imagedatanew.ImageData) *ImageData {
+	if n == nil {
+		return nil
+	}
+
+	data, err := io.ReadAll(n.Reader())
+	if err != nil {
+		panic(err) // temporary shim: treat read failure as fatal
+	}
+
+	headers := make(map[string]string)
+
+	// n.Headers() is deprecated; this shim still needs it for the old shape
+	//nolint:staticcheck
+	for k, v := range n.Headers() {
+		headers[k] = v[0]
+	}
+
+	return &ImageData{
+		Data:    data,
+		Type:    n.Format(),
+		Headers: headers,
+	}
+}

+ 0 - 64
imagedata/read.go

@@ -1,64 +0,0 @@
-package imagedata
-
-import (
-	"bytes"
-	"context"
-	"io"
-
-	"github.com/imgproxy/imgproxy/v3/bufpool"
-	"github.com/imgproxy/imgproxy/v3/bufreader"
-	"github.com/imgproxy/imgproxy/v3/config"
-	"github.com/imgproxy/imgproxy/v3/imagefetcher"
-	"github.com/imgproxy/imgproxy/v3/imagemeta"
-	"github.com/imgproxy/imgproxy/v3/security"
-)
-
-var downloadBufPool *bufpool.Pool
-
-func initRead() {
-	downloadBufPool = bufpool.New("download", config.Workers, config.DownloadBufferSize)
-}
-
-func readAndCheckImage(r io.Reader, contentLength int, secopts security.Options) (*ImageData, error) {
-	buf := downloadBufPool.Get(contentLength, false)
-	cancel := func() { downloadBufPool.Put(buf) }
-
-	br := bufreader.New(r, buf)
-
-	meta, err := imagemeta.DecodeMeta(br)
-	if err != nil {
-		buf.Reset()
-		cancel()
-
-		return nil, imagefetcher.WrapError(err)
-	}
-
-	if err = security.CheckDimensions(meta.Width(), meta.Height(), 1, secopts); err != nil {
-		buf.Reset()
-		cancel()
-
-		return nil, imagefetcher.WrapError(err)
-	}
-
-	downloadBufPool.GrowBuffer(buf, contentLength)
-
-	if err = br.Flush(); err != nil {
-		buf.Reset()
-		cancel()
-
-		return nil, imagefetcher.WrapError(err)
-	}
-
-	return &ImageData{
-		Data:   buf.Bytes(),
-		Type:   meta.Format(),
-		cancel: cancel,
-	}, nil
-}
-
-func BorrowBuffer() (*bytes.Buffer, context.CancelFunc) {
-	buf := downloadBufPool.Get(0, false)
-	cancel := func() { downloadBufPool.Put(buf) }
-
-	return buf, cancel
-}

+ 46 - 0
imagedatanew/download.go

@@ -0,0 +1,46 @@
+package imagedatanew
+
+import (
+	"context"
+	"net/http"
+
+	"github.com/imgproxy/imgproxy/v3/imagefetcher"
+	"github.com/imgproxy/imgproxy/v3/security"
+)
+
+// HTTPOptions defines options for HTTP requests made to fetch images.
+type HTTPOptions struct {
+	Header    http.Header    // extra headers to send with the request
+	CookieJar http.CookieJar // optional cookie jar for the request
+}
+
+// NewFromURL creates a new ImageData by making an HTTP request to the specified URL.
+// The response body is not closed here: it keeps streaming into the returned
+// ImageData and is released when that ImageData is closed.
+func NewFromURL(ctx context.Context, fetcher *imagefetcher.Fetcher, url string, opts HTTPOptions, secopts security.Options) (ImageData, error) {
+	req, err := fetcher.BuildRequest(ctx, url, opts.Header, opts.CookieJar)
+	if err != nil {
+		if req != nil {
+			defer req.Cancel()
+		}
+		return nil, err
+	}
+
+	res, err := req.FetchImage()
+	if err != nil {
+		if res != nil {
+			res.Body.Close()
+		}
+		return nil, err
+	}
+
+	// Wrap the response with the provided security options for this request.
+	// NOTE(review): req.Cancel is never invoked on the success path — presumably
+	// the returned ImageData's lifecycle covers it; confirm with imagefetcher.
+	imgdata, err := NewFromResponse(res, secopts)
+	if err != nil {
+		if res != nil {
+			req.Cancel()
+			res.Body.Close()
+		}
+		return nil, err
+	}
+
+	return imgdata, nil
+}

+ 111 - 0
imagedatanew/factory.go

@@ -0,0 +1,111 @@
+package imagedatanew
+
+import (
+	"bytes"
+	"encoding/base64"
+	"io"
+	"net/http"
+	"os"
+
+	"github.com/imgproxy/imgproxy/v3/asyncbuffer"
+	"github.com/imgproxy/imgproxy/v3/imagemeta"
+	"github.com/imgproxy/imgproxy/v3/security"
+)
+
+// NewFromFile creates a new ImageData from the file at the given path.
+// The file is fully read into memory, subject to the security size limit.
+func NewFromFile(path string, headers http.Header, secopts security.Options) (*imageDataBytes, error) {
+	f, err := os.Open(path)
+	if err != nil {
+		return nil, err
+	}
+	defer f.Close()
+
+	fr, err := security.LimitFileSize(f, secopts)
+	if err != nil {
+		return nil, err
+	}
+
+	b, err := io.ReadAll(fr)
+	if err != nil {
+		return nil, err
+	}
+
+	// NOTE(review): the decode-meta + check-meta sequence below is repeated in
+	// NewFromBase64 and NewFromBytes; consider delegating both to NewFromBytes.
+	r := bytes.NewReader(b)
+	meta, err := imagemeta.DecodeMeta(r)
+	if err != nil {
+		return nil, err
+	}
+
+	err = security.CheckMeta(meta, secopts)
+	if err != nil {
+		return nil, err
+	}
+
+	return &imageDataBytes{b, meta, headers.Clone()}, nil
+}
+
+// NewFromBase64 creates a new ImageData from a standard-encoding base64 string.
+func NewFromBase64(encoded string, headers http.Header, secopts security.Options) (*imageDataBytes, error) {
+	b, err := base64.StdEncoding.DecodeString(encoded)
+	if err != nil {
+		return nil, err
+	}
+
+	r := bytes.NewReader(b)
+
+	meta, err := imagemeta.DecodeMeta(r)
+	if err != nil {
+		return nil, err
+	}
+
+	err = security.CheckMeta(meta, secopts)
+	if err != nil {
+		return nil, err
+	}
+
+	return &imageDataBytes{b, meta, headers.Clone()}, nil
+}
+
+// NewFromBytes creates a new imageDataBytes from a byte slice.
+// The slice is used as-is (not copied); callers must not mutate it afterwards.
+func NewFromBytes(b []byte, headers http.Header, secopts security.Options) (*imageDataBytes, error) {
+	r := bytes.NewReader(b)
+
+	meta, err := imagemeta.DecodeMeta(r)
+	if err != nil {
+		return nil, err
+	}
+
+	err = security.CheckMeta(meta, secopts)
+	if err != nil {
+		return nil, err
+	}
+
+	return &imageDataBytes{b, meta, headers.Clone()}, nil
+}
+
+// NewFromResponse creates a new imageDataResponse from an http.Response.
+// The body keeps streaming into an async buffer after this function returns.
+func NewFromResponse(or *http.Response, secopts security.Options) (*imageDataResponse, error) {
+	// We must not close the response body here, as it is read in the background
+	//nolint:bodyclose
+	r, err := security.LimitResponseSize(or, secopts)
+	if err != nil {
+		return nil, err
+	}
+
+	b := asyncbuffer.FromReader(r.Body)
+	c := r.Body // keep the body so imageDataResponse.Close can release it
+
+	meta, err := imagemeta.DecodeMeta(b.Reader())
+	if err != nil {
+		b.Close() // Close the async buffer early
+		return nil, err
+	}
+
+	err = security.CheckMeta(meta, secopts)
+	if err != nil {
+		b.Close() // Close the async buffer early
+		return nil, err
+	}
+
+	return &imageDataResponse{b, c, meta, or.Header.Clone()}, nil
+}

+ 92 - 0
imagedatanew/image_data.go

@@ -0,0 +1,92 @@
+// Package imagedatanew provides a shared ImageData interface for working with image data.
+package imagedatanew
+
+import (
+	"bytes"
+	"io"
+	"net/http"
+
+	"github.com/imgproxy/imgproxy/v3/asyncbuffer"
+	"github.com/imgproxy/imgproxy/v3/imagemeta"
+	"github.com/imgproxy/imgproxy/v3/imagetype"
+)
+
+// ImageData is an interface that defines methods for reading image data and metadata
+type ImageData interface {
+	io.Closer               // Close closes the image data and releases any resources held by it
+	Reader() io.ReadSeeker  // Reader returns a new ReadSeeker for the image data
+	Meta() imagemeta.Meta   // Meta returns the metadata of the image data
+	Format() imagetype.Type // Format returns the image format from the metadata (shortcut)
+
+	// Will be removed from the interface in the future (DEPRECATED)
+	Headers() http.Header // Headers returns the HTTP headers of the image data, if applicable
+}
+
+// imageDataResponse is a struct that implements the ImageData interface for http.Response
+type imageDataResponse struct {
+	b       *asyncbuffer.AsyncBuffer // AsyncBuffer instance
+	c       io.Closer                // Closer for the original response body
+	meta    imagemeta.Meta           // Metadata of the image data
+	headers http.Header              // Headers for the response, if applicable
+}
+
+// imageDataBytes is a struct that implements the ImageData interface for a byte slice
+type imageDataBytes struct {
+	b       []byte         // Raw image data bytes
+	meta    imagemeta.Meta // Metadata of the image data
+	headers http.Header    // Headers for the response, if applicable
+}
+
+// Reader returns a ReadSeeker for the image data
+func (r *imageDataResponse) Reader() io.ReadSeeker {
+	return r.b.Reader()
+}
+
+// Close closes the response body (hence, response) and the async buffer itself
+func (r *imageDataResponse) Close() error {
+	if r.c != nil {
+		defer r.c.Close()
+	}
+
+	return r.b.Close()
+}
+
+// Meta returns the metadata of the image data
+func (r *imageDataResponse) Meta() imagemeta.Meta {
+	return r.meta
+}
+
+// Format returns the image format from the metadata
+func (r *imageDataResponse) Format() imagetype.Type {
+	return r.meta.Format()
+}
+
+// Headers returns the headers of the image data, if applicable
+func (r *imageDataResponse) Headers() http.Header {
+	return r.headers
+}
+
+// Reader returns a ReadSeeker for the image data
+func (b *imageDataBytes) Reader() io.ReadSeeker {
+	return bytes.NewReader(b.b)
+}
+
+// Close does nothing for imageDataBytes as it does not hold any resources
+func (b *imageDataBytes) Close() error {
+	return nil
+}
+
+// Meta returns the metadata of the image data
+func (b *imageDataBytes) Meta() imagemeta.Meta {
+	return b.meta
+}
+
+// Format returns the image format from the metadata
+func (b *imageDataBytes) Format() imagetype.Type {
+	return b.meta.Format()
+}
+
+// Headers returns the headers of the image data, if applicable
+func (b *imageDataBytes) Headers() http.Header {
+	return b.headers
+}

+ 71 - 29
imagedata/image_data_test.go → imagedatanew/image_data_test.go

@@ -1,9 +1,8 @@
-package imagedata
+package imagedatanew
 
 import (
 	"bytes"
 	"compress/gzip"
-	"context"
 	"encoding/base64"
 	"fmt"
 	"io"
@@ -18,14 +17,17 @@ import (
 
 	"github.com/imgproxy/imgproxy/v3/config"
 	"github.com/imgproxy/imgproxy/v3/ierrors"
+	"github.com/imgproxy/imgproxy/v3/imagefetcher"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
 	"github.com/imgproxy/imgproxy/v3/security"
+	"github.com/imgproxy/imgproxy/v3/transport"
 )
 
 type ImageDataTestSuite struct {
 	suite.Suite
 
-	server *httptest.Server
+	server  *httptest.Server
+	fetcher *imagefetcher.Fetcher
 
 	status int
 	data   []byte
@@ -39,8 +41,6 @@ func (s *ImageDataTestSuite) SetupSuite() {
 	config.Reset()
 	config.ClientKeepAliveTimeout = 0
 
-	Init()
-
 	f, err := os.Open("../testdata/test1.jpg")
 	s.Require().NoError(err)
 	defer f.Close()
@@ -50,6 +50,12 @@ func (s *ImageDataTestSuite) SetupSuite() {
 
 	s.defaultData = data
 
+	tr, err := transport.NewTransport()
+	s.Require().NoError(err)
+
+	s.fetcher, err = imagefetcher.NewFetcher(tr, imagefetcher.NewConfigFromEnv())
+	s.Require().NoError(err)
+
 	s.server = httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
 		if s.check != nil {
 			s.check(r)
@@ -89,13 +95,47 @@ func (s *ImageDataTestSuite) SetupTest() {
 	s.header.Set("Content-Type", "image/jpeg")
 }
 
-func (s *ImageDataTestSuite) TestDownloadStatusOK() {
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+func (s *ImageDataTestSuite) readData(i ImageData) []byte {
+	d, err := io.ReadAll(i.Reader())
+	s.Require().NoError(err)
+	return d
+}
+
+// download simulates downloading an image from the given URL and returns ImageData.
+func (s *ImageDataTestSuite) download(url string, secopts security.Options) (ImageData, error) {
+	req, err := s.fetcher.BuildRequest(s.T().Context(), url, make(http.Header), nil)
+	if err != nil {
+		if req != nil { req.Cancel() } // req is likely nil when BuildRequest fails; guard before cancelling
+		return nil, err
+	}
 
+	res, err := req.FetchImage()
+	if err != nil {
+		if res != nil {
+			res.Body.Close()
+		}
+		return nil, err
+	}
+
+	imgdata, err := NewFromResponse(res, secopts)
+	if err != nil {
+		if res != nil {
+			res.Body.Close()
+		}
+		return nil, err
+	}
+
+	return imgdata, nil
+}
+
+func (s *ImageDataTestSuite) TestDownloadStatusOK() {
+	imgdata, err := s.download(s.server.URL, security.DefaultOptions())
 	s.Require().NoError(err)
+	defer imgdata.Close()
+
 	s.Require().NotNil(imgdata)
-	s.Require().Equal(s.defaultData, imgdata.Data)
-	s.Require().Equal(imagetype.JPEG, imgdata.Type)
+	s.Require().Equal(s.defaultData, s.readData(imgdata))
+	s.Require().Equal(imagetype.JPEG, imgdata.Format())
 }
 
 func (s *ImageDataTestSuite) TestDownloadStatusPartialContent() {
@@ -157,16 +197,17 @@ func (s *ImageDataTestSuite) TestDownloadStatusPartialContent() {
 		s.Run(tc.name, func() {
 			s.header.Set("Content-Range", tc.contentRange)
 
-			imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+			imgdata, err := s.download(s.server.URL, security.DefaultOptions())
 
 			if tc.expectErr {
 				s.Require().Error(err)
 				s.Require().Equal(404, ierrors.Wrap(err, 0).StatusCode())
 			} else {
 				s.Require().NoError(err)
+				defer imgdata.Close()
 				s.Require().NotNil(imgdata)
-				s.Require().Equal(s.defaultData, imgdata.Data)
-				s.Require().Equal(imagetype.JPEG, imgdata.Type)
+				s.Require().Equal(s.defaultData, s.readData(imgdata))
+				s.Require().Equal(imagetype.JPEG, imgdata.Format())
 			}
 		})
 	}
@@ -177,7 +218,7 @@ func (s *ImageDataTestSuite) TestDownloadStatusNotFound() {
 	s.data = []byte("Not Found")
 	s.header.Set("Content-Type", "text/plain")
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, err := s.download(s.server.URL, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(404, ierrors.Wrap(err, 0).StatusCode())
@@ -189,7 +230,7 @@ func (s *ImageDataTestSuite) TestDownloadStatusForbidden() {
 	s.data = []byte("Forbidden")
 	s.header.Set("Content-Type", "text/plain")
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, err := s.download(s.server.URL, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(404, ierrors.Wrap(err, 0).StatusCode())
@@ -201,7 +242,7 @@ func (s *ImageDataTestSuite) TestDownloadStatusInternalServerError() {
 	s.data = []byte("Internal Server Error")
 	s.header.Set("Content-Type", "text/plain")
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, err := s.download(s.server.URL, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(500, ierrors.Wrap(err, 0).StatusCode())
@@ -215,7 +256,7 @@ func (s *ImageDataTestSuite) TestDownloadUnreachable() {
 
 	serverURL := fmt.Sprintf("http://%s", l.Addr().String())
 
-	imgdata, err := Download(context.Background(), serverURL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, err := s.download(serverURL, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(500, ierrors.Wrap(err, 0).StatusCode())
@@ -225,7 +266,7 @@ func (s *ImageDataTestSuite) TestDownloadUnreachable() {
 func (s *ImageDataTestSuite) TestDownloadInvalidImage() {
 	s.data = []byte("invalid")
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, err := s.download(s.server.URL, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(422, ierrors.Wrap(err, 0).StatusCode())
@@ -235,7 +276,7 @@ func (s *ImageDataTestSuite) TestDownloadInvalidImage() {
 func (s *ImageDataTestSuite) TestDownloadSourceAddressNotAllowed() {
 	config.AllowLoopbackSourceAddresses = false
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, err := s.download(s.server.URL, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(404, ierrors.Wrap(err, 0).StatusCode())
@@ -245,7 +286,7 @@ func (s *ImageDataTestSuite) TestDownloadSourceAddressNotAllowed() {
 func (s *ImageDataTestSuite) TestDownloadImageTooLarge() {
 	config.MaxSrcResolution = 1
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, err := s.download(s.server.URL, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(422, ierrors.Wrap(err, 0).StatusCode())
@@ -255,7 +296,7 @@ func (s *ImageDataTestSuite) TestDownloadImageTooLarge() {
 func (s *ImageDataTestSuite) TestDownloadImageFileTooLarge() {
 	config.MaxSrcFileSize = 1
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, err := s.download(s.server.URL, security.DefaultOptions())
 
 	s.Require().Error(err)
 	s.Require().Equal(422, ierrors.Wrap(err, 0).StatusCode())
@@ -274,32 +315,33 @@ func (s *ImageDataTestSuite) TestDownloadGzip() {
 	s.data = buf.Bytes()
 	s.header.Set("Content-Encoding", "gzip")
 
-	imgdata, err := Download(context.Background(), s.server.URL, "Test image", DownloadOptions{}, security.DefaultOptions())
+	imgdata, err := s.download(s.server.URL, security.DefaultOptions())
 
 	s.Require().NoError(err)
+	defer imgdata.Close()
 	s.Require().NotNil(imgdata)
-	s.Require().Equal(s.defaultData, imgdata.Data)
-	s.Require().Equal(imagetype.JPEG, imgdata.Type)
+	s.Require().Equal(s.defaultData, s.readData(imgdata))
+	s.Require().Equal(imagetype.JPEG, imgdata.Format())
 }
 
 func (s *ImageDataTestSuite) TestFromFile() {
-	imgdata, err := FromFile("../testdata/test1.jpg", "Test image", security.DefaultOptions())
+	imgdata, err := NewFromFile("../testdata/test1.jpg", http.Header{}, security.DefaultOptions())
 
 	s.Require().NoError(err)
 	s.Require().NotNil(imgdata)
-	s.Require().Equal(s.defaultData, imgdata.Data)
-	s.Require().Equal(imagetype.JPEG, imgdata.Type)
+	s.Require().Equal(s.defaultData, s.readData(imgdata))
+	s.Require().Equal(imagetype.JPEG, imgdata.Format())
 }
 
 func (s *ImageDataTestSuite) TestFromBase64() {
 	b64 := base64.StdEncoding.EncodeToString(s.defaultData)
 
-	imgdata, err := FromBase64(b64, "Test image", security.DefaultOptions())
+	imgdata, err := NewFromBase64(b64, http.Header{}, security.DefaultOptions())
 
 	s.Require().NoError(err)
 	s.Require().NotNil(imgdata)
-	s.Require().Equal(s.defaultData, imgdata.Data)
-	s.Require().Equal(imagetype.JPEG, imgdata.Type)
+	s.Require().Equal(s.defaultData, s.readData(imgdata))
+	s.Require().Equal(imagetype.JPEG, imgdata.Format())
 }
 
 func TestImageData(t *testing.T) {

+ 122 - 0
imagedownloader/downloader.go

@@ -0,0 +1,122 @@
+// Package imagedownloader provides a shared method for downloading any
+// images within imgproxy.
+package imagedownloader
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+
+	"github.com/imgproxy/imgproxy/v3/ierrors"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
+	"github.com/imgproxy/imgproxy/v3/imagefetcher"
+	"github.com/imgproxy/imgproxy/v3/security"
+	"github.com/imgproxy/imgproxy/v3/transport"
+)
+
+var (
+	// Global downloader instance
+	Fetcher *imagefetcher.Fetcher
+	D       *Downloader
+
+	// For tests
+	redirectAllRequestsTo string
+)
+
+type DownloadOptions struct {
+	Header    http.Header
+	CookieJar http.CookieJar
+}
+
+// Downloader is responsible for downloading images and converting them to ImageData
+type Downloader struct {
+	fetcher *imagefetcher.Fetcher
+}
+
+// NewDownloader creates a new Downloader with the provided fetcher and config
+func NewDownloader(fetcher *imagefetcher.Fetcher) *Downloader {
+	return &Downloader{
+		fetcher: fetcher,
+	}
+}
+
+// Init initializes the global downloader
+func InitGlobalDownloader() error {
+	ts, err := transport.NewTransport()
+	if err != nil {
+		return err
+	}
+
+	Fetcher, err = imagefetcher.NewFetcher(ts, imagefetcher.NewConfigFromEnv())
+	if err != nil {
+		return ierrors.Wrap(err, 0, ierrors.WithPrefix("can't create image fetcher"))
+	}
+
+	D = NewDownloader(Fetcher)
+
+	return nil
+}
+
+// Download downloads an image from the given URL and returns ImageData
+func (d *Downloader) Download(ctx context.Context, imageURL string, opts DownloadOptions, secopts security.Options) (imagedatanew.ImageData, error) {
+	// We use this for testing
+	if len(redirectAllRequestsTo) > 0 {
+		imageURL = redirectAllRequestsTo
+	}
+
+	req, err := d.fetcher.BuildRequest(ctx, imageURL, opts.Header, opts.CookieJar)
+	if err != nil {
+		if req != nil { req.Cancel() } // req is likely nil when BuildRequest fails; guard before cancelling
+		return nil, err
+	}
+
+	res, err := req.FetchImage()
+	if err != nil {
+		if res != nil {
+			res.Body.Close()
+		}
+		return nil, err
+	}
+
+	// Create factory with the provided security options for this request
+	imgdata, err := imagedatanew.NewFromResponse(res, secopts)
+	if err != nil {
+		if res != nil {
+			res.Body.Close()
+		}
+		return nil, err
+	}
+
+	return imgdata, nil
+}
+
+// DownloadWithDesc downloads an image from the given URL, gives error a description context and returns ImageData
+func (d *Downloader) DownloadWithDesc(ctx context.Context, imageURL, desc string, opts DownloadOptions, secopts security.Options) (imagedatanew.ImageData, error) {
+	i, err := d.Download(ctx, imageURL, opts, secopts)
+
+	if err != nil {
+		return nil, ierrors.Wrap(
+			err, 0,
+			ierrors.WithPrefix(fmt.Sprintf("Can't download %s", desc)),
+		)
+	}
+
+	return i, err
+}
+
+// Download downloads an image using the global downloader.
+// NOTE: This function uses the global D instance. In the future, this will
+// be replaced with explicit instances everywhere.
+func Download(ctx context.Context, imageURL, desc string, opts DownloadOptions, secopts security.Options) (imagedatanew.ImageData, error) {
+	return D.DownloadWithDesc(ctx, imageURL, desc, opts, secopts)
+}
+
+// RedirectAllRequestsTo redirects all requests to the given URL (for testing)
+func RedirectAllRequestsTo(u string) {
+	redirectAllRequestsTo = u
+}
+
+// StopRedirectingRequests stops redirecting requests (for testing)
+func StopRedirectingRequests() {
+	redirectAllRequestsTo = ""
+}

+ 14 - 0
imagefetcher/config.go

@@ -0,0 +1,14 @@
+package imagefetcher
+
+import "github.com/imgproxy/imgproxy/v3/config"
+
+type Config struct {
+	// MaxRedirects is the maximum number of redirects allowed when fetching images.
+	MaxRedirects int
+}
+
+func NewConfigFromEnv() *Config {
+	return &Config{
+		MaxRedirects: config.MaxRedirects,
+	}
+}

+ 5 - 5
imagefetcher/fetcher.go

@@ -20,19 +20,19 @@ const (
 
 // Fetcher is a struct that holds the HTTP client and transport for fetching images
 type Fetcher struct {
-	transport    *transport.Transport // Transport used for making HTTP requests
-	maxRedirects int                  // Maximum number of redirects allowed
+	transport *transport.Transport // Transport used for making HTTP requests
+	config    *Config              // Fetcher configuration
 }
 
 // NewFetcher creates a new ImageFetcher with the provided transport
-func NewFetcher(transport *transport.Transport, maxRedirects int) (*Fetcher, error) {
-	return &Fetcher{transport, maxRedirects}, nil
+func NewFetcher(transport *transport.Transport, config *Config) (*Fetcher, error) {
+	return &Fetcher{transport, config}, nil
 }
 
 // checkRedirect is a method that checks if the number of redirects exceeds the maximum allowed
 func (f *Fetcher) checkRedirect(req *http.Request, via []*http.Request) error {
 	redirects := len(via)
-	if redirects >= f.maxRedirects {
+	if redirects >= f.config.MaxRedirects {
 		return newImageTooManyRedirectsError(redirects)
 	}
 	return nil

+ 2 - 2
main.go

@@ -16,7 +16,7 @@ import (
 	"github.com/imgproxy/imgproxy/v3/config/loadenv"
 	"github.com/imgproxy/imgproxy/v3/errorreport"
 	"github.com/imgproxy/imgproxy/v3/gliblog"
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedownloader"
 	"github.com/imgproxy/imgproxy/v3/logger"
 	"github.com/imgproxy/imgproxy/v3/memory"
 	"github.com/imgproxy/imgproxy/v3/metrics"
@@ -48,7 +48,7 @@ func initialize() error {
 		return err
 	}
 
-	if err := imagedata.Init(); err != nil {
+	if err := imagedownloader.InitGlobalDownloader(); err != nil {
 		return err
 	}
 

+ 2 - 2
processing/apply_filters.go

@@ -1,12 +1,12 @@
 package processing
 
 import (
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
 
-func applyFilters(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func applyFilters(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	if po.Blur == 0 && po.Sharpen == 0 && po.Pixelate <= 1 {
 		return nil
 	}

+ 3 - 3
processing/crop.go

@@ -1,7 +1,7 @@
 package processing
 
 import (
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/imath"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
@@ -32,7 +32,7 @@ func cropImage(img *vips.Image, cropWidth, cropHeight int, gravity *options.Grav
 	return img.Crop(left, top, cropWidth, cropHeight)
 }
 
-func crop(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func crop(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	width, height := pctx.cropWidth, pctx.cropHeight
 
 	opts := pctx.cropGravity
@@ -47,6 +47,6 @@ func crop(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions,
 	return cropImage(img, width, height, &opts, 1.0)
 }
 
-func cropToResult(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func cropToResult(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	return cropImage(img, pctx.resultCropWidth, pctx.resultCropHeight, &po.Gravity, pctx.dprScale)
 }

+ 2 - 2
processing/export_color_profile.go

@@ -1,12 +1,12 @@
 package processing
 
 import (
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
 
-func exportColorProfile(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func exportColorProfile(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	keepProfile := !po.StripColorProfile && po.Format.SupportsColourProfile()
 
 	if img.IsLinear() {

+ 3 - 3
processing/extend.go

@@ -1,7 +1,7 @@
 package processing
 
 import (
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
@@ -25,7 +25,7 @@ func extendImage(img *vips.Image, width, height int, gravity *options.GravityOpt
 	return img.Embed(width, height, offX, offY)
 }
 
-func extend(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func extend(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	if !po.Extend.Enabled {
 		return nil
 	}
@@ -34,7 +34,7 @@ func extend(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOption
 	return extendImage(img, width, height, &po.Extend.Gravity, pctx.dprScale)
 }
 
-func extendAspectRatio(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func extendAspectRatio(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	if !po.ExtendAspectRatio.Enabled {
 		return nil
 	}

+ 2 - 2
processing/fix_size.go

@@ -3,7 +3,7 @@ package processing
 import (
 	"math"
 
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
 	"github.com/imgproxy/imgproxy/v3/imath"
 	"github.com/imgproxy/imgproxy/v3/options"
@@ -91,7 +91,7 @@ func fixIcoSize(img *vips.Image) error {
 	return nil
 }
 
-func fixSize(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func fixSize(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	switch po.Format {
 	case imagetype.WEBP:
 		return fixWebpSize(img)

+ 2 - 2
processing/flatten.go

@@ -1,12 +1,12 @@
 package processing
 
 import (
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
 
-func flatten(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func flatten(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	if !po.Flatten && po.Format.SupportsAlpha() {
 		return nil
 	}

+ 2 - 2
processing/import_color_profile.go

@@ -2,12 +2,12 @@ package processing
 
 import (
 	"github.com/imgproxy/imgproxy/v3/config"
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
 
-func importColorProfile(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func importColorProfile(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	if img.ColourProfileImported() {
 		return nil
 	}

+ 2 - 2
processing/padding.go

@@ -1,13 +1,13 @@
 package processing
 
 import (
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/imath"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
 
-func padding(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func padding(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	if !po.Padding.Enabled {
 		return nil
 	}

+ 3 - 3
processing/pipeline.go

@@ -3,7 +3,7 @@ package processing
 import (
 	"context"
 
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/router"
@@ -60,10 +60,10 @@ type pipelineContext struct {
 	extendAspectRatioHeight int
 }
 
-type pipelineStep func(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error
+type pipelineStep func(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error
 type pipeline []pipelineStep
 
-func (p pipeline) Run(ctx context.Context, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func (p pipeline) Run(ctx context.Context, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	pctx := pipelineContext{
 		ctx: ctx,
 

+ 4 - 4
processing/prepare.go

@@ -3,7 +3,7 @@ package processing
 import (
 	"math"
 
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
 	"github.com/imgproxy/imgproxy/v3/imath"
 	"github.com/imgproxy/imgproxy/v3/options"
@@ -248,10 +248,10 @@ func (pctx *pipelineContext) limitScale(widthToScale, heightToScale int, po *opt
 	}
 }
 
-func prepare(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func prepare(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	pctx.imgtype = imagetype.Unknown
 	if imgdata != nil {
-		pctx.imgtype = imgdata.Type
+		pctx.imgtype = imgdata.Format()
 	}
 
 	pctx.srcWidth, pctx.srcHeight, pctx.angle, pctx.flip = extractMeta(img, po.Rotate, po.AutoRotate)
@@ -266,7 +266,7 @@ func prepare(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptio
 
 	// The size of a vector image is not checked during download, yet it can be very large.
 	// So we should scale it down to the maximum allowed resolution
-	if !pctx.trimmed && imgdata != nil && imgdata.Type.IsVector() && !po.Enlarge {
+	if !pctx.trimmed && imgdata != nil && imgdata.Format().IsVector() && !po.Enlarge {
 		resolution := imath.Round((float64(img.Width()*img.Height()) * pctx.wscale * pctx.hscale))
 		if resolution > po.SecurityOptions.MaxSrcResolution {
 			scale := math.Sqrt(float64(po.SecurityOptions.MaxSrcResolution) / float64(resolution))

+ 48 - 13
processing/processing.go

@@ -8,8 +8,11 @@ import (
 
 	log "github.com/sirupsen/logrus"
 
+	"github.com/imgproxy/imgproxy/v3/auximageprovider"
 	"github.com/imgproxy/imgproxy/v3/config"
 	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
+	"github.com/imgproxy/imgproxy/v3/imagedownloader"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
 	"github.com/imgproxy/imgproxy/v3/imath"
 	"github.com/imgproxy/imgproxy/v3/options"
@@ -18,6 +21,33 @@ import (
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
 
+var (
+	FallbackImage auximageprovider.AuxImageProvider
+	Watermark     auximageprovider.AuxImageProvider
+)
+
+func Init() (err error) {
+	FallbackImage, err = auximageprovider.NewFactory(imagedownloader.D).NewMemoryTriple(
+		config.FallbackImageData,
+		config.FallbackImagePath,
+		config.FallbackImageURL,
+	)
+	if err != nil {
+		return err
+	}
+
+	Watermark, err = auximageprovider.NewFactory(imagedownloader.D).NewMemoryTriple(
+		config.WatermarkData,
+		config.WatermarkPath,
+		config.WatermarkURL,
+	)
+	if err != nil {
+		return err
+	}
+
+	return nil
+}
+
 var mainPipeline = pipeline{
 	trim,
 	prepare,
@@ -174,13 +204,18 @@ func transformAnimated(ctx context.Context, img *vips.Image, po *options.Process
 		return err
 	}
 
-	if watermarkEnabled && imagedata.Watermark != nil {
+	if watermarkEnabled && Watermark != nil {
 		dprScale, derr := img.GetDoubleDefault("imgproxy-dpr-scale", 1.0)
 		if derr != nil {
 			dprScale = 1.0
 		}
 
-		if err = applyWatermark(img, imagedata.Watermark, &po.Watermark, dprScale, framesCount); err != nil {
+		wm, _, werr := Watermark.Get(ctx, po)
+		if werr != nil {
+			return werr
+		}
+
+		if err = applyWatermark(img, wm, &po.Watermark, dprScale, framesCount); err != nil {
 			return err
 		}
 	}
@@ -239,7 +274,7 @@ func saveImageToFitBytes(ctx context.Context, po *options.ProcessingOptions, img
 	}
 }
 
-func ProcessImage(ctx context.Context, imgdata *imagedata.ImageData, po *options.ProcessingOptions) (*imagedata.ImageData, error) {
+func ProcessImage(ctx context.Context, imgdata imagedatanew.ImageData, po *options.ProcessingOptions) (*imagedata.ImageData, error) {
 	runtime.LockOSThread()
 	defer runtime.UnlockOSThread()
 
@@ -247,7 +282,7 @@ func ProcessImage(ctx context.Context, imgdata *imagedata.ImageData, po *options
 
 	animationSupport :=
 		po.SecurityOptions.MaxAnimationFrames > 1 &&
-			imgdata.Type.SupportsAnimationLoad() &&
+			imgdata.Format().SupportsAnimationLoad() &&
 			(po.Format == imagetype.Unknown || po.Format.SupportsAnimationSave())
 
 	pages := 1
@@ -258,16 +293,16 @@ func ProcessImage(ctx context.Context, imgdata *imagedata.ImageData, po *options
 	img := new(vips.Image)
 	defer img.Clear()
 
-	if po.EnforceThumbnail && imgdata.Type.SupportsThumbnail() {
-		if err := img.LoadThumbnail(imgdata); err != nil {
+	if po.EnforceThumbnail && imgdata.Format().SupportsThumbnail() {
+		if err := img.LoadThumbnail(imagedata.From(imgdata)); err != nil {
 			log.Debugf("Can't load thumbnail: %s", err)
 			// Failed to load thumbnail, rollback to the full image
-			if err := img.Load(imgdata, 1, 1.0, pages); err != nil {
+			if err := img.Load(imagedata.From(imgdata), 1, 1.0, pages); err != nil {
 				return nil, err
 			}
 		}
 	} else {
-		if err := img.Load(imgdata, 1, 1.0, pages); err != nil {
+		if err := img.Load(imagedata.From(imgdata), 1, 1.0, pages); err != nil {
 			return nil, err
 		}
 	}
@@ -286,10 +321,10 @@ func ProcessImage(ctx context.Context, imgdata *imagedata.ImageData, po *options
 			po.Format = imagetype.AVIF
 		case po.PreferWebP:
 			po.Format = imagetype.WEBP
-		case isImageTypePreferred(imgdata.Type):
-			po.Format = imgdata.Type
+		case isImageTypePreferred(imgdata.Format()):
+			po.Format = imgdata.Format()
 		default:
-			po.Format = findBestFormat(imgdata.Type, animated, expectAlpha)
+			po.Format = findBestFormat(imgdata.Format(), animated, expectAlpha)
 		}
 	case po.EnforceJxl && !animated:
 		po.Format = imagetype.JXL
@@ -304,14 +339,14 @@ func ProcessImage(ctx context.Context, imgdata *imagedata.ImageData, po *options
 	}
 
 	if po.Format.SupportsAnimationSave() && animated {
-		if err := transformAnimated(ctx, img, po, imgdata); err != nil {
+		if err := transformAnimated(ctx, img, po, imagedata.From(imgdata)); err != nil {
 			return nil, err
 		}
 	} else {
 		if animated {
 			// We loaded animated image but the resulting format doesn't support
 			// animations, so we need to reload image as not animated
-			if err := img.Load(imgdata, 1, 1.0, 1); err != nil {
+			if err := img.Load(imagedata.From(imgdata), 1, 1.0, 1); err != nil {
 				return nil, err
 			}
 		}

+ 5 - 3
processing/processing_test.go

@@ -4,6 +4,7 @@ import (
 	"context"
 	"fmt"
 	"io"
+	"net/http"
 	"os"
 	"path/filepath"
 	"testing"
@@ -13,6 +14,7 @@ import (
 
 	"github.com/imgproxy/imgproxy/v3/config"
 	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/security"
 	"github.com/imgproxy/imgproxy/v3/vips"
@@ -25,13 +27,13 @@ type ProcessingTestSuite struct {
 func (s *ProcessingTestSuite) SetupSuite() {
 	config.Reset()
 
-	s.Require().NoError(imagedata.Init())
+	// NOTE(review): leftover from the imagedata.Init removal — delete, or call processing.Init() if the suite needs it
 	s.Require().NoError(vips.Init())
 
 	logrus.SetOutput(io.Discard)
 }
 
-func (s *ProcessingTestSuite) openFile(name string) *imagedata.ImageData {
+func (s *ProcessingTestSuite) openFile(name string) imagedatanew.ImageData {
 	secopts := security.Options{
 		MaxSrcResolution:            10 * 1024 * 1024,
 		MaxSrcFileSize:              10 * 1024 * 1024,
@@ -43,7 +45,7 @@ func (s *ProcessingTestSuite) openFile(name string) *imagedata.ImageData {
 	s.Require().NoError(err)
 	path := filepath.Join(wd, "..", "testdata", name)
 
-	imagedata, err := imagedata.FromFile(path, "test image", secopts)
+	imagedata, err := imagedatanew.NewFromFile(path, make(http.Header), secopts)
 	s.Require().NoError(err)
 
 	return imagedata

+ 2 - 2
processing/rotate_and_flip.go

@@ -1,12 +1,12 @@
 package processing
 
 import (
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
 
-func rotateAndFlip(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func rotateAndFlip(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	if pctx.angle%360 == 0 && po.Rotate%360 == 0 && !pctx.flip {
 		return nil
 	}

+ 2 - 2
processing/scale.go

@@ -1,12 +1,12 @@
 package processing
 
 import (
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
 
-func scale(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func scale(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	if pctx.wscale == 1 && pctx.hscale == 1 {
 		return nil
 	}

+ 11 - 10
processing/scale_on_load.go

@@ -7,18 +7,19 @@ import (
 
 	"github.com/imgproxy/imgproxy/v3/config"
 	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
 	"github.com/imgproxy/imgproxy/v3/imath"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
 
-func canScaleOnLoad(pctx *pipelineContext, imgdata *imagedata.ImageData, scale float64) bool {
+func canScaleOnLoad(pctx *pipelineContext, imgdata imagedatanew.ImageData, scale float64) bool {
 	if imgdata == nil || pctx.trimmed || scale == 1 {
 		return false
 	}
 
-	if imgdata.Type.IsVector() {
+	if imgdata.Format().IsVector() {
 		return true
 	}
 
@@ -26,10 +27,10 @@ func canScaleOnLoad(pctx *pipelineContext, imgdata *imagedata.ImageData, scale f
 		return false
 	}
 
-	return imgdata.Type == imagetype.JPEG ||
-		imgdata.Type == imagetype.WEBP ||
-		imgdata.Type == imagetype.HEIC ||
-		imgdata.Type == imagetype.AVIF
+	return imgdata.Format() == imagetype.JPEG ||
+		imgdata.Format() == imagetype.WEBP ||
+		imgdata.Format() == imagetype.HEIC ||
+		imgdata.Format() == imagetype.AVIF
 }
 
 func calcJpegShink(shrink float64) int {
@@ -45,7 +46,7 @@ func calcJpegShink(shrink float64) int {
 	return 1
 }
 
-func scaleOnLoad(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func scaleOnLoad(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	wshrink := float64(pctx.srcWidth) / float64(imath.Scale(pctx.srcWidth, pctx.wscale))
 	hshrink := float64(pctx.srcHeight) / float64(imath.Scale(pctx.srcHeight, pctx.hscale))
 	preshrink := math.Min(wshrink, hshrink)
@@ -57,11 +58,11 @@ func scaleOnLoad(pctx *pipelineContext, img *vips.Image, po *options.ProcessingO
 
 	var newWidth, newHeight int
 
-	if imgdata.Type.SupportsThumbnail() {
+	if imgdata.Format().SupportsThumbnail() {
 		thumbnail := new(vips.Image)
 		defer thumbnail.Clear()
 
-		if err := thumbnail.LoadThumbnail(imgdata); err != nil {
+		if err := thumbnail.LoadThumbnail(imagedata.From(imgdata)); err != nil {
 			log.Debugf("Can't load thumbnail: %s", err)
 			return nil
 		}
@@ -83,7 +84,7 @@ func scaleOnLoad(pctx *pipelineContext, img *vips.Image, po *options.ProcessingO
 			return nil
 		}
 
-		if err := img.Load(imgdata, jpegShrink, prescale, 1); err != nil {
+		if err := img.Load(imagedata.From(imgdata), jpegShrink, prescale, 1); err != nil {
 			return err
 		}
 

+ 2 - 2
processing/strip_metadata.go

@@ -5,7 +5,7 @@ import (
 
 	"github.com/trimmer-io/go-xmp/xmp"
 
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/imagemeta/iptc"
 	"github.com/imgproxy/imgproxy/v3/imagemeta/photoshop"
 	"github.com/imgproxy/imgproxy/v3/options"
@@ -105,7 +105,7 @@ func stripXMP(img *vips.Image) []byte {
 	return xmpData
 }
 
-func stripMetadata(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func stripMetadata(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	if !po.StripMetadata {
 		return nil
 	}

+ 4 - 3
processing/trim.go

@@ -4,21 +4,22 @@ import (
 	"math"
 
 	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
 )
 
-func trim(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
+func trim(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
 	if !po.Trim.Enabled {
 		return nil
 	}
 
 	// The size of a vector image is not checked during download, yet it can be very large.
 	// So we should scale it down to the maximum allowed resolution
-	if imgdata != nil && imgdata.Type.IsVector() {
+	if imgdata != nil && imgdata.Format().IsVector() {
 		if resolution := img.Width() * img.Height(); resolution > po.SecurityOptions.MaxSrcResolution {
 			scale := math.Sqrt(float64(po.SecurityOptions.MaxSrcResolution) / float64(resolution))
-			if err := img.Load(imgdata, 1, scale, 1); err != nil {
+			if err := img.Load(imagedata.From(imgdata), 1, scale, 1); err != nil {
 				return err
 			}
 		}

+ 13 - 7
processing/watermark.go

@@ -6,6 +6,7 @@ import (
 
 	"github.com/imgproxy/imgproxy/v3/config"
 	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
 	"github.com/imgproxy/imgproxy/v3/imath"
 	"github.com/imgproxy/imgproxy/v3/options"
 	"github.com/imgproxy/imgproxy/v3/vips"
@@ -20,8 +21,8 @@ var watermarkPipeline = pipeline{
 	padding,
 }
 
-func prepareWatermark(wm *vips.Image, wmData *imagedata.ImageData, opts *options.WatermarkOptions, imgWidth, imgHeight int, offsetScale float64, framesCount int) error {
-	if err := wm.Load(wmData, 1, 1.0, 1); err != nil {
+func prepareWatermark(wm *vips.Image, wmData imagedatanew.ImageData, opts *options.WatermarkOptions, imgWidth, imgHeight int, offsetScale float64, framesCount int) error {
+	if err := wm.Load(imagedata.From(wmData), 1, 1.0, 1); err != nil {
 		return err
 	}
 
@@ -29,7 +30,7 @@ func prepareWatermark(wm *vips.Image, wmData *imagedata.ImageData, opts *options
 	po.ResizingType = options.ResizeFit
 	po.Dpr = 1
 	po.Enlarge = true
-	po.Format = wmData.Type
+	po.Format = wmData.Format()
 
 	if opts.Scale > 0 {
 		po.Width = imath.Max(imath.ScaleToEven(imgWidth, opts.Scale), 1)
@@ -81,7 +82,7 @@ func prepareWatermark(wm *vips.Image, wmData *imagedata.ImageData, opts *options
 	return wm.StripAll()
 }
 
-func applyWatermark(img *vips.Image, wmData *imagedata.ImageData, opts *options.WatermarkOptions, offsetScale float64, framesCount int) error {
+func applyWatermark(img *vips.Image, wmData imagedatanew.ImageData, opts *options.WatermarkOptions, offsetScale float64, framesCount int) error {
 	wm := new(vips.Image)
 	defer wm.Clear()
 
@@ -162,10 +163,15 @@ func applyWatermark(img *vips.Image, wmData *imagedata.ImageData, opts *options.
 	return nil
 }
 
-func watermark(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
-	if !po.Watermark.Enabled || imagedata.Watermark == nil {
+func watermark(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata imagedatanew.ImageData) error {
+	if !po.Watermark.Enabled || Watermark == nil {
 		return nil
 	}
 
-	return applyWatermark(img, imagedata.Watermark, &po.Watermark, pctx.dprScale, 1)
+	wm, _, err := Watermark.Get(pctx.ctx, po)
+	if err != nil {
+		return err
+	}
+
+	return applyWatermark(img, wm, &po.Watermark, pctx.dprScale, 1)
 }

+ 45 - 29
processing_handler.go

@@ -12,6 +12,7 @@ import (
 	"time"
 
 	log "github.com/sirupsen/logrus"
+	"go.withmatt.com/httpheaders"
 	"golang.org/x/sync/semaphore"
 
 	"github.com/imgproxy/imgproxy/v3/config"
@@ -20,6 +21,8 @@ import (
 	"github.com/imgproxy/imgproxy/v3/etag"
 	"github.com/imgproxy/imgproxy/v3/ierrors"
 	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedatanew"
+	"github.com/imgproxy/imgproxy/v3/imagedownloader"
 	"github.com/imgproxy/imgproxy/v3/imagefetcher"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
 	"github.com/imgproxy/imgproxy/v3/imath"
@@ -58,9 +61,10 @@ func initProcessingHandler() {
 	}
 
 	headerVaryValue = strings.Join(vary, ", ")
+
 }
 
-func setCacheControl(rw http.ResponseWriter, force *time.Time, originHeaders map[string]string) {
+func setCacheControl(rw http.ResponseWriter, force *time.Time, originHeaders http.Header) {
 	ttl := -1
 
 	if _, ok := originHeaders["Fallback-Image"]; ok && config.FallbackImageTTL > 0 {
@@ -72,12 +76,12 @@ func setCacheControl(rw http.ResponseWriter, force *time.Time, originHeaders map
 	}
 
 	if config.CacheControlPassthrough && ttl < 0 && originHeaders != nil {
-		if val, ok := originHeaders["Cache-Control"]; ok && len(val) > 0 {
+		if val := originHeaders.Get(httpheaders.CacheControl); len(val) > 0 {
 			rw.Header().Set("Cache-Control", val)
 			return
 		}
 
-		if val, ok := originHeaders["Expires"]; ok && len(val) > 0 {
+		if val := originHeaders.Get("Expires"); len(val) > 0 {
 			if t, err := time.Parse(http.TimeFormat, val); err == nil {
 				ttl = imath.Max(0, int(time.Until(t).Seconds()))
 			}
@@ -95,9 +99,9 @@ func setCacheControl(rw http.ResponseWriter, force *time.Time, originHeaders map
 	}
 }
 
-func setLastModified(rw http.ResponseWriter, originHeaders map[string]string) {
+func setLastModified(rw http.ResponseWriter, originHeaders http.Header) {
 	if config.LastModifiedEnabled {
-		if val, ok := originHeaders["Last-Modified"]; ok && len(val) != 0 {
+		if val := originHeaders.Get("Last-Modified"); len(val) != 0 {
 			rw.Header().Set("Last-Modified", val)
 		}
 	}
@@ -118,7 +122,7 @@ func setCanonical(rw http.ResponseWriter, originURL string) {
 	}
 }
 
-func respondWithImage(reqID string, r *http.Request, rw http.ResponseWriter, statusCode int, resultData *imagedata.ImageData, po *options.ProcessingOptions, originURL string, originData *imagedata.ImageData) {
+func respondWithImage(reqID string, r *http.Request, rw http.ResponseWriter, statusCode int, resultData *imagedata.ImageData, po *options.ProcessingOptions, originURL string, originData imagedatanew.ImageData) {
 	var contentDisposition string
 	if len(po.Filename) > 0 {
 		contentDisposition = resultData.Type.ContentDisposition(po.Filename, po.ReturnAttachment)
@@ -129,13 +133,14 @@ func respondWithImage(reqID string, r *http.Request, rw http.ResponseWriter, sta
 	rw.Header().Set("Content-Type", resultData.Type.Mime())
 	rw.Header().Set("Content-Disposition", contentDisposition)
 
-	setCacheControl(rw, po.Expires, originData.Headers)
-	setLastModified(rw, originData.Headers)
+	setCacheControl(rw, po.Expires, originData.Headers())
+	setLastModified(rw, originData.Headers())
 	setVary(rw)
 	setCanonical(rw, originURL)
 
 	if config.EnableDebugHeaders {
-		rw.Header().Set("X-Origin-Content-Length", strconv.Itoa(len(originData.Data)))
+		// TODO: RESTORE
+		// rw.Header().Set("X-Origin-Content-Length", strconv.Itoa(len(originData.Data)))
 		rw.Header().Set("X-Origin-Width", resultData.Headers["X-Origin-Width"])
 		rw.Header().Set("X-Origin-Height", resultData.Headers["X-Origin-Height"])
 		rw.Header().Set("X-Result-Width", resultData.Headers["X-Result-Width"])
@@ -167,7 +172,7 @@ func respondWithImage(reqID string, r *http.Request, rw http.ResponseWriter, sta
 	)
 }
 
-func respondWithNotModified(reqID string, r *http.Request, rw http.ResponseWriter, po *options.ProcessingOptions, originURL string, originHeaders map[string]string) {
+func respondWithNotModified(reqID string, r *http.Request, rw http.ResponseWriter, po *options.ProcessingOptions, originURL string, originHeaders http.Header) {
 	setCacheControl(rw, po.Expires, originHeaders)
 	setVary(rw)
 
@@ -330,13 +335,13 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 
 	statusCode := http.StatusOK
 
-	originData, err := func() (*imagedata.ImageData, error) {
+	originData, err := func() (imagedatanew.ImageData, error) {
 		defer metrics.StartDownloadingSegment(ctx, metrics.Meta{
 			metrics.MetaSourceImageURL:    metricsMeta[metrics.MetaSourceImageURL],
 			metrics.MetaSourceImageOrigin: metricsMeta[metrics.MetaSourceImageOrigin],
 		})()
 
-		downloadOpts := imagedata.DownloadOptions{
+		downloadOpts := imagedownloader.DownloadOptions{
 			Header:    imgRequestHeader,
 			CookieJar: nil,
 		}
@@ -346,7 +351,7 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 			checkErr(ctx, "download", err)
 		}
 
-		return imagedata.Download(ctx, imageURL, "source image", downloadOpts, po.SecurityOptions)
+		return imagedownloader.D.DownloadWithDesc(ctx, imageURL, "source image", downloadOpts, po.SecurityOptions)
 	}()
 
 	var nmErr imagefetcher.NotModifiedError
@@ -357,12 +362,12 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 
 	case errors.As(err, &nmErr):
 		if config.ETagEnabled && len(etagHandler.ImageEtagExpected()) != 0 {
-			rw.Header().Set("ETag", etagHandler.GenerateExpectedETag())
+			rw.Header().Set("Etag", etagHandler.GenerateExpectedETag())
 		}
 
-		h := make(map[string]string)
+		h := make(http.Header)
 		for k := range nmErr.Headers() {
-			h[k] = nmErr.Headers().Get(k)
+			h.Set(k, nmErr.Headers().Get(k))
 		}
 
 		respondWithNotModified(reqID, r, rw, po, imageURL, h)
@@ -380,7 +385,7 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 
 		sendErr(ctx, "download", ierr)
 
-		if imagedata.FallbackImage == nil {
+		if processing.FallbackImage == nil {
 			panic(ierr)
 		}
 
@@ -398,18 +403,29 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 			statusCode = ierr.StatusCode()
 		}
 
-		originData = imagedata.FallbackImage
+		fi, _, ferr := processing.FallbackImage.Get(ctx, po)
+		if ferr != nil {
+			sendErrAndPanic(ctx, "fallback_image", ferr)
+		}
+
+		originData = fi
+
+		if config.FallbackImageTTL > 0 {
+			// Will be removed along with the headers
+			//nolint:staticcheck
+			originData.Headers().Set("Fallback-Image", "1")
+		}
 	}
 
 	checkErr(ctx, "timeout", router.CheckTimeout(ctx))
 
 	if config.ETagEnabled && statusCode == http.StatusOK {
-		imgDataMatch := etagHandler.SetActualImageData(originData)
+		imgDataMatch := etagHandler.SetActualImageData(imagedata.From(originData))
 
-		rw.Header().Set("ETag", etagHandler.GenerateActualETag())
+		rw.Header().Set("Etag", etagHandler.GenerateActualETag())
 
 		if imgDataMatch && etagHandler.ProcessingOptionsMatch() {
-			respondWithNotModified(reqID, r, rw, po, imageURL, originData.Headers)
+			respondWithNotModified(reqID, r, rw, po, imageURL, originData.Headers())
 			return
 		}
 	}
@@ -419,13 +435,13 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 	// Skip processing svg with unknown or the same destination imageType
 	// if it's not forced by AlwaysRasterizeSvg option
 	// Also skip processing if the format is in SkipProcessingFormats
-	shouldSkipProcessing := (originData.Type == po.Format || po.Format == imagetype.Unknown) &&
-		(slices.Contains(po.SkipProcessingFormats, originData.Type) ||
-			originData.Type == imagetype.SVG && !config.AlwaysRasterizeSvg)
+	shouldSkipProcessing := (originData.Format() == po.Format || po.Format == imagetype.Unknown) &&
+		(slices.Contains(po.SkipProcessingFormats, originData.Format()) ||
+			originData.Format() == imagetype.SVG && !config.AlwaysRasterizeSvg)
 
 	if shouldSkipProcessing {
-		if originData.Type == imagetype.SVG && config.SanitizeSvg {
-			sanitized, svgErr := svg.Sanitize(originData)
+		if originData.Format() == imagetype.SVG && config.SanitizeSvg {
+			sanitized, svgErr := svg.Sanitize(imagedata.From(originData))
 			checkErr(ctx, "svg_processing", svgErr)
 
 			defer sanitized.Close()
@@ -434,14 +450,14 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
 			return
 		}
 
-		respondWithImage(reqID, r, rw, statusCode, originData, po, imageURL, originData)
+		respondWithImage(reqID, r, rw, statusCode, imagedata.From(originData), po, imageURL, originData)
 		return
 	}
 
-	if !vips.SupportsLoad(originData.Type) {
+	if !vips.SupportsLoad(originData.Format()) {
 		sendErrAndPanic(ctx, "processing", newInvalidURLErrorf(
 			http.StatusUnprocessableEntity,
-			"Source image format is not supported: %s", originData.Type,
+			"Source image format is not supported: %s", originData.Format(),
 		))
 	}
 

+ 16 - 15
processing_handler_test.go

@@ -20,6 +20,7 @@ import (
 	"github.com/imgproxy/imgproxy/v3/config/configurators"
 	"github.com/imgproxy/imgproxy/v3/etag"
 	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedownloader"
 	"github.com/imgproxy/imgproxy/v3/imagemeta"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
 	"github.com/imgproxy/imgproxy/v3/options"
@@ -106,7 +107,7 @@ func (s *ProcessingHandlerTestSuite) sampleETagData(imgETag string) (string, *im
 	}
 
 	if len(imgETag) != 0 {
-		imgdata.Headers = map[string]string{"ETag": imgETag}
+		imgdata.Headers = map[string]string{"Etag": imgETag}
 	}
 
 	var h etag.Handler
@@ -152,8 +153,8 @@ func (s *ProcessingHandlerTestSuite) TestSignatureValidationSuccess() {
 }
 
 func (s *ProcessingHandlerTestSuite) TestSourceValidation() {
-	imagedata.RedirectAllRequestsTo("local:///test1.png")
-	defer imagedata.StopRedirectingRequests()
+	imagedownloader.RedirectAllRequestsTo("local:///test1.png")
+	defer imagedownloader.StopRedirectingRequests()
 
 	tt := []struct {
 		name           string
@@ -402,7 +403,7 @@ func (s *ProcessingHandlerTestSuite) TestETagDisabled() {
 	res := rw.Result()
 
 	s.Require().Equal(200, res.StatusCode)
-	s.Require().Empty(res.Header.Get("ETag"))
+	s.Require().Empty(res.Header.Get("Etag"))
 }
 
 func (s *ProcessingHandlerTestSuite) TestETagReqNoIfNotModified() {
@@ -413,7 +414,7 @@ func (s *ProcessingHandlerTestSuite) TestETagReqNoIfNotModified() {
 	ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
 		s.Empty(r.Header.Get("If-None-Match"))
 
-		rw.Header().Set("ETag", imgdata.Headers["ETag"])
+		rw.Header().Set("Etag", imgdata.Headers["Etag"])
 		rw.WriteHeader(200)
 		rw.Write(s.readTestFile("test1.png"))
 	}))
@@ -423,7 +424,7 @@ func (s *ProcessingHandlerTestSuite) TestETagReqNoIfNotModified() {
 	res := rw.Result()
 
 	s.Require().Equal(200, res.StatusCode)
-	s.Require().Equal(etag, res.Header.Get("ETag"))
+	s.Require().Equal(etag, res.Header.Get("Etag"))
 }
 
 func (s *ProcessingHandlerTestSuite) TestETagDataNoIfNotModified() {
@@ -443,7 +444,7 @@ func (s *ProcessingHandlerTestSuite) TestETagDataNoIfNotModified() {
 	res := rw.Result()
 
 	s.Require().Equal(200, res.StatusCode)
-	s.Require().Equal(etag, res.Header.Get("ETag"))
+	s.Require().Equal(etag, res.Header.Get("Etag"))
 }
 
 func (s *ProcessingHandlerTestSuite) TestETagReqMatch() {
@@ -452,7 +453,7 @@ func (s *ProcessingHandlerTestSuite) TestETagReqMatch() {
 	poStr, imgdata, etag := s.sampleETagData(`"loremipsumdolor"`)
 
 	ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
-		s.Equal(imgdata.Headers["ETag"], r.Header.Get("If-None-Match"))
+		s.Equal(imgdata.Headers["Etag"], r.Header.Get("If-None-Match"))
 
 		rw.WriteHeader(304)
 	}))
@@ -465,7 +466,7 @@ func (s *ProcessingHandlerTestSuite) TestETagReqMatch() {
 	res := rw.Result()
 
 	s.Require().Equal(304, res.StatusCode)
-	s.Require().Equal(etag, res.Header.Get("ETag"))
+	s.Require().Equal(etag, res.Header.Get("Etag"))
 }
 
 func (s *ProcessingHandlerTestSuite) TestETagDataMatch() {
@@ -488,7 +489,7 @@ func (s *ProcessingHandlerTestSuite) TestETagDataMatch() {
 	res := rw.Result()
 
 	s.Require().Equal(304, res.StatusCode)
-	s.Require().Equal(etag, res.Header.Get("ETag"))
+	s.Require().Equal(etag, res.Header.Get("Etag"))
 }
 
 func (s *ProcessingHandlerTestSuite) TestETagReqNotMatch() {
@@ -500,7 +501,7 @@ func (s *ProcessingHandlerTestSuite) TestETagReqNotMatch() {
 	ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
 		s.Equal(`"loremipsum"`, r.Header.Get("If-None-Match"))
 
-		rw.Header().Set("ETag", imgdata.Headers["ETag"])
+		rw.Header().Set("Etag", imgdata.Headers["Etag"])
 		rw.WriteHeader(200)
 		rw.Write(imgdata.Data)
 	}))
@@ -513,7 +514,7 @@ func (s *ProcessingHandlerTestSuite) TestETagReqNotMatch() {
 	res := rw.Result()
 
 	s.Require().Equal(200, res.StatusCode)
-	s.Require().Equal(actualETag, res.Header.Get("ETag"))
+	s.Require().Equal(actualETag, res.Header.Get("Etag"))
 }
 
 func (s *ProcessingHandlerTestSuite) TestETagDataNotMatch() {
@@ -538,7 +539,7 @@ func (s *ProcessingHandlerTestSuite) TestETagDataNotMatch() {
 	res := rw.Result()
 
 	s.Require().Equal(200, res.StatusCode)
-	s.Require().Equal(actualETag, res.Header.Get("ETag"))
+	s.Require().Equal(actualETag, res.Header.Get("Etag"))
 }
 
 func (s *ProcessingHandlerTestSuite) TestETagProcessingOptionsNotMatch() {
@@ -551,7 +552,7 @@ func (s *ProcessingHandlerTestSuite) TestETagProcessingOptionsNotMatch() {
 	ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
 		s.Empty(r.Header.Get("If-None-Match"))
 
-		rw.Header().Set("ETag", imgdata.Headers["ETag"])
+		rw.Header().Set("Etag", imgdata.Headers["Etag"])
 		rw.WriteHeader(200)
 		rw.Write(imgdata.Data)
 	}))
@@ -564,7 +565,7 @@ func (s *ProcessingHandlerTestSuite) TestETagProcessingOptionsNotMatch() {
 	res := rw.Result()
 
 	s.Require().Equal(200, res.StatusCode)
-	s.Require().Equal(actualETag, res.Header.Get("ETag"))
+	s.Require().Equal(actualETag, res.Header.Get("Etag"))
 }
 
 func (s *ProcessingHandlerTestSuite) TestLastModifiedEnabled() {

+ 23 - 0
security/response_limit.go → security/limit.go

@@ -3,6 +3,7 @@ package security
 import (
 	"io"
 	"net/http"
+	"os"
 )
 
 // hardLimitReadCloser is a wrapper around io.ReadCloser
@@ -12,6 +13,7 @@ type hardLimitReadCloser struct {
 	left int
 }
 
+// Read reads data from the underlying reader, limiting the number of bytes read
 func (lr *hardLimitReadCloser) Read(p []byte) (n int, err error) {
 	if lr.left <= 0 {
 		return 0, newFileSizeError()
@@ -24,10 +26,31 @@ func (lr *hardLimitReadCloser) Read(p []byte) (n int, err error) {
 	return
 }
 
+// Close closes the underlying reader
 func (lr *hardLimitReadCloser) Close() error {
 	return lr.r.Close()
 }
 
+// LimitFileSize limits the size of the file to MaxSrcFileSize (if set).
+// It calls f.Stat() to get the file to get its size and returns an error
+// if the size exceeds MaxSrcFileSize.
+func LimitFileSize(f *os.File, opts Options) (*os.File, error) {
+	if opts.MaxSrcFileSize == 0 {
+		return f, nil
+	}
+
+	s, err := f.Stat()
+	if err != nil {
+		return nil, err
+	}
+
+	if int(s.Size()) > opts.MaxSrcFileSize {
+		return nil, newFileSizeError()
+	}
+
+	return f, nil
+}
+
 // LimitResponseSize limits the size of the response body to MaxSrcFileSize (if set).
 // First, it tries to use Content-Length header to check the limit.
 // If Content-Length is not set, it limits the size of the response body by wrapping

+ 7 - 0
security/image_size.go → security/meta_dimensions.go

@@ -1,9 +1,11 @@
 package security
 
 import (
+	"github.com/imgproxy/imgproxy/v3/imagemeta"
 	"github.com/imgproxy/imgproxy/v3/imath"
 )
 
+// CheckDimensions checks the given dimensions against the security options
 func CheckDimensions(width, height, frames int, opts Options) error {
 	frames = imath.Max(frames, 1)
 
@@ -19,3 +21,8 @@ func CheckDimensions(width, height, frames int, opts Options) error {
 
 	return nil
 }
+
+// CheckMeta checks the image metadata against the security options
+func CheckMeta(meta imagemeta.Meta, opts Options) error {
+	return CheckDimensions(meta.Width(), meta.Height(), 1, opts)
+}

+ 8 - 6
stream.go

@@ -10,10 +10,11 @@ import (
 	"sync"
 
 	log "github.com/sirupsen/logrus"
+	"go.withmatt.com/httpheaders"
 
 	"github.com/imgproxy/imgproxy/v3/config"
 	"github.com/imgproxy/imgproxy/v3/cookies"
-	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/imagedownloader"
 	"github.com/imgproxy/imgproxy/v3/imagetype"
 	"github.com/imgproxy/imgproxy/v3/metrics"
 	"github.com/imgproxy/imgproxy/v3/metrics/stats"
@@ -69,7 +70,7 @@ func streamOriginImage(ctx context.Context, reqID string, r *http.Request, rw ht
 		checkErr(ctx, "streaming", err)
 	}
 
-	req, err := imagedata.Fetcher.BuildRequest(r.Context(), imageURL, imgRequestHeader, cookieJar)
+	req, err := imagedownloader.Fetcher.BuildRequest(r.Context(), imageURL, imgRequestHeader, cookieJar)
 	defer req.Cancel()
 	checkErr(ctx, "streaming", err)
 
@@ -113,10 +114,11 @@ func streamOriginImage(ctx context.Context, reqID string, r *http.Request, rw ht
 		rw.Header().Set("Content-Disposition", imagetype.ContentDisposition(filename, ext, po.ReturnAttachment))
 	}
 
-	setCacheControl(rw, po.Expires, map[string]string{
-		"Cache-Control": res.Header.Get("Cache-Control"),
-		"Expires":       res.Header.Get("Expires"),
-	})
+	h := make(http.Header)
+	h.Set(httpheaders.CacheControl, res.Header.Get(httpheaders.CacheControl))
+	h.Set(httpheaders.Expires, res.Header.Get(httpheaders.Expires))
+
+	setCacheControl(rw, po.Expires, h)
 	setCanonical(rw, imageURL)
 	rw.Header().Set("Content-Security-Policy", "script-src 'none'")
 

+ 13 - 1
svg/svg.go

@@ -2,15 +2,27 @@ package svg
 
 import (
 	"bytes"
+	"context"
 	"io"
 	"strings"
 
 	"github.com/tdewolff/parse/v2"
 	"github.com/tdewolff/parse/v2/xml"
 
+	"github.com/imgproxy/imgproxy/v3/bufpool"
+	"github.com/imgproxy/imgproxy/v3/config"
 	"github.com/imgproxy/imgproxy/v3/imagedata"
 )
 
+var svgBufPool = bufpool.New("svg", config.Workers, config.DownloadBufferSize)
+
+func BorrowBuffer() (*bytes.Buffer, context.CancelFunc) {
+	buf := svgBufPool.Get(0, false)
+	cancel := func() { svgBufPool.Put(buf) }
+
+	return buf, cancel
+}
+
 func cloneHeaders(src map[string]string) map[string]string {
 	if src == nil {
 		return nil
@@ -28,7 +40,7 @@ func Sanitize(data *imagedata.ImageData) (*imagedata.ImageData, error) {
 	r := bytes.NewReader(data.Data)
 	l := xml.NewLexer(parse.NewInput(r))
 
-	buf, cancel := imagedata.BorrowBuffer()
+	buf, cancel := BorrowBuffer()
 
 	ignoreTag := 0
 

+ 2 - 2
svg/svg_test.go

@@ -19,8 +19,8 @@ type SvgTestSuite struct {
 func (s *SvgTestSuite) SetupSuite() {
 	config.Reset()
 
-	err := imagedata.Init()
-	s.Require().NoError(err)
+	// err := imagedata.Init()
+	// s.Require().NoError(err)
 }
 
 func (s *SvgTestSuite) readTestFile(name string) *imagedata.ImageData {