From be93293a89882a68c8515094c9b8becd30bd06f9 Mon Sep 17 00:00:00 2001 From: Mohammed Al Sahaf Date: Fri, 1 Aug 2025 19:17:02 +0300 Subject: [PATCH 1/6] refactor `handleBuffer` in Souin core middleware Signed-off-by: Mohammed Al Sahaf --- pkg/middleware/middleware.go | 62 +++++++++--------------------------- pkg/middleware/writer.go | 30 +++++++++++------ 2 files changed, 36 insertions(+), 56 deletions(-) diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index e8c720c7e..33e4f1e54 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -514,9 +514,7 @@ func (s *SouinBaseHandler) Upstream( } err := s.Store(customWriter, rq, requestCc, cachedKey, uri) - defer customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + defer customWriter.resetBuffer() return singleflightValue{ body: customWriter.Buf.Bytes(), @@ -573,9 +571,7 @@ func (s *SouinBaseHandler) Revalidate(validator *core.Revalidator, next handlerF statusCode := customWriter.GetStatusCode() if err == nil { if validator.IfUnmodifiedSincePresent && statusCode != http.StatusNotModified { - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() customWriter.Rw.WriteHeader(http.StatusPreconditionFailed) return nil, errors.New("") @@ -583,9 +579,7 @@ func (s *SouinBaseHandler) Revalidate(validator *core.Revalidator, next handlerF if validator.IfModifiedSincePresent { if lastModified, err := time.Parse(time.RFC1123, customWriter.Header().Get("Last-Modified")); err == nil && validator.IfModifiedSince.Sub(lastModified) > 0 { - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() customWriter.Rw.WriteHeader(http.StatusNotModified) return nil, errors.New("") @@ -607,9 +601,7 @@ func (s *SouinBaseHandler) Revalidate(validator *core.Revalidator, next handlerF ), ) - defer customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + defer customWriter.resetBuffer() return singleflightValue{ body: customWriter.Buf.Bytes(), headers: customWriter.Header().Clone(), @@ -766,18 +758,14 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } if validator.NotModified { customWriter.WriteHeader(http.StatusNotModified) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() _, _ = customWriter.Send() return nil } customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, _ = customWriter.Send() return nil @@ -803,9 +791,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } customWriter.WriteHeader(response.StatusCode) s.Configuration.GetLogger().Debugf("Serve from cache %+v", req) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() prometheus.Increment(prometheus.CachedResponseCounter) @@ -825,9 +811,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() customWriter = NewCustomWriter(req, rw, bufPool) go func(v *core.Revalidator, goCw *CustomWriter, goRq *http.Request, goNext 
func(http.ResponseWriter, *http.Request) error, goCc *cacheobject.RequestCacheDirectives, goCk string, goUri string) { @@ -851,18 +835,13 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n response.Header.Set("Cache-Status", response.Header.Get("Cache-Status")+code) maps.Copy(customWriter.Header(), response.Header) customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - _, _ = io.Copy(b, response.Body) - }) + customWriter.resetAndCopyToBuffer(response.Body) _, err := customWriter.Send() return err } rw.WriteHeader(http.StatusGatewayTimeout) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() _, err := customWriter.Send() return err @@ -873,9 +852,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n rfc.SetCacheStatusHeader(response, storerName) customWriter.WriteHeader(response.StatusCode) maps.Copy(customWriter.Header(), response.Header) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, _ = customWriter.Send() return err @@ -884,9 +861,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n if statusCode != http.StatusNotModified && validator.Matched { customWriter.WriteHeader(http.StatusNotModified) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() _, _ = customWriter.Send() return err @@ -901,9 +876,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) maps.Copy(customWriter.Header(), response.Header) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -917,9 +890,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) maps.Copy(customWriter.Header(), response.Header) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -943,10 +914,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n response.Header.Set("Cache-Status", response.Header.Get("Cache-Status")+code) maps.Copy(customWriter.Header(), response.Header) customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - _, _ = io.Copy(b, response.Body) - }) + customWriter.resetAndCopyToBuffer(response.Body) _, err := customWriter.Send() return err diff --git a/pkg/middleware/writer.go b/pkg/middleware/writer.go index 3c7539161..021d5e999 100644 --- a/pkg/middleware/writer.go +++ b/pkg/middleware/writer.go @@ -2,6 +2,7 @@ package middleware import ( "bytes" + "io" "net/http" "strconv" "sync" @@ -39,12 +40,25 @@ type CustomWriter struct { statusCode int } -func (r *CustomWriter) handleBuffer(callback func(*bytes.Buffer)) { +func (r *CustomWriter) resetBuffer() { r.mutex.Lock() - callback(r.Buf) + r.Buf.Reset() r.mutex.Unlock() } +func (r *CustomWriter) copyToBuffer(src io.Reader) (int64, error) { + r.mutex.Lock() + defer r.mutex.Unlock() + return io.Copy(r.Buf, src) +} + +func (r *CustomWriter) resetAndCopyToBuffer(src io.Reader) (int64, error) { + 
r.mutex.Lock() + defer r.mutex.Unlock() + r.Buf.Reset() + return io.Copy(r.Buf, src) +} + // Header will write the response headers func (r *CustomWriter) Header() http.Header { r.mutex.Lock() @@ -77,19 +91,17 @@ func (r *CustomWriter) WriteHeader(code int) { // Write will write the response body func (r *CustomWriter) Write(b []byte) (int, error) { - r.handleBuffer(func(actual *bytes.Buffer) { - actual.Grow(len(b)) - _, _ = actual.Write(b) - }) + r.mutex.Lock() + r.Buf.Grow(len(b)) + _, _ = r.Buf.Write(b) + r.mutex.Unlock() return len(b), nil } // Send delays the response to handle Cache-Status func (r *CustomWriter) Send() (int, error) { - defer r.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + defer r.resetBuffer() storedLength := r.Header().Get(rfc.StoredLengthHeader) if storedLength != "" { r.Header().Set("Content-Length", storedLength) From d636b993cbed2dc22bccedf72d180b6796294cd3 Mon Sep 17 00:00:00 2001 From: Mohammed Al Sahaf Date: Fri, 1 Aug 2025 19:17:42 +0300 Subject: [PATCH 2/6] refactor `handleBuffer` in traefik override Signed-off-by: Mohammed Al Sahaf --- .../traefik/override/middleware/middleware.go | 58 +++++-------------- plugins/traefik/override/middleware/writer.go | 28 ++++++--- 2 files changed, 35 insertions(+), 51 deletions(-) diff --git a/plugins/traefik/override/middleware/middleware.go b/plugins/traefik/override/middleware/middleware.go index e81c15dfc..e56fc8e70 100644 --- a/plugins/traefik/override/middleware/middleware.go +++ b/plugins/traefik/override/middleware/middleware.go @@ -374,9 +374,7 @@ func (s *SouinBaseHandler) Upstream( } err := s.Store(customWriter, rq, requestCc, cachedKey) - defer customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + defer customWriter.resetBuffer() return singleflightValue{ body: customWriter.Buf.Bytes(), @@ -428,9 +426,7 @@ func (s *SouinBaseHandler) Revalidate(validator *types.Revalidator, next handler statusCode := customWriter.GetStatusCode() if err == nil { if validator.IfUnmodifiedSincePresent && statusCode != http.StatusNotModified { - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() customWriter.Rw.WriteHeader(http.StatusPreconditionFailed) return nil, errors.New("") @@ -438,9 +434,7 @@ func (s *SouinBaseHandler) Revalidate(validator *types.Revalidator, next handler if validator.IfModifiedSincePresent { if lastModified, err := time.Parse(time.RFC1123, customWriter.Header().Get("Last-Modified")); err == nil && validator.IfModifiedSince.Sub(lastModified) > 0 { - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() customWriter.Rw.WriteHeader(http.StatusNotModified) return nil, errors.New("") @@ -462,9 +456,7 @@ func (s *SouinBaseHandler) Revalidate(validator *types.Revalidator, next handler ), ) - defer customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + defer customWriter.resetBuffer() return singleflightValue{ body: customWriter.Buf.Bytes(), headers: customWriter.Header().Clone(), @@ -613,18 +605,14 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } if validator.NotModified { customWriter.WriteHeader(http.StatusNotModified) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() _, _ = customWriter.Send() return nil } customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, _ = 
customWriter.Send() return nil @@ -648,9 +636,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.Header()[h] = v } customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -669,9 +655,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() customWriter = NewCustomWriter(req, rw, bufPool) go func(v *types.Revalidator, goCw *CustomWriter, goRq *http.Request, goNext func(http.ResponseWriter, *http.Request) error, goCc *cacheobject.RequestCacheDirectives, goCk string, goUri string) { @@ -698,18 +682,13 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.Header().Set(k, response.Header.Get(k)) } customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - _, _ = io.Copy(b, response.Body) - }) + customWriter.resetAndCopyToBuffer(response.Body) _, err := customWriter.Send() return err } rw.WriteHeader(http.StatusGatewayTimeout) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() _, err := customWriter.Send() return err @@ -723,9 +702,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n for k := range response.Header { customWriter.Header().Set(k, response.Header.Get(k)) } - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, _ = customWriter.Send() return err @@ -734,9 +711,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n if statusCode != http.StatusNotModified && validator.Matched { customWriter.WriteHeader(http.StatusNotModified) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() _, _ = customWriter.Send() return err @@ -754,9 +729,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n for k := range response.Header { customWriter.Header().Set(k, response.Header.Get(k)) } - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -785,10 +758,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.Header().Set(k, response.Header.Get(k)) } customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - _, _ = io.Copy(b, response.Body) - }) + customWriter.resetAndCopyToBuffer(response.Body) _, err := customWriter.Send() return err diff --git a/plugins/traefik/override/middleware/writer.go b/plugins/traefik/override/middleware/writer.go index 13208fad1..fe029fccb 100644 --- a/plugins/traefik/override/middleware/writer.go +++ b/plugins/traefik/override/middleware/writer.go @@ -3,6 +3,7 @@ package middleware import ( "bytes" "fmt" + "io" "net/http" "strconv" "sync" @@ -39,12 +40,25 @@ type CustomWriter struct { statusCode int } -func (r *CustomWriter) handleBuffer(callback func(*bytes.Buffer)) { +func (r *CustomWriter) resetBuffer() { 
r.mutex.Lock() - callback(r.Buf) + r.Buf.Reset() r.mutex.Unlock() } +func (r *CustomWriter) copyToBuffer(src io.Reader) (int64, error) { + r.mutex.Lock() + defer r.mutex.Unlock() + return io.Copy(r.Buf, src) +} + +func (r *CustomWriter) resetAndCopyToBuffer(src io.Reader) (int64, error) { + r.mutex.Lock() + defer r.mutex.Unlock() + r.Buf.Reset() + return io.Copy(r.Buf, src) +} + // Header will write the response headers func (r *CustomWriter) Header() http.Header { r.mutex.Lock() @@ -77,17 +91,17 @@ func (r *CustomWriter) WriteHeader(code int) { // Write will write the response body func (r *CustomWriter) Write(b []byte) (int, error) { - r.handleBuffer(func(actual *bytes.Buffer) { - actual.Grow(len(b)) - _, _ = actual.Write(b) - }) + r.mutex.Lock() + r.Buf.Grow(len(b)) + _, _ = r.Buf.Write(b) + r.mutex.Unlock() return len(b), nil } // Send delays the response to handle Cache-Status func (r *CustomWriter) Send() (int, error) { - defer r.Buf.Reset() + defer r.resetBuffer() fmt.Println("Upstream Send", r.Buf.Len()) storedLength := r.Header().Get(rfc.StoredLengthHeader) if storedLength != "" { From 94afe38d1db8b17e2be087b80a940c7670bfa9ef Mon Sep 17 00:00:00 2001 From: Mohammed Al Sahaf Date: Fri, 1 Aug 2025 19:18:19 +0300 Subject: [PATCH 3/6] run `go mod vendor` for traefik Signed-off-by: Mohammed Al Sahaf --- .../github.com/darkweak/souin/api/main.go | 50 --- .../souin/api/prometheus/prometheus.go | 5 - .../github.com/darkweak/souin/api/souin.go | 233 ----------- .../github.com/darkweak/souin/api/types.go | 12 - .../souin/configurationtypes/types.go | 24 ++ .../darkweak/souin/context/cache.go | 1 + .../darkweak/souin/context/graphql.go | 1 + .../github.com/darkweak/souin/context/key.go | 67 ++- .../darkweak/souin/context/method.go | 1 + .../github.com/darkweak/souin/context/mode.go | 1 + .../github.com/darkweak/souin/context/now.go | 12 +- .../darkweak/souin/context/timeout.go | 2 + .../darkweak/souin/pkg/api/debug/debug.go | 87 ++++ .../github.com/darkweak/souin/pkg/api/main.go | 9 +- .../souin/pkg/api/prometheus/prometheus.go | 106 ++++- .../darkweak/souin/pkg/api/souin.go | 133 +++++- .../souin/pkg/middleware/configuration.go | 12 + .../souin/pkg/middleware/middleware.go | 391 ++++++++++++------ .../darkweak/souin/pkg/middleware/writer.go | 33 +- .../darkweak/souin/pkg/rfc/revalidation.go | 184 +-------- .../github.com/darkweak/souin/pkg/rfc/vary.go | 8 +- .../souin/pkg/storage/abstractProvider.go | 57 --- .../souin/pkg/storage/cacheProvider.go | 178 -------- .../souin/pkg/storage/defaultProvider.go | 213 ++++++++++ .../darkweak/souin/pkg/storage/types/types.go | 23 +- .../souin/pkg/surrogate/providers/common.go | 119 +++--- 26 files changed, 1013 insertions(+), 949 deletions(-) delete mode 100644 plugins/traefik/vendor/github.com/darkweak/souin/api/main.go delete mode 100644 plugins/traefik/vendor/github.com/darkweak/souin/api/prometheus/prometheus.go delete mode 100644 plugins/traefik/vendor/github.com/darkweak/souin/api/souin.go delete mode 100644 plugins/traefik/vendor/github.com/darkweak/souin/api/types.go create mode 100644 plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/debug/debug.go delete mode 100644 plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/abstractProvider.go delete mode 100644 plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/cacheProvider.go create mode 100644 plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/defaultProvider.go diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/api/main.go 
b/plugins/traefik/vendor/github.com/darkweak/souin/api/main.go deleted file mode 100644 index 03230b136..000000000 --- a/plugins/traefik/vendor/github.com/darkweak/souin/api/main.go +++ /dev/null @@ -1,50 +0,0 @@ -package api - -import ( - "net/http" - - "github.com/darkweak/souin/configurationtypes" - "github.com/darkweak/souin/pkg/storage/types" - "github.com/darkweak/souin/pkg/surrogate/providers" -) - -// MapHandler is a map to store the available http Handlers -type MapHandler struct { - Handlers *map[string]http.HandlerFunc -} - -// GenerateHandlerMap generate the MapHandler -func GenerateHandlerMap( - configuration configurationtypes.AbstractConfigurationInterface, - storers []types.Storer, - surrogateStorage providers.SurrogateInterface, -) *MapHandler { - hm := make(map[string]http.HandlerFunc) - shouldEnable := false - - souinAPI := configuration.GetAPI() - basePathAPIS := souinAPI.BasePath - if basePathAPIS == "" { - basePathAPIS = "/souin-api" - } - - for _, endpoint := range Initialize(configuration, storers, surrogateStorage) { - if endpoint.IsEnabled() { - shouldEnable = true - if e, ok := endpoint.(*SouinAPI); ok { - hm[basePathAPIS+endpoint.GetBasePath()] = e.HandleRequest - } - } - } - - if shouldEnable { - return &MapHandler{Handlers: &hm} - } - - return nil -} - -// Initialize contains all apis that should be enabled -func Initialize(c configurationtypes.AbstractConfigurationInterface, storers []types.Storer, surrogateStorage providers.SurrogateInterface) []EndpointInterface { - return []EndpointInterface{initializeSouin(c, storers, surrogateStorage)} -} diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/api/prometheus/prometheus.go b/plugins/traefik/vendor/github.com/darkweak/souin/api/prometheus/prometheus.go deleted file mode 100644 index 2712571d6..000000000 --- a/plugins/traefik/vendor/github.com/darkweak/souin/api/prometheus/prometheus.go +++ /dev/null @@ -1,5 +0,0 @@ -package prometheus - -const RequestCounter = "" - -func Increment(string) {} diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/api/souin.go b/plugins/traefik/vendor/github.com/darkweak/souin/api/souin.go deleted file mode 100644 index 8f5305cb9..000000000 --- a/plugins/traefik/vendor/github.com/darkweak/souin/api/souin.go +++ /dev/null @@ -1,233 +0,0 @@ -package api - -import ( - "encoding/json" - "fmt" - "net/http" - "regexp" - "strings" - - "github.com/darkweak/souin/configurationtypes" - "github.com/darkweak/souin/pkg/storage/types" - "github.com/darkweak/souin/pkg/surrogate/providers" -) - -// SouinAPI object contains informations related to the endpoints -type SouinAPI struct { - basePath string - enabled bool - storers []types.Storer - surrogateStorage providers.SurrogateInterface - allowedMethods []string -} - -type invalidationType string - -const ( - uriInvalidationType invalidationType = "uri" - uriPrefixInvalidationType invalidationType = "uri-prefix" - originInvalidationType invalidationType = "origin" - groupInvalidationType invalidationType = "group" -) - -type invalidation struct { - Type invalidationType `json:"type"` - Selectors []string `json:"selectors"` - Groups []string `json:"groups"` - Purge bool `json:"purge"` -} - -func initializeSouin( - configuration configurationtypes.AbstractConfigurationInterface, - storers []types.Storer, - surrogateStorage providers.SurrogateInterface, -) *SouinAPI { - basePath := configuration.GetAPI().Souin.BasePath - if basePath == "" { - basePath = "/souin" - } - - allowedMethods := 
configuration.GetDefaultCache().GetAllowedHTTPVerbs() - if len(allowedMethods) == 0 { - allowedMethods = []string{http.MethodGet, http.MethodHead} - } - - return &SouinAPI{ - basePath, - configuration.GetAPI().Souin.Enable, - storers, - surrogateStorage, - allowedMethods, - } -} - -// BulkDelete allow user to delete multiple items with regexp -func (s *SouinAPI) BulkDelete(key string) { - for _, current := range s.storers { - current.DeleteMany(key) - } -} - -// Delete will delete a record into the provider cache system and will update the Souin API if enabled -func (s *SouinAPI) Delete(key string) { - for _, current := range s.storers { - current.Delete(key) - } -} - -// GetAll will retrieve all stored keys in the provider -func (s *SouinAPI) GetAll() []string { - keys := []string{} - for _, current := range s.storers { - keys = append(keys, current.ListKeys()...) - } - - return keys -} - -// GetBasePath will return the basepath for this resource -func (s *SouinAPI) GetBasePath() string { - return s.basePath -} - -// IsEnabled will return enabled status -func (s *SouinAPI) IsEnabled() bool { - return s.enabled -} - -func (s *SouinAPI) listKeys(search string) []string { - res := []string{} - re, err := regexp.Compile(search) - if err != nil { - return res - } - for _, key := range s.GetAll() { - if re.MatchString(key) { - res = append(res, key) - } - } - - return res -} - -// HandleRequest will handle the request -func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { - res := []byte{} - compile := regexp.MustCompile(s.GetBasePath()+"/.+").FindString(r.RequestURI) != "" - switch r.Method { - case http.MethodGet: - if regexp.MustCompile(s.GetBasePath()+"/surrogate_keys").FindString(r.RequestURI) != "" { - res, _ = json.Marshal(s.surrogateStorage.List()) - } else if compile { - search := regexp.MustCompile(s.GetBasePath()+"/(.+)").FindAllStringSubmatch(r.RequestURI, -1)[0][1] - res, _ = json.Marshal(s.listKeys(search)) - if len(res) == 2 { - w.WriteHeader(http.StatusNotFound) - } - } else { - res, _ = json.Marshal(s.GetAll()) - } - w.Header().Set("Content-Type", "application/json") - case http.MethodPost: - var invalidator invalidation - err := json.NewDecoder(r.Body).Decode(&invalidator) - if err != nil { - w.WriteHeader(http.StatusBadRequest) - return - } - - keysToInvalidate := []string{} - switch invalidator.Type { - case groupInvalidationType: - keysToInvalidate, _ = s.surrogateStorage.Purge(http.Header{"Surrogate-Key": invalidator.Groups}) - case uriPrefixInvalidationType, uriInvalidationType: - bodyKeys := []string{} - listedKeys := s.GetAll() - for _, k := range invalidator.Selectors { - if !strings.Contains(k, "//") { - rq, err := http.NewRequest(http.MethodGet, "//"+k, nil) - if err != nil { - continue - } - - bodyKeys = append(bodyKeys, rq.Host+"-"+rq.URL.Path) - } - } - - for _, allKey := range listedKeys { - for _, bk := range bodyKeys { - if invalidator.Type == uriInvalidationType { - if strings.Contains(allKey, bk) && strings.Contains(allKey, bk+"-") && strings.HasSuffix(allKey, bk) { - keysToInvalidate = append(keysToInvalidate, allKey) - break - } - } else { - if strings.Contains(allKey, bk) && - (strings.Contains(allKey, bk+"-") || strings.Contains(allKey, bk+"?") || strings.Contains(allKey, bk+"/") || strings.HasSuffix(allKey, bk)) { - keysToInvalidate = append(keysToInvalidate, allKey) - break - } - } - } - } - case originInvalidationType: - bodyKeys := []string{} - listedKeys := s.GetAll() - for _, k := range invalidator.Selectors { - if 
!strings.Contains(k, "//") { - rq, err := http.NewRequest(http.MethodGet, "//"+k, nil) - if err != nil { - continue - } - - bodyKeys = append(bodyKeys, rq.Host) - } - } - - for _, allKey := range listedKeys { - for _, bk := range bodyKeys { - if strings.Contains(allKey, bk) { - keysToInvalidate = append(keysToInvalidate, allKey) - break - } - } - } - } - - for _, k := range keysToInvalidate { - for _, current := range s.storers { - current.Delete(k) - } - } - w.WriteHeader(http.StatusOK) - case "PURGE": - if compile { - keysRg := regexp.MustCompile(s.GetBasePath() + "/(.+)") - flushRg := regexp.MustCompile(s.GetBasePath() + "/flush$") - - if flushRg.FindString(r.RequestURI) != "" { - for _, current := range s.storers { - current.DeleteMany(".+") - } - e := s.surrogateStorage.Destruct() - if e != nil { - fmt.Printf("Error while purging the surrogate keys: %+v.", e) - } - fmt.Println("Successfully clear the cache and the surrogate keys storage.") - } else { - submatch := keysRg.FindAllStringSubmatch(r.RequestURI, -1)[0][1] - s.BulkDelete(submatch) - } - } else { - ck, _ := s.surrogateStorage.Purge(r.Header) - for _, k := range ck { - for _, current := range s.storers { - current.Delete(k) - } - } - } - w.WriteHeader(http.StatusNoContent) - default: - } - _, _ = w.Write(res) -} diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/api/types.go b/plugins/traefik/vendor/github.com/darkweak/souin/api/types.go deleted file mode 100644 index a1c5c53e9..000000000 --- a/plugins/traefik/vendor/github.com/darkweak/souin/api/types.go +++ /dev/null @@ -1,12 +0,0 @@ -package api - -import ( - "net/http" -) - -// EndpointInterface is the contract to be able to enable your custom endpoints -type EndpointInterface interface { - GetBasePath() string - IsEnabled() bool - HandleRequest(http.ResponseWriter, *http.Request) -} diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/configurationtypes/types.go b/plugins/traefik/vendor/github.com/darkweak/souin/configurationtypes/types.go index e03ed7f93..f9434b57d 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/configurationtypes/types.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/configurationtypes/types.go @@ -9,6 +9,7 @@ import ( "strings" "time" + "github.com/darkweak/storages/core" "gopkg.in/yaml.v3" ) @@ -198,6 +199,26 @@ type CacheProvider struct { Configuration interface{} `json:"configuration" yaml:"configuration"` } +func (c *CacheProvider) MarshalJSON() ([]byte, error) { + if !c.Found && c.URL == "" && c.Path == "" && c.Configuration == nil && c.Uuid == "" { + return []byte("null"), nil + } + + return json.Marshal(struct { + Uuid string + Found bool `json:"found"` + URL string `json:"url"` + Path string `json:"path"` + Configuration interface{} `json:"configuration"` + }{ + Uuid: c.Uuid, + Found: c.Found, + URL: c.URL, + Path: c.Path, + Configuration: c.Configuration, + }) +} + // Timeout configuration to handle the cache provider and the // reverse-proxy timeout. 
type Timeout struct { @@ -456,7 +477,10 @@ type AbstractConfigurationInterface interface { GetDefaultCache() DefaultCacheInterface GetAPI() API GetLogLevel() string + GetLogger() core.Logger + SetLogger(core.Logger) GetYkeys() map[string]SurrogateKeys GetSurrogateKeys() map[string]SurrogateKeys + IsSurrogateDisabled() bool GetCacheKeys() CacheKeys } diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/context/cache.go b/plugins/traefik/vendor/github.com/darkweak/souin/context/cache.go index c4051f0c4..0e24066b4 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/context/cache.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/context/cache.go @@ -28,6 +28,7 @@ func (cc *cacheContext) SetupContext(c configurationtypes.AbstractConfigurationI if c.GetDefaultCache().GetCacheName() != "" { cc.cacheName = c.GetDefaultCache().GetCacheName() } + c.GetLogger().Debugf("Set %s as Cache-Status name", cc.cacheName) } func (cc *cacheContext) SetContext(req *http.Request) *http.Request { diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/context/graphql.go b/plugins/traefik/vendor/github.com/darkweak/souin/context/graphql.go index 46def862e..d37922136 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/context/graphql.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/context/graphql.go @@ -50,6 +50,7 @@ func (g *graphQLContext) SetContextWithBaseRequest(req *http.Request, baseRq *ht func (g *graphQLContext) SetupContext(c configurationtypes.AbstractConfigurationInterface) { if len(c.GetDefaultCache().GetAllowedHTTPVerbs()) != 0 { g.custom = true + c.GetLogger().Debug("Enable GraphQL logic due to your custom HTTP verbs setup.") } } diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/context/key.go b/plugins/traefik/vendor/github.com/darkweak/souin/context/key.go index d4edc3a85..ab0e99dbe 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/context/key.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/context/key.go @@ -5,7 +5,10 @@ import ( "net/http" "regexp" + "github.com/caddyserver/caddy/v2" + "github.com/caddyserver/caddy/v2/modules/caddyhttp" "github.com/darkweak/souin/configurationtypes" + "github.com/darkweak/storages/core" ) const ( @@ -20,8 +23,8 @@ type keyContext struct { disable_host bool disable_method bool disable_query bool - disable_scheme bool disable_vary bool + disable_scheme bool displayable bool hash bool headers []string @@ -50,24 +53,34 @@ func (g *keyContext) SetupContext(c configurationtypes.AbstractConfigurationInte g.overrides = make([]map[*regexp.Regexp]keyContext, 0) - // for _, cacheKey := range c.GetCacheKeys() { - // for r, v := range cacheKey { - // g.overrides = append(g.overrides, map[*regexp.Regexp]keyContext{r.Regexp: { - // disable_body: v.DisableBody, - // disable_host: v.DisableHost, - // disable_method: v.DisableMethod, - // disable_query: v.DisableQuery, - // disable_scheme: v.DisableScheme, - // hash: v.Hash, - // displayable: !v.Hide, - // template: v.Template, - // headers: v.Headers, - // }}) - // } - // } - - g.initializer = func(r *http.Request) *http.Request { - return r + for _, cacheKey := range c.GetCacheKeys() { + for r, v := range cacheKey { + g.overrides = append(g.overrides, map[*regexp.Regexp]keyContext{r.Regexp: { + disable_body: v.DisableBody, + disable_host: v.DisableHost, + disable_method: v.DisableMethod, + disable_query: v.DisableQuery, + disable_scheme: v.DisableScheme, + disable_vary: v.DisableVary, + hash: v.Hash, + displayable: !v.Hide, + template: 
v.Template, + headers: v.Headers, + }}) + } + } + + switch c.GetPluginName() { + case "caddy": + g.initializer = func(r *http.Request) *http.Request { + return r + } + default: + g.initializer = func(r *http.Request) *http.Request { + repl := caddy.NewReplacer() + + return caddyhttp.PrepareRequest(r, repl, nil, nil) + } } } @@ -107,6 +120,9 @@ func parseKeyInformations(req *http.Request, kCtx keyContext) (query, body, host } func (g *keyContext) computeKey(req *http.Request) (key string, headers []string, hash, displayable bool) { + if g.template != "" { + return req.Context().Value(caddy.ReplacerCtxKey).(*caddy.Replacer).ReplaceAll(g.template, ""), g.headers, g.hash, g.displayable + } key = req.URL.Path query, body, host, scheme, method, headerValues, headers, displayable, hash := parseKeyInformations(req, *g) @@ -114,6 +130,9 @@ func (g *keyContext) computeKey(req *http.Request) (key string, headers []string for _, current := range g.overrides { for k, v := range current { if k.MatchString(req.RequestURI) { + if v.template != "" { + return req.Context().Value(caddy.ReplacerCtxKey).(*caddy.Replacer).ReplaceAll(v.template, ""), v.headers, v.hash, v.displayable + } query, body, host, scheme, method, headerValues, headers, displayable, hash = parseKeyInformations(req, v) hasOverride = true break @@ -139,9 +158,13 @@ func (g *keyContext) SetContext(req *http.Request) *http.Request { context.WithValue( context.WithValue( context.WithValue( - req.Context(), - Key, - key, + context.WithValue( + req.Context(), + Key, + key, + ), + core.DISABLE_VARY_CTX, //nolint:staticcheck // we don't care about collision + g.disable_vary, ), Hashed, hash, diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/context/method.go b/plugins/traefik/vendor/github.com/darkweak/souin/context/method.go index ee772a3de..502e4f2cd 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/context/method.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/context/method.go @@ -26,6 +26,7 @@ func (m *methodContext) SetupContext(c configurationtypes.AbstractConfigurationI m.allowedVerbs = c.GetDefaultCache().GetAllowedHTTPVerbs() m.custom = true } + c.GetLogger().Debugf("Allow %d method(s). 
%v.", len(m.allowedVerbs), m.allowedVerbs) } func (m *methodContext) SetContext(req *http.Request) *http.Request { diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/context/mode.go b/plugins/traefik/vendor/github.com/darkweak/souin/context/mode.go index ec2d5221d..d436152b5 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/context/mode.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/context/mode.go @@ -22,6 +22,7 @@ func (mc *ModeContext) SetupContext(c configurationtypes.AbstractConfigurationIn mc.Bypass_request = mode == "bypass" || mode == "bypass_request" mc.Bypass_response = mode == "bypass" || mode == "bypass_response" mc.Strict = !mc.Bypass_request && !mc.Bypass_response + c.GetLogger().Debugf("The cache logic will run as %s: %+v", mode, mc) } func (mc *ModeContext) SetContext(req *http.Request) *http.Request { diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/context/now.go b/plugins/traefik/vendor/github.com/darkweak/souin/context/now.go index d0d4e0f3b..da52beb39 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/context/now.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/context/now.go @@ -19,8 +19,16 @@ func (*nowContext) SetContextWithBaseRequest(req *http.Request, _ *http.Request) func (cc *nowContext) SetupContext(_ configurationtypes.AbstractConfigurationInterface) {} func (cc *nowContext) SetContext(req *http.Request) *http.Request { - now := time.Now().UTC() - req.Header.Set("Date", now.Format(time.RFC1123)) + var now time.Time + var e error + + now, e = time.Parse(time.RFC1123, req.Header.Get("Date")) + + if e != nil { + now = time.Now().UTC() + req.Header.Set("Date", now.Format(time.RFC1123)) + } + return req.WithContext(context.WithValue(req.Context(), Now, now)) } diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/context/timeout.go b/plugins/traefik/vendor/github.com/darkweak/souin/context/timeout.go index 4da27d984..a38fb593e 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/context/timeout.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/context/timeout.go @@ -35,6 +35,8 @@ func (t *timeoutContext) SetupContext(c configurationtypes.AbstractConfiguration if c.GetDefaultCache().GetTimeout().Backend.Duration != 0 { t.timeoutBackend = c.GetDefaultCache().GetTimeout().Backend.Duration } + c.GetLogger().Infof("Set backend timeout to %v", t.timeoutBackend) + c.GetLogger().Infof("Set cache timeout to %v", t.timeoutCache) } func (t *timeoutContext) SetContext(req *http.Request) *http.Request { diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/debug/debug.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/debug/debug.go new file mode 100644 index 000000000..00856f5c5 --- /dev/null +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/debug/debug.go @@ -0,0 +1,87 @@ +package debug + +import ( + "net/http" + "net/http/pprof" + "strings" + + "github.com/darkweak/souin/configurationtypes" +) + +// DebugAPI object contains informations related to the endpoints +type DebugAPI struct { + basePath string + enabled bool +} + +type DefaultHandler struct{} + +func (d *DefaultHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + pprof.Index(w, r) +} + +// InitializeDebug initialize the debug endpoints +func InitializeDebug(configuration configurationtypes.AbstractConfigurationInterface) *DebugAPI { + basePath := configuration.GetAPI().Debug.BasePath + enabled := configuration.GetAPI().Debug.Enable + if basePath == "" { + basePath = "/debug/" 
+ } + + return &DebugAPI{ + basePath, + enabled, + } +} + +// GetBasePath will return the basepath for this resource +func (p *DebugAPI) GetBasePath() string { + return p.basePath +} + +// IsEnabled will return enabled status +func (p *DebugAPI) IsEnabled() bool { + return p.enabled +} + +// HandleRequest will handle the request +func (p *DebugAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { + var executor http.Handler + executor = &DefaultHandler{} + + if strings.Contains(r.RequestURI, "allocs") { + executor = pprof.Handler("allocs") + } + if strings.Contains(r.RequestURI, "cmdline") { + executor = pprof.Handler("cmdline") + } + if strings.Contains(r.RequestURI, "profile") { + executor = pprof.Handler("profile") + } + if strings.Contains(r.RequestURI, "symbol") { + executor = pprof.Handler("symbol") + } + if strings.Contains(r.RequestURI, "trace") { + executor = pprof.Handler("trace") + } + if strings.Contains(r.RequestURI, "goroutine") { + executor = pprof.Handler("goroutine") + } + if strings.Contains(r.RequestURI, "heap") { + executor = pprof.Handler("heap") + } + if strings.Contains(r.RequestURI, "block") { + executor = pprof.Handler("block") + } + if strings.Contains(r.RequestURI, "heap") { + executor = pprof.Handler("heap") + } + if strings.Contains(r.RequestURI, "mutex") { + executor = pprof.Handler("mutex") + } + if strings.Contains(r.RequestURI, "threadcreate") { + executor = pprof.Handler("threadcreate") + } + + executor.ServeHTTP(w, r) +} diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/main.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/main.go index 03230b136..810f447ff 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/main.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/main.go @@ -4,6 +4,8 @@ import ( "net/http" "github.com/darkweak/souin/configurationtypes" + "github.com/darkweak/souin/pkg/api/debug" + "github.com/darkweak/souin/pkg/api/prometheus" "github.com/darkweak/souin/pkg/storage/types" "github.com/darkweak/souin/pkg/surrogate/providers" ) @@ -31,9 +33,7 @@ func GenerateHandlerMap( for _, endpoint := range Initialize(configuration, storers, surrogateStorage) { if endpoint.IsEnabled() { shouldEnable = true - if e, ok := endpoint.(*SouinAPI); ok { - hm[basePathAPIS+endpoint.GetBasePath()] = e.HandleRequest - } + hm[basePathAPIS+endpoint.GetBasePath()] = endpoint.HandleRequest } } @@ -46,5 +46,6 @@ func GenerateHandlerMap( // Initialize contains all apis that should be enabled func Initialize(c configurationtypes.AbstractConfigurationInterface, storers []types.Storer, surrogateStorage providers.SurrogateInterface) []EndpointInterface { - return []EndpointInterface{initializeSouin(c, storers, surrogateStorage)} + return []EndpointInterface{initializeSouin(c, storers, + surrogateStorage), debug.InitializeDebug(c), prometheus.InitializePrometheus(c)} } diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/prometheus/prometheus.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/prometheus/prometheus.go index 2712571d6..051d122f6 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/prometheus/prometheus.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/prometheus/prometheus.go @@ -1,5 +1,107 @@ package prometheus -const RequestCounter = "" +import ( + "net/http" -func Increment(string) {} + "github.com/darkweak/souin/configurationtypes" + "github.com/prometheus/client_golang/prometheus" + 
"github.com/prometheus/client_golang/prometheus/promauto" + "github.com/prometheus/client_golang/prometheus/promhttp" +) + +const ( + counter = "counter" + average = "average" + + RequestCounter = "souin_request_upstream_counter" + RequestRevalidationCounter = "souin_request_revalidation_counter" + NoCachedResponseCounter = "souin_no_cached_response_counter" + CachedResponseCounter = "souin_cached_response_counter" + AvgResponseTime = "souin_avg_response_time" +) + +// PrometheusAPI object contains informations related to the endpoints +type PrometheusAPI struct { + basePath string + enabled bool +} + +// InitializePrometheus initialize the prometheus endpoints +func InitializePrometheus(configuration configurationtypes.AbstractConfigurationInterface) *PrometheusAPI { + basePath := configuration.GetAPI().Prometheus.BasePath + enabled := configuration.GetAPI().Prometheus.Enable + if basePath == "" { + basePath = "/metrics" + } + + if registered == nil { + run() + } + return &PrometheusAPI{ + basePath, + enabled, + } +} + +// GetBasePath will return the basepath for this resource +func (p *PrometheusAPI) GetBasePath() string { + return p.basePath +} + +// IsEnabled will return enabled status +func (p *PrometheusAPI) IsEnabled() bool { + return p.enabled +} + +// HandleRequest will handle the request +func (p *PrometheusAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { + promhttp.Handler().ServeHTTP(w, r) +} + +var registered map[string]interface{} + +// Increment will increment the counter. +func Increment(name string) { + if _, ok := registered[name]; ok { + registered[name].(prometheus.Counter).Inc() + } +} + +// Increment will add the referred value the counter. +func Add(name string, value float64) { + if c, ok := registered[name].(prometheus.Counter); ok { + c.Add(value) + } + if g, ok := registered[name].(prometheus.Histogram); ok { + g.Observe(value) + } +} + +func push(promType, name, help string) { + switch promType { + case counter: + registered[name] = promauto.NewCounter(prometheus.CounterOpts{ + Name: name, + Help: help, + }) + + return + case average: + avg := prometheus.NewHistogram(prometheus.HistogramOpts{ + Name: name, + Help: help, + }) + prometheus.MustRegister(avg) + registered[name] = avg + } +} + +// Run populate and prepare the map with the default values. 
+func run() { + registered = make(map[string]interface{}) + push(counter, RequestCounter, "Total upstream request counter") + push(counter, RequestRevalidationCounter, "Total revalidation request revalidation counter") + push(counter, NoCachedResponseCounter, "No cached response counter") + push(counter, CachedResponseCounter, "Cached response counter") + push(average, AvgResponseTime, "Average response time") +} diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/souin.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/souin.go index 8f5305cb9..131723edb 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/souin.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/souin.go @@ -6,10 +6,14 @@ import ( "net/http" "regexp" "strings" + "time" "github.com/darkweak/souin/configurationtypes" "github.com/darkweak/souin/pkg/storage/types" "github.com/darkweak/souin/pkg/surrogate/providers" + "github.com/darkweak/storages/core" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" ) // SouinAPI object contains informations related to the endpoints @@ -62,16 +66,54 @@ func initializeSouin( } // BulkDelete allow user to delete multiple items with regexp -func (s *SouinAPI) BulkDelete(key string) { +func (s *SouinAPI) BulkDelete(key string, purge bool) { + key, _ = strings.CutPrefix(key, core.MappingKeyPrefix) for _, current := range s.storers { - current.DeleteMany(key) + if b := current.Get(core.MappingKeyPrefix + key); len(b) > 0 { + var mapping core.StorageMapper + + if e := proto.Unmarshal(b, &mapping); e == nil { + for k := range mapping.GetMapping() { + current.Delete(k) + } + } + + if !purge { + newFreshTime := time.Now() + for k, v := range mapping.Mapping { + v.FreshTime = timestamppb.New(newFreshTime) + mapping.Mapping[k] = v + } + + v, e := proto.Marshal(&mapping) + if e != nil { + fmt.Println("Impossible to re-encode the mapping", core.MappingKeyPrefix+key) + current.Delete(core.MappingKeyPrefix + key) + } + _ = current.Set(core.MappingKeyPrefix+key, v, storageToInfiniteTTLMap[current.Name()]) + } + } + + if purge { + current.Delete(core.MappingKeyPrefix + key) + } + + current.Delete(key) } + + s.Delete(key) } // Delete will delete a record into the provider cache system and will update the Souin API if enabled +// The key can be a regexp to delete multiple items func (s *SouinAPI) Delete(key string) { + _, err := regexp.Compile(key) for _, current := range s.storers { - current.Delete(key) + if err != nil { + current.DeleteMany(key) + } else { + current.Delete(key) + } } } @@ -110,6 +152,64 @@ func (s *SouinAPI) listKeys(search string) []string { return res } +var storageToInfiniteTTLMap = map[string]time.Duration{ + "BADGER": types.OneYearDuration, + "ETCD": types.OneYearDuration, + "GO-REDIS": 0, + "NUTS": 0, + "OLRIC": types.OneYearDuration, + "OTTER": types.OneYearDuration, + "REDIS": -1, + "SIMPLEFS": 0, + types.DefaultStorageName: types.OneYearDuration, +} + +func EvictMapping(current types.Storer) { + values := current.MapKeys(core.MappingKeyPrefix) + now := time.Now() + infiniteStoreDuration := storageToInfiniteTTLMap[current.Name()] + + for k, v := range values { + mapping := &core.StorageMapper{} + + e := proto.Unmarshal([]byte(v), mapping) + if e != nil { + current.Delete(core.MappingKeyPrefix + k) + continue + } + + updated := false + for key, val := range mapping.GetMapping() { + if now.Sub(val.FreshTime.AsTime()) > 0 && now.Sub(val.StaleTime.AsTime()) > 0 { + 
delete(mapping.GetMapping(), key) + updated = true + } + } + + if updated { + v, e := proto.Marshal(mapping) + if e != nil { + fmt.Println("Impossible to re-encode the mapping", core.MappingKeyPrefix+k) + current.Delete(core.MappingKeyPrefix + k) + } + _ = current.Set(core.MappingKeyPrefix+k, v, infiniteStoreDuration) + } + + if len(mapping.GetMapping()) == 0 { + current.Delete(core.MappingKeyPrefix + k) + } + } + time.Sleep(time.Minute) +} + +func (s *SouinAPI) purgeMapping() { + for _, current := range s.storers { + EvictMapping(current) + } + + fmt.Println("Successfully clear the mappings.") +} + // HandleRequest will handle the request func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { res := []byte{} @@ -130,6 +230,9 @@ func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") case http.MethodPost: var invalidator invalidation + defer func() { + _ = r.Body.Close() + }() err := json.NewDecoder(r.Body).Decode(&invalidator) if err != nil { w.WriteHeader(http.StatusBadRequest) @@ -139,7 +242,9 @@ func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { keysToInvalidate := []string{} switch invalidator.Type { case groupInvalidationType: - keysToInvalidate, _ = s.surrogateStorage.Purge(http.Header{"Surrogate-Key": invalidator.Groups}) + var surrogateKeys []string + keysToInvalidate, surrogateKeys = s.surrogateStorage.Purge(http.Header{"Surrogate-Key": invalidator.Groups}) + keysToInvalidate = append(keysToInvalidate, surrogateKeys...) case uriPrefixInvalidationType, uriInvalidationType: bodyKeys := []string{} listedKeys := s.GetAll() @@ -195,15 +300,14 @@ func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { } for _, k := range keysToInvalidate { - for _, current := range s.storers { - current.Delete(k) - } + s.BulkDelete(k, invalidator.Purge) } w.WriteHeader(http.StatusOK) case "PURGE": if compile { keysRg := regexp.MustCompile(s.GetBasePath() + "/(.+)") flushRg := regexp.MustCompile(s.GetBasePath() + "/flush$") + mappingRg := regexp.MustCompile(s.GetBasePath() + "/mapping$") if flushRg.FindString(r.RequestURI) != "" { for _, current := range s.storers { @@ -214,16 +318,21 @@ func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { fmt.Printf("Error while purging the surrogate keys: %+v.", e) } fmt.Println("Successfully clear the cache and the surrogate keys storage.") + } else if mappingRg.FindString(r.RequestURI) != "" { + s.purgeMapping() } else { submatch := keysRg.FindAllStringSubmatch(r.RequestURI, -1)[0][1] - s.BulkDelete(submatch) + for _, current := range s.storers { + current.DeleteMany(submatch) + } } } else { - ck, _ := s.surrogateStorage.Purge(r.Header) + ck, surrogateKeys := s.surrogateStorage.Purge(r.Header) for _, k := range ck { - for _, current := range s.storers { - current.Delete(k) - } + s.BulkDelete(k, true) + } + for _, k := range surrogateKeys { + s.BulkDelete("SURROGATE_"+k, true) } } w.WriteHeader(http.StatusNoContent) diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/configuration.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/configuration.go index 238dfcdb8..34bdd0c56 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/configuration.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/configuration.go @@ -2,6 +2,7 @@ package middleware import ( "github.com/darkweak/souin/configurationtypes" + 
"github.com/darkweak/storages/core" ) // BaseConfiguration holder @@ -11,6 +12,7 @@ type BaseConfiguration struct { CacheKeys configurationtypes.CacheKeys `json:"cache_keys" yaml:"cache_keys"` URLs map[string]configurationtypes.URL `json:"urls" yaml:"urls"` LogLevel string `json:"log_level" yaml:"log_level"` + Logger core.Logger PluginName string Ykeys map[string]configurationtypes.SurrogateKeys `json:"ykeys" yaml:"ykeys"` SurrogateKeys map[string]configurationtypes.SurrogateKeys `json:"surrogate_keys" yaml:"surrogate_keys"` @@ -42,6 +44,16 @@ func (c *BaseConfiguration) GetLogLevel() string { return c.LogLevel } +// GetLogger get the logger +func (c *BaseConfiguration) GetLogger() core.Logger { + return c.Logger +} + +// SetLogger set the logger +func (c *BaseConfiguration) SetLogger(l core.Logger) { + c.Logger = l +} + // GetYkeys get the ykeys list func (c *BaseConfiguration) GetYkeys() map[string]configurationtypes.SurrogateKeys { return c.SurrogateKeys diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go index e81c15dfc..33e4f1e54 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "io" + "maps" "net/http" "net/http/httputil" "regexp" @@ -13,29 +14,123 @@ import ( "sync" "time" + xxhash "github.com/cespare/xxhash/v2" "github.com/darkweak/souin/configurationtypes" "github.com/darkweak/souin/context" "github.com/darkweak/souin/helpers" "github.com/darkweak/souin/pkg/api" + "github.com/darkweak/souin/pkg/api/prometheus" "github.com/darkweak/souin/pkg/rfc" "github.com/darkweak/souin/pkg/storage" "github.com/darkweak/souin/pkg/storage/types" "github.com/darkweak/souin/pkg/surrogate" "github.com/darkweak/souin/pkg/surrogate/providers" + "github.com/darkweak/storages/core" "github.com/google/uuid" "github.com/pquerna/cachecontrol/cacheobject" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" "golang.org/x/sync/singleflight" ) +func reorderStorers(storers []types.Storer, expectedStorers []string) []types.Storer { + if len(expectedStorers) == 0 { + return storers + } + + newStorers := make([]types.Storer, 0) + for _, expectedStorer := range expectedStorers { + for _, storer := range storers { + if storer.Name() == strings.ToUpper(expectedStorer) { + newStorers = append(newStorers, storer) + } + } + } + + return newStorers +} + +func registerMappingKeysEviction(logger core.Logger, storers []types.Storer) { + for _, storer := range storers { + logger.Debugf("registering mapping eviction for storer %s", storer.Name()) + go func(current types.Storer) { + for { + logger.Debugf("run mapping eviction for storer %s", current.Name()) + + api.EvictMapping(current) + } + }(storer) + } +} + func NewHTTPCacheHandler(c configurationtypes.AbstractConfigurationInterface) *SouinBaseHandler { - storers, err := storage.NewStorages(c) - if err != nil { - panic(err) + if c.GetLogger() == nil { + var logLevel zapcore.Level + if c.GetLogLevel() == "" { + logLevel = zapcore.FatalLevel + } else if err := logLevel.UnmarshalText([]byte(c.GetLogLevel())); err != nil { + logLevel = zapcore.FatalLevel + } + cfg := zap.Config{ + Encoding: "json", + Level: zap.NewAtomicLevelAt(logLevel), + OutputPaths: []string{"stderr"}, + ErrorOutputPaths: []string{"stderr"}, + EncoderConfig: zapcore.EncoderConfig{ + MessageKey: "message", + + LevelKey: "level", + 
EncodeLevel: zapcore.CapitalLevelEncoder, + + TimeKey: "time", + EncodeTime: zapcore.ISO8601TimeEncoder, + + CallerKey: "caller", + EncodeCaller: zapcore.ShortCallerEncoder, + }, + } + logger, _ := cfg.Build() + c.SetLogger(logger.Sugar()) } - fmt.Println("Storers initialized.") + + storedStorers := core.GetRegisteredStorers() + storers := []types.Storer{} + if len(storedStorers) != 0 { + dc := c.GetDefaultCache() + for _, s := range []string{dc.GetBadger().Uuid, dc.GetEtcd().Uuid, dc.GetNats().Uuid, dc.GetNuts().Uuid, dc.GetOlric().Uuid, dc.GetOtter().Uuid, dc.GetRedis().Uuid, dc.GetSimpleFS().Uuid} { + if s != "" { + if st := core.GetRegisteredStorer(s); st != nil { + storers = append(storers, st.(types.Storer)) + } + } + } + + storers = reorderStorers(storers, c.GetDefaultCache().GetStorers()) + + if len(storers) > 0 { + names := []string{} + for _, storer := range storers { + names = append(names, storer.Name()) + } + c.GetLogger().Debugf("You're running Souin with the following storages in this order %s", strings.Join(names, ", ")) + } + } + if len(storers) == 0 { + c.GetLogger().Warn("You're running Souin with the default storage that is not optimized and for development purpose. We recommend to use at least one of the storages from https://github.com/darkweak/storages") + + memoryStorer, _ := storage.Factory(c) + if st := core.GetRegisteredStorer(types.DefaultStorageName + "-"); st != nil { + memoryStorer = st.(types.Storer) + } else { + core.RegisterStorage(memoryStorer) + } + storers = append(storers, memoryStorer) + } + + c.GetLogger().Debugf("Storer initialized: %#v.", storers) regexpUrls := helpers.InitializeRegexp(c) - surrogateStorage := surrogate.InitializeSurrogate(c, storers[0].Name()) - fmt.Println("Surrogate storage initialized.") + surrogateStorage := surrogate.InitializeSurrogate(c, fmt.Sprintf("%s-%s", storers[0].Name(), storers[0].Uuid())) + c.GetLogger().Debug("Surrogate storage initialized.") var excludedRegexp *regexp.Regexp = nil if c.GetDefaultCache().GetRegex().Exclude != "" { excludedRegexp = regexp.MustCompile(c.GetDefaultCache().GetRegex().Exclude) @@ -54,7 +149,9 @@ func NewHTTPCacheHandler(c configurationtypes.AbstractConfigurationInterface) *S Headers: c.GetDefaultCache().GetHeaders(), DefaultCacheControl: c.GetDefaultCache().GetDefaultCacheControl(), } - fmt.Println("Souin configuration is now loaded.") + c.GetLogger().Info("Souin configuration is now loaded.") + + registerMappingKeysEviction(c.GetLogger(), storers) return &SouinBaseHandler{ Configuration: c, @@ -137,6 +234,7 @@ func (s *SouinBaseHandler) Store( rq *http.Request, requestCc *cacheobject.RequestCacheDirectives, cachedKey string, + uri string, ) error { statusCode := customWriter.GetStatusCode() if !isCacheableCode(statusCode) && !s.hasAllowedAdditionalStatusCodesToCache(statusCode) { @@ -161,6 +259,7 @@ func (s *SouinBaseHandler) Store( } responseCc, _ := cacheobject.ParseResponseCacheControl(rfc.HeaderAllCommaSepValuesString(customWriter.Header(), headerName)) + s.Configuration.GetLogger().Debugf("Response cache-control %+v", responseCc) if responseCc == nil { customWriter.Header().Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=INVALID-RESPONSE-CACHE-CONTROL", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) return nil @@ -258,6 +357,11 @@ func (s *SouinBaseHandler) Store( status += "; detail=UPSTREAM-VARY-STAR" } else { variedKey := cachedKey + rfc.GetVariedCacheKey(rq, variedHeaders) + if rq.Context().Value(context.Hashed).(bool) { + 
cachedKey = fmt.Sprint(xxhash.Sum64String(cachedKey)) + variedKey = fmt.Sprint(xxhash.Sum64String(variedKey)) + } + s.Configuration.GetLogger().Debugf("Store the response for %s with duration %v", variedKey, ma) var wg sync.WaitGroup mu := sync.Mutex{} @@ -271,32 +375,61 @@ func (s *SouinBaseHandler) Store( hn := strings.Split(hname, ":") vhs.Set(hn[0], rq.Header.Get(hn[0])) } - for _, storer := range s.Storers { - wg.Add(1) - go func(currentStorer types.Storer) { - defer wg.Done() - if currentStorer.SetMultiLevel( - cachedKey, - variedKey, - response, - vhs, - res.Header.Get("Etag"), ma, - variedKey, - ) == nil { - res.Request = rq - } else { - mu.Lock() - fails = append(fails, fmt.Sprintf("; detail=%s-INSERTION-ERROR", currentStorer.Name())) - mu.Unlock() + if upstreamStorerTarget := res.Header.Get("X-Souin-Storer"); upstreamStorerTarget != "" { + res.Header.Del("X-Souin-Storer") + + var overridedStorer types.Storer + for _, storer := range s.Storers { + if strings.Contains(strings.ToLower(storer.Name()), strings.ToLower(upstreamStorerTarget)) { + overridedStorer = storer } - }(storer) + } + + if overridedStorer.SetMultiLevel( + cachedKey, + variedKey, + response, + vhs, + res.Header.Get("Etag"), ma, + variedKey, + ) == nil { + s.Configuration.GetLogger().Debugf("Stored the key %s in the %s provider", variedKey, overridedStorer.Name()) + res.Request = rq + } else { + fails = append(fails, fmt.Sprintf("; detail=%s-INSERTION-ERROR", overridedStorer.Name())) + } + } else { + for _, storer := range s.Storers { + wg.Add(1) + go func(currentStorer types.Storer, currentRes http.Response) { + defer wg.Done() + if currentStorer.SetMultiLevel( + cachedKey, + variedKey, + response, + vhs, + currentRes.Header.Get("Etag"), ma, + variedKey, + ) == nil { + s.Configuration.GetLogger().Debugf("Stored the key %s in the %s provider", variedKey, currentStorer.Name()) + currentRes.Request = rq + } else { + mu.Lock() + fails = append(fails, fmt.Sprintf("; detail=%s-INSERTION-ERROR", currentStorer.Name())) + mu.Unlock() + } + }(storer, res) + } } wg.Wait() if len(fails) < s.storersLen { - go func(rs http.Response, key string) { - _ = s.SurrogateKeyStorer.Store(&rs, key, "") - }(res, variedKey) + if !s.Configuration.IsSurrogateDisabled() { + go func(rs http.Response, key string) { + _ = s.SurrogateKeyStorer.Store(&rs, key, uri) + }(res, variedKey) + } + status += "; stored" } @@ -330,7 +463,11 @@ func (s *SouinBaseHandler) Upstream( next handlerFunc, requestCc *cacheobject.RequestCacheDirectives, cachedKey string, + uri string, ) error { + s.Configuration.GetLogger().Debug("Request the upstream server") + prometheus.Increment(prometheus.RequestCounter) + var recoveredFromErr error = nil defer func() { // In case of "http.ErrAbortHandler" panic, @@ -350,13 +487,16 @@ func (s *SouinBaseHandler) Upstream( if s.Configuration.GetDefaultCache().IsCoalescingDisable() { singleflightCacheKey += uuid.NewString() } - sfValue, err, _ := s.singleflightPool.Do(singleflightCacheKey, func() (interface{}, error) { + sfValue, err, shared := s.singleflightPool.Do(singleflightCacheKey, func() (interface{}, error) { if e := next(customWriter, rq); e != nil { + s.Configuration.GetLogger().Warnf("%#v", e) customWriter.Header().Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=SERVE-HTTP-ERROR", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) return nil, e } - s.SurrogateKeyStorer.Invalidate(rq.Method, customWriter.Header()) + if !s.Configuration.IsSurrogateDisabled() { + 
s.SurrogateKeyStorer.Invalidate(rq.Method, customWriter.Header()) + } statusCode := customWriter.GetStatusCode() if !isCacheableCode(statusCode) && !s.hasAllowedAdditionalStatusCodesToCache(statusCode) { @@ -373,10 +513,8 @@ func (s *SouinBaseHandler) Upstream( customWriter.Header().Set(headerName, s.DefaultMatchedUrl.DefaultCacheControl) } - err := s.Store(customWriter, rq, requestCc, cachedKey) - defer customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + err := s.Store(customWriter, rq, requestCc, cachedKey, uri) + defer customWriter.resetBuffer() return singleflightValue{ body: customWriter.Buf.Bytes(), @@ -399,38 +537,41 @@ func (s *SouinBaseHandler) Upstream( for _, vh := range variedHeaders { if rq.Header.Get(vh) != sfWriter.requestHeaders.Get(vh) { // cachedKey += rfc.GetVariedCacheKey(rq, variedHeaders) - return s.Upstream(customWriter, rq, next, requestCc, cachedKey) + return s.Upstream(customWriter, rq, next, requestCc, cachedKey, uri) } } } } - customWriter.Buf.Reset() - _, _ = customWriter.Write(sfWriter.body) - // Yaegi sucks, we can't use maps. - for k := range sfWriter.headers { - customWriter.Header().Set(k, sfWriter.headers.Get(k)) + if shared { + s.Configuration.GetLogger().Infof("Reused response from concurrent request with the key %s", cachedKey) } + _, _ = customWriter.Write(sfWriter.body) + maps.Copy(customWriter.Header(), sfWriter.headers) customWriter.WriteHeader(sfWriter.code) } return nil } -func (s *SouinBaseHandler) Revalidate(validator *types.Revalidator, next handlerFunc, customWriter *CustomWriter, rq *http.Request, requestCc *cacheobject.RequestCacheDirectives, cachedKey string, uri string) error { +func (s *SouinBaseHandler) Revalidate(validator *core.Revalidator, next handlerFunc, customWriter *CustomWriter, rq *http.Request, requestCc *cacheobject.RequestCacheDirectives, cachedKey string, uri string) error { + s.Configuration.GetLogger().Debug("Revalidate the request with the upstream server") + prometheus.Increment(prometheus.RequestRevalidationCounter) + singleflightCacheKey := cachedKey if s.Configuration.GetDefaultCache().IsCoalescingDisable() { singleflightCacheKey += uuid.NewString() } - sfValue, err, _ := s.singleflightPool.Do(singleflightCacheKey, func() (interface{}, error) { + sfValue, err, shared := s.singleflightPool.Do(singleflightCacheKey, func() (interface{}, error) { err := next(customWriter, rq) - s.SurrogateKeyStorer.Invalidate(rq.Method, customWriter.Header()) + + if !s.Configuration.IsSurrogateDisabled() { + s.SurrogateKeyStorer.Invalidate(rq.Method, customWriter.Header()) + } statusCode := customWriter.GetStatusCode() if err == nil { if validator.IfUnmodifiedSincePresent && statusCode != http.StatusNotModified { - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() customWriter.Rw.WriteHeader(http.StatusPreconditionFailed) return nil, errors.New("") @@ -438,9 +579,7 @@ func (s *SouinBaseHandler) Revalidate(validator *types.Revalidator, next handler if validator.IfModifiedSincePresent { if lastModified, err := time.Parse(time.RFC1123, customWriter.Header().Get("Last-Modified")); err == nil && validator.IfModifiedSince.Sub(lastModified) > 0 { - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() customWriter.Rw.WriteHeader(http.StatusNotModified) return nil, errors.New("") @@ -448,7 +587,7 @@ func (s *SouinBaseHandler) Revalidate(validator *types.Revalidator, next handler } if statusCode != http.StatusNotModified { - err = 
s.Store(customWriter, rq, requestCc, cachedKey) + err = s.Store(customWriter, rq, requestCc, cachedKey, uri) } } @@ -462,9 +601,7 @@ func (s *SouinBaseHandler) Revalidate(validator *types.Revalidator, next handler ), ) - defer customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + defer customWriter.resetBuffer() return singleflightValue{ body: customWriter.Buf.Bytes(), headers: customWriter.Header().Clone(), @@ -473,11 +610,11 @@ func (s *SouinBaseHandler) Revalidate(validator *types.Revalidator, next handler }) if sfWriter, ok := sfValue.(singleflightValue); ok { - _, _ = customWriter.Write(sfWriter.body) - // Yaegi sucks, we can't use maps. - for k := range sfWriter.headers { - customWriter.Header().Set(k, sfWriter.headers.Get(k)) + if shared { + s.Configuration.GetLogger().Infof("Reused response from concurrent request with the key %s", cachedKey) } + _, _ = customWriter.Write(sfWriter.body) + maps.Copy(customWriter.Header(), sfWriter.headers) customWriter.WriteHeader(sfWriter.code) } @@ -493,8 +630,7 @@ func (s *SouinBaseHandler) HandleInternally(r *http.Request) (bool, http.Handler } } - // Because Yægi interpretation sucks, we have to return the empty function instead of nil. - return false, func(w http.ResponseWriter, r *http.Request) {} + return false, nil } type handlerFunc = func(http.ResponseWriter, *http.Request) error @@ -509,6 +645,11 @@ func (s *statusCodeLogger) WriteHeader(code int) { } func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, next handlerFunc) error { + start := time.Now() + defer func(s time.Time) { + prometheus.Add(prometheus.AvgResponseTime, float64(time.Since(s).Milliseconds())) + }(start) + s.Configuration.GetLogger().Debugf("Incomming request %+v", rq) if b, handler := s.HandleInternally(rq); b { handler(rw, rq) return nil @@ -530,14 +671,17 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } err := next(nrw, req) - s.SurrogateKeyStorer.Invalidate(req.Method, rw.Header()) + + if !s.Configuration.IsSurrogateDisabled() { + s.SurrogateKeyStorer.Invalidate(req.Method, rw.Header()) + } if err == nil && req.Method != http.MethodGet && nrw.statusCode < http.StatusBadRequest { // Invalidate related GET keys when the method is not allowed and the response is valid req.Method = http.MethodGet keyname := s.context.SetContext(req, rq).Context().Value(context.Key).(string) for _, storer := range s.Storers { - storer.Delete("IDX_" + keyname) + storer.Delete(core.MappingKeyPrefix + keyname) } } @@ -551,29 +695,26 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=CACHE-CONTROL-EXTRACTION-ERROR") err := next(rw, req) - s.SurrogateKeyStorer.Invalidate(req.Method, rw.Header()) + + if !s.Configuration.IsSurrogateDisabled() { + s.SurrogateKeyStorer.Invalidate(req.Method, rw.Header()) + } return err } req = s.context.SetContext(req, rq) - - isMutationRequest := false - // Yaegi sucks AGAIN, it considers the value as nil if we directly try to cast as bool - mutationRequestValue := req.Context().Value(context.IsMutationRequest) - if mutationRequestValue != nil { - isMutationRequest = mutationRequestValue.(bool) - } - - if isMutationRequest { + if req.Context().Value(context.IsMutationRequest).(bool) { rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=IS-MUTATION-REQUEST") err := next(rw, req) - s.SurrogateKeyStorer.Invalidate(req.Method, rw.Header()) + + if 
!s.Configuration.IsSurrogateDisabled() { + s.SurrogateKeyStorer.Invalidate(req.Method, rw.Header()) + } return err } - cachedKey := req.Context().Value(context.Key).(string) // Need to copy URL path before calling next because it can alter the URI @@ -582,23 +723,27 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n bufPool.Reset() defer s.bufPool.Put(bufPool) customWriter := NewCustomWriter(req, rw, bufPool) - go func(req *http.Request, crw *CustomWriter) { <-req.Context().Done() crw.mutex.Lock() crw.headersSent = true crw.mutex.Unlock() }(req, customWriter) - + s.Configuration.GetLogger().Debugf("Request cache-control %+v", requestCc) if modeContext.Bypass_request || !requestCc.NoCache { validator := rfc.ParseRequest(req) var fresh, stale *http.Response var storerName string + finalKey := cachedKey + if req.Context().Value(context.Hashed).(bool) { + finalKey = fmt.Sprint(xxhash.Sum64String(finalKey)) + } for _, currentStorer := range s.Storers { - fresh, stale = currentStorer.GetMultiLevel(cachedKey, req, validator) + fresh, stale = currentStorer.GetMultiLevel(finalKey, req, validator) if fresh != nil || stale != nil { storerName = currentStorer.Name() + s.Configuration.GetLogger().Debugf("Found at least one valid response in the %s storage", storerName) break } } @@ -613,18 +758,14 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } if validator.NotModified { customWriter.WriteHeader(http.StatusNotModified) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() _, _ = customWriter.Send() return nil } customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, _ = customWriter.Send() return nil @@ -637,6 +778,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n return err } if resCc, _ := cacheobject.ParseResponseCacheControl(rfc.HeaderAllCommaSepValuesString(response.Header, headerName)); !modeContext.Bypass_response && resCc.NoCachePresent { + prometheus.Increment(prometheus.NoCachedResponseCounter) err := s.Revalidate(validator, next, customWriter, req, requestCc, cachedKey, uri) _, _ = customWriter.Send() @@ -648,10 +790,10 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.Header()[h] = v } customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + s.Configuration.GetLogger().Debugf("Serve from cache %+v", req) + customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() + prometheus.Increment(prometheus.CachedResponseCounter) return err } @@ -669,12 +811,10 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() customWriter = NewCustomWriter(req, rw, bufPool) - go func(v *types.Revalidator, goCw *CustomWriter, goRq *http.Request, goNext func(http.ResponseWriter, *http.Request) error, goCc *cacheobject.RequestCacheDirectives, goCk string, goUri string) { + go func(v *core.Revalidator, goCw *CustomWriter, goRq *http.Request, goNext func(http.ResponseWriter, *http.Request) error, goCc 
*cacheobject.RequestCacheDirectives, goCk string, goUri string) { _ = s.Revalidate(v, goNext, goCw, goRq, goCc, goCk, goUri) }(validator, customWriter, req, next, requestCc, cachedKey, uri) buf := s.bufPool.Get().(*bytes.Buffer) @@ -693,23 +833,15 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n code := fmt.Sprintf("; fwd-status=%d", statusCode) rfc.HitStaleCache(&response.Header) response.Header.Set("Cache-Status", response.Header.Get("Cache-Status")+code) - // Yaegi sucks, we can't use maps. - for k := range response.Header { - customWriter.Header().Set(k, response.Header.Get(k)) - } + maps.Copy(customWriter.Header(), response.Header) customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - _, _ = io.Copy(b, response.Body) - }) + customWriter.resetAndCopyToBuffer(response.Body) _, err := customWriter.Send() return err } rw.WriteHeader(http.StatusGatewayTimeout) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() _, err := customWriter.Send() return err @@ -719,13 +851,8 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n if !validator.Matched { rfc.SetCacheStatusHeader(response, storerName) customWriter.WriteHeader(response.StatusCode) - // Yaegi sucks, we can't use maps. - for k := range response.Header { - customWriter.Header().Set(k, response.Header.Get(k)) - } - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + maps.Copy(customWriter.Header(), response.Header) + customWriter.copyToBuffer(response.Body) _, _ = customWriter.Send() return err @@ -734,9 +861,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n if statusCode != http.StatusNotModified && validator.Matched { customWriter.WriteHeader(http.StatusNotModified) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - }) + customWriter.resetBuffer() _, _ = customWriter.Send() return err @@ -750,13 +875,8 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n if !modeContext.Strict || rfc.ValidateMaxAgeCachedStaleResponse(requestCc, responseCc, response, int(addTime.Seconds())) != nil { customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) - // Yaegi sucks, we can't use maps. 
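The request coalescing used throughout Upstream and Revalidate above relies on golang.org/x/sync/singleflight: concurrent callers passing the same key share a single execution of the callback, and the third return value of Do reports whether the result was shared, which is what feeds the new "Reused response from concurrent request" log line. A minimal standalone sketch, with illustrative names only:

package main

import (
	"fmt"
	"sync"

	"golang.org/x/sync/singleflight"
)

func main() {
	var pool singleflight.Group
	var mu sync.Mutex
	var wg sync.WaitGroup
	calls := 0

	for i := 0; i < 3; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			// All goroutines ask for the same key, so overlapping callers
			// receive the result of a single callback execution.
			v, _, shared := pool.Do("GET-http://example.com/resource", func() (interface{}, error) {
				mu.Lock()
				calls++
				mu.Unlock()
				return "upstream body", nil
			})
			fmt.Println(v, "shared:", shared)
		}()
	}
	wg.Wait()

	mu.Lock()
	fmt.Println("upstream called", calls, "time(s)") // at most 3, typically fewer
	mu.Unlock()
}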
- for k := range response.Header { - customWriter.Header().Set(k, response.Header.Get(k)) - } - customWriter.handleBuffer(func(b *bytes.Buffer) { - _, _ = io.Copy(b, response.Body) - }) + maps.Copy(customWriter.Header(), response.Header) + customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -764,6 +884,18 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } } else if stale != nil { response := stale + + if !modeContext.Strict { + rfc.SetCacheStatusHeader(response, storerName) + customWriter.WriteHeader(response.StatusCode) + rfc.HitStaleCache(&response.Header) + maps.Copy(customWriter.Header(), response.Header) + customWriter.copyToBuffer(response.Body) + _, err := customWriter.Send() + + return err + } + addTime, _ := time.ParseDuration(response.Header.Get(rfc.StoredTTLHeader)) responseCc, _ := cacheobject.ParseResponseCacheControl(rfc.HeaderAllCommaSepValuesString(response.Header, "Cache-Control")) @@ -780,15 +912,9 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n code := fmt.Sprintf("; fwd-status=%d", statusCode) rfc.HitStaleCache(&response.Header) response.Header.Set("Cache-Status", response.Header.Get("Cache-Status")+code) - // Yaegi sucks, we can't use maps. - for k := range response.Header { - customWriter.Header().Set(k, response.Header.Get(k)) - } + maps.Copy(customWriter.Header(), response.Header) customWriter.WriteHeader(response.StatusCode) - customWriter.handleBuffer(func(b *bytes.Buffer) { - b.Reset() - _, _ = io.Copy(b, response.Body) - }) + customWriter.resetAndCopyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -800,28 +926,33 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } errorCacheCh := make(chan error) + defer close(errorCacheCh) go func(vr *http.Request, cw *CustomWriter) { - errorCacheCh <- s.Upstream(cw, vr, next, requestCc, cachedKey) + defer func() { + if r := recover(); r != nil { + s.Configuration.GetLogger().Infof("recovered due to closed errorCacheCh chan, the request context has finished prematurely %s", req.URL) + } + }() + prometheus.Increment(prometheus.NoCachedResponseCounter) + errorCacheCh <- s.Upstream(cw, vr, next, requestCc, cachedKey, uri) }(req, customWriter) select { case <-req.Context().Done(): - switch req.Context().Err() { case baseCtx.DeadlineExceeded: - customWriter.WriteHeader(http.StatusGatewayTimeout) rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=DEADLINE-EXCEEDED") + customWriter.Rw.WriteHeader(http.StatusGatewayTimeout) _, _ = customWriter.Rw.Write([]byte("Internal server error")) + s.Configuration.GetLogger().Infof("Internal server error on endpoint %s: %v", req.URL, s.Storers) return baseCtx.DeadlineExceeded case baseCtx.Canceled: return baseCtx.Canceled default: return nil } - case v := <-errorCacheCh: - switch v { case nil: _, _ = customWriter.Send() diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/writer.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/writer.go index 13208fad1..021d5e999 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/writer.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/writer.go @@ -2,11 +2,12 @@ package middleware import ( "bytes" - "fmt" + "io" "net/http" "strconv" "sync" + "github.com/darkweak/go-esi/esi" "github.com/darkweak/souin/pkg/rfc" ) @@ -39,12 +40,25 @@ type CustomWriter struct { statusCode int } -func (r *CustomWriter) 
handleBuffer(callback func(*bytes.Buffer)) { +func (r *CustomWriter) resetBuffer() { r.mutex.Lock() - callback(r.Buf) + r.Buf.Reset() r.mutex.Unlock() } +func (r *CustomWriter) copyToBuffer(src io.Reader) (int64, error) { + r.mutex.Lock() + defer r.mutex.Unlock() + return io.Copy(r.Buf, src) +} + +func (r *CustomWriter) resetAndCopyToBuffer(src io.Reader) (int64, error) { + r.mutex.Lock() + defer r.mutex.Unlock() + r.Buf.Reset() + return io.Copy(r.Buf, src) +} + // Header will write the response headers func (r *CustomWriter) Header() http.Header { r.mutex.Lock() @@ -77,23 +91,22 @@ func (r *CustomWriter) WriteHeader(code int) { // Write will write the response body func (r *CustomWriter) Write(b []byte) (int, error) { - r.handleBuffer(func(actual *bytes.Buffer) { - actual.Grow(len(b)) - _, _ = actual.Write(b) - }) + r.mutex.Lock() + r.Buf.Grow(len(b)) + _, _ = r.Buf.Write(b) + r.mutex.Unlock() return len(b), nil } // Send delays the response to handle Cache-Status func (r *CustomWriter) Send() (int, error) { - defer r.Buf.Reset() - fmt.Println("Upstream Send", r.Buf.Len()) + defer r.resetBuffer() storedLength := r.Header().Get(rfc.StoredLengthHeader) if storedLength != "" { r.Header().Set("Content-Length", storedLength) } - b := r.Buf.Bytes() + b := esi.Parse(r.Buf.Bytes(), r.Req) if len(b) != 0 { r.Header().Set("Content-Length", strconv.Itoa(len(b))) } diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/rfc/revalidation.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/rfc/revalidation.go index ccb6e73be..c2d63e4aa 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/rfc/revalidation.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/rfc/revalidation.go @@ -1,66 +1,31 @@ package rfc import ( - "bufio" - "bytes" - "encoding/json" "net/http" "strings" "time" - "github.com/darkweak/souin/pkg/storage/types" + "github.com/darkweak/storages/core" ) -func ValidateETagFromHeader(etag string, validator *types.Revalidator) { - validator.ResponseETag = etag - validator.NeedRevalidation = validator.NeedRevalidation || validator.ResponseETag != "" - validator.Matched = validator.ResponseETag == "" || (validator.ResponseETag != "" && len(validator.RequestETags) == 0) - - if len(validator.RequestETags) == 0 { - validator.NotModified = false - return - } - - // If-None-Match - if validator.IfNoneMatchPresent { - for _, ifNoneMatch := range validator.IfNoneMatch { - // Asrterisk special char to match any of ETag - if ifNoneMatch == "*" { - validator.Matched = true - return - } - if ifNoneMatch == validator.ResponseETag { - validator.Matched = true - return - } - } - - validator.Matched = false - return - } - - // If-Match - if validator.IfMatchPresent { - validator.Matched = false - if validator.ResponseETag == "" { - return - } - - for _, ifMatch := range validator.IfMatch { - // Asrterisk special char to match any of ETag - if ifMatch == "*" { - validator.Matched = true - return - } - if ifMatch == validator.ResponseETag { - validator.Matched = true - return - } - } - } +type Revalidator struct { + Matched bool + IfNoneMatchPresent bool + IfMatchPresent bool + IfModifiedSincePresent bool + IfUnmodifiedSincePresent bool + IfUnmotModifiedSincePresent bool + NeedRevalidation bool + NotModified bool + IfModifiedSince time.Time + IfUnmodifiedSince time.Time + IfNoneMatch []string + IfMatch []string + RequestETags []string + ResponseETag string } -func ParseRequest(req *http.Request) *types.Revalidator { +func ParseRequest(req *http.Request) *core.Revalidator { 
var rqEtags []string if len(req.Header.Get("If-None-Match")) > 0 { rqEtags = strings.Split(req.Header.Get("If-None-Match"), ",") @@ -68,7 +33,7 @@ func ParseRequest(req *http.Request) *types.Revalidator { for i, tag := range rqEtags { rqEtags[i] = strings.Trim(tag, " ") } - validator := types.Revalidator{ + validator := core.Revalidator{ NotModified: len(rqEtags) > 0, RequestETags: rqEtags, } @@ -94,116 +59,3 @@ func ParseRequest(req *http.Request) *types.Revalidator { return &validator } - -func DecodeMapping(item []byte) (*StorageMapper, error) { - mapping := &StorageMapper{} - e := json.Unmarshal(item, mapping) - - return mapping, e -} - -func MappingElection(provider types.Storer, item []byte, req *http.Request, validator *types.Revalidator) (resultFresh *http.Response, resultStale *http.Response, e error) { - mapping := &StorageMapper{} - - if len(item) != 0 { - mapping, e = DecodeMapping(item) - if e != nil { - return resultFresh, resultStale, e - } - } - - for keyName, keyItem := range mapping.Mapping { - valid := true - - for hname, hval := range keyItem.VariedHeaders { - if req.Header.Get(hname) != strings.Join(hval, ", ") { - valid = false - - break - } - } - - if !valid { - continue - } - - ValidateETagFromHeader(keyItem.Etag, validator) - - if validator.Matched { - // If the key is fresh enough. - if time.Since(keyItem.FreshTime) < 0 { - response := provider.Get(keyName) - if response != nil { - if resultFresh, e = http.ReadResponse(bufio.NewReader(bytes.NewBuffer(response)), req); e != nil { - return resultFresh, resultStale, e - } - - return resultFresh, resultStale, e - } - } - - // If the key is still stale. - if time.Since(keyItem.StaleTime) < 0 { - response := provider.Get(keyName) - if response != nil { - if resultStale, e = http.ReadResponse(bufio.NewReader(bytes.NewBuffer(response)), req); e != nil { - return resultFresh, resultStale, e - } - } - } - } - } - - return resultFresh, resultStale, e -} - -type KeyIndex struct { - StoredAt time.Time `json:"stored_at,omitempty"` - FreshTime time.Time `json:"fresh_time,omitempty"` - StaleTime time.Time `json:"stale_time,omitempty"` - VariedHeaders map[string][]string `json:"varied_headers,omitempty"` - Etag string `json:"etag,omitempty"` - RealKey string `json:"real_key,omitempty"` -} -type StorageMapper struct { - Mapping map[string]*KeyIndex `json:"mapping,omitempty"` -} - -func MappingUpdater(key string, item []byte, now, freshTime, staleTime time.Time, variedHeaders http.Header, etag, realKey string) (val []byte, e error) { - mapping := &StorageMapper{} - if len(item) != 0 { - e = json.Unmarshal(item, mapping) - if e != nil { - return nil, e - } - } - - if mapping.Mapping == nil { - mapping.Mapping = make(map[string]*KeyIndex) - } - - var pbvariedeheader map[string][]string - if variedHeaders != nil { - pbvariedeheader = make(map[string][]string) - } - - for k, v := range variedHeaders { - pbvariedeheader[k] = append(pbvariedeheader[k], v...) 
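The mapping election being removed from this package (an equivalent is called through github.com/darkweak/storages/core elsewhere in this patch) chooses between a fresh and a stale hit purely by comparing the stored timestamps with the current time. A condensed, standalone sketch of that decision, with deliberately simplified types:

package main

import (
	"fmt"
	"time"
)

// keyIndex keeps only the two timestamps that drive the election; the real
// structure also carries varied headers, the ETag and the real storage key.
type keyIndex struct {
	FreshTime time.Time
	StaleTime time.Time
}

// elect reports whether a stored entry can still be served fresh, served stale,
// or not served at all.
func elect(k keyIndex, now time.Time) string {
	switch {
	case now.Before(k.FreshTime):
		return "fresh"
	case now.Before(k.StaleTime):
		return "stale"
	default:
		return "expired"
	}
}

func main() {
	now := time.Now()
	entry := keyIndex{FreshTime: now.Add(10 * time.Second), StaleTime: now.Add(60 * time.Second)}

	fmt.Println(elect(entry, now))                     // fresh
	fmt.Println(elect(entry, now.Add(30*time.Second))) // stale
	fmt.Println(elect(entry, now.Add(2*time.Minute)))  // expired
}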
- } - - mapping.Mapping[key] = &KeyIndex{ - StoredAt: now, - FreshTime: freshTime, - StaleTime: staleTime, - VariedHeaders: pbvariedeheader, - Etag: etag, - RealKey: realKey, - } - - val, e = json.Marshal(mapping) - if e != nil { - return nil, e - } - - return val, e -} diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/rfc/vary.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/rfc/vary.go index bc958f40d..d1c0e157e 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/rfc/vary.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/rfc/vary.go @@ -6,6 +6,8 @@ import ( "net/url" "slices" "strings" + + "github.com/darkweak/storages/core" ) const ( @@ -15,10 +17,14 @@ const ( // GetVariedCacheKey returns the varied cache key for req and resp. func GetVariedCacheKey(rq *http.Request, headers []string) string { - if len(headers) == 0 { + isVaryDisabled := rq.Context().Value(core.DISABLE_VARY_CTX) + if isVaryDisabled != nil && isVaryDisabled.(bool) { return "" } + if len(headers) == 0 { + return "" + } for i, v := range headers { h := strings.TrimSpace(rq.Header.Get(v)) if strings.Contains(h, ";") || strings.Contains(h, ":") { diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/abstractProvider.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/abstractProvider.go deleted file mode 100644 index 577d5fade..000000000 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/abstractProvider.go +++ /dev/null @@ -1,57 +0,0 @@ -package storage - -import ( - "net/http" - "net/url" - "strings" - - "github.com/darkweak/souin/configurationtypes" - "github.com/darkweak/souin/pkg/storage/types" -) - -const ( - VarySeparator = "{-VARY-}" - DecodedHeaderSeparator = ";" - encodedHeaderSemiColonSeparator = "%3B" - encodedHeaderColonSeparator = "%3A" - StalePrefix = "STALE_" -) - -type StorerInstanciator func(configurationtypes.AbstractConfigurationInterface) (types.Storer, error) - -func NewStorages(configuration configurationtypes.AbstractConfigurationInterface) ([]types.Storer, error) { - s, err := Factory(configuration) - return []types.Storer{s}, err -} - -func varyVoter(baseKey string, req *http.Request, currentKey string) bool { - if currentKey == baseKey { - return true - } - - if strings.Contains(currentKey, VarySeparator) && strings.HasPrefix(currentKey, baseKey+VarySeparator) { - list := currentKey[(strings.LastIndex(currentKey, VarySeparator) + len(VarySeparator)):] - if len(list) == 0 { - return false - } - - for _, item := range strings.Split(list, ";") { - index := strings.LastIndex(item, ":") - if len(item) < index+1 { - return false - } - - hVal := item[index+1:] - if strings.Contains(hVal, encodedHeaderSemiColonSeparator) || strings.Contains(hVal, encodedHeaderColonSeparator) { - hVal, _ = url.QueryUnescape(hVal) - } - if req.Header.Get(item[:index]) != hVal { - return false - } - } - - return true - } - - return false -} diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/cacheProvider.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/cacheProvider.go deleted file mode 100644 index f0ff8847e..000000000 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/cacheProvider.go +++ /dev/null @@ -1,178 +0,0 @@ -package storage - -import ( - "bufio" - "bytes" - "net/http" - "regexp" - "strings" - "time" - - "github.com/akyoto/cache" - t "github.com/darkweak/souin/configurationtypes" - "github.com/darkweak/souin/pkg/rfc" - 
"github.com/darkweak/souin/pkg/storage/types" -) - -// Cache provider type -type Cache struct { - *cache.Cache - stale time.Duration -} - -var sharedCache *Cache - -// Factory function create new Cache instance -func Factory(c t.AbstractConfigurationInterface) (types.Storer, error) { - provider := cache.New(1 * time.Second) - - if sharedCache == nil { - sharedCache = &Cache{Cache: provider, stale: c.GetDefaultCache().GetStale()} - } - - return sharedCache, nil -} - -// Name returns the storer name -func (provider *Cache) Name() string { - return "CACHE" -} - -// Uuid returns an unique identifier -func (provider *Cache) Uuid() string { - return "" -} - -// ListKeys method returns the list of existing keys -func (provider *Cache) ListKeys() []string { - var keys []string - provider.Range(func(key, _ interface{}) bool { - keys = append(keys, key.(string)) - return true - }) - - return keys -} - -// MapKeys method returns the map of existing keys -func (provider *Cache) MapKeys(prefix string) map[string]string { - keys := map[string]string{} - provider.Range(func(key, value interface{}) bool { - if strings.HasPrefix(key.(string), prefix) { - k, _ := strings.CutPrefix(key.(string), prefix) - keys[k] = string(value.([]byte)) - } - return true - }) - - return keys -} - -// Get method returns the populated response if exists, empty response then -func (provider *Cache) Get(key string) []byte { - result, found := provider.Cache.Get(key) - - if !found { - return []byte{} - } - - return result.([]byte) -} - -// GetMultiLevel tries to load the key and check if one of linked keys is a fresh/stale candidate. -func (provider *Cache) GetMultiLevel(key string, req *http.Request, validator *types.Revalidator) (fresh *http.Response, stale *http.Response) { - result, found := provider.Cache.Get("IDX_" + key) - if !found { - return - } - - fresh, stale, _ = rfc.MappingElection(provider, result.([]byte), req, validator) - - return -} - -// SetMultiLevel tries to store the key with the given value and update the mapping key to store metadata. 
-func (provider *Cache) SetMultiLevel(baseKey, variedKey string, value []byte, variedHeaders http.Header, etag string, duration time.Duration, realKey string) error { - now := time.Now() - - var e error - - provider.Cache.Set(variedKey, value, duration) - - mappingKey := "IDX_" + baseKey - item, ok := provider.Cache.Get(mappingKey) - var val []byte - if ok { - val = item.([]byte) - } - - val, e = rfc.MappingUpdater(variedKey, val, now, now.Add(duration), now.Add(duration+provider.stale), variedHeaders, etag, realKey) - if e != nil { - return e - } - - provider.Cache.Set(mappingKey, val, 0) - return nil -} - -// Prefix method returns the populated response if exists, empty response then -func (provider *Cache) Prefix(key string, req *http.Request, validator *types.Revalidator) *http.Response { - var result *http.Response - - provider.Range(func(k, v interface{}) bool { - if !strings.HasPrefix(k.(string), key) { - return true - } - - if k == key || varyVoter(key, req, k.(string)) { - if res, err := http.ReadResponse(bufio.NewReader(bytes.NewBuffer(v.([]byte))), req); err == nil { - rfc.ValidateETagFromHeader(res.Header.Get("etag"), validator) - if validator.Matched { - result = res - return false - } - } - return true - } - - return true - }) - - return result -} - -// Set method will store the response in Cache provider -func (provider *Cache) Set(key string, value []byte, duration time.Duration) error { - provider.Cache.Set(key, value, duration) - - return nil -} - -// Delete method will delete the response in Cache provider if exists corresponding to key param -func (provider *Cache) Delete(key string) { - provider.Cache.Delete(key) -} - -// DeleteMany method will delete the responses in Cache provider if exists corresponding to the regex key param -func (provider *Cache) DeleteMany(key string) { - re, _ := regexp.Compile(key) - - provider.Range(func(current, _ any) bool { - if (re != nil && re.MatchString(current.(string))) || strings.HasPrefix(current.(string), key) { - provider.Delete(current.(string)) - } - return true - }) -} - -// Init method will -func (provider *Cache) Init() error { - return nil -} - -// Reset method will reset or close provider -func (provider *Cache) Reset() error { - provider.DeleteMany("*") - - return nil -} diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/defaultProvider.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/defaultProvider.go new file mode 100644 index 000000000..206768691 --- /dev/null +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/defaultProvider.go @@ -0,0 +1,213 @@ +package storage + +import ( + "bytes" + "net/http" + "regexp" + "strings" + "sync" + "time" + + "github.com/darkweak/souin/configurationtypes" + "github.com/darkweak/souin/pkg/storage/types" + "github.com/darkweak/storages/core" + "github.com/pierrec/lz4/v4" +) + +// Default provider type +type Default struct { + m *sync.Map + stale time.Duration + logger core.Logger + + mu sync.Mutex +} + +type item struct { + invalidAt time.Time + value []byte +} + +// Factory function create new Default instance +func Factory(c configurationtypes.AbstractConfigurationInterface) (types.Storer, error) { + return &Default{m: &sync.Map{}, logger: c.GetLogger(), stale: c.GetDefaultCache().GetStale()}, nil +} + +// Name returns the storer name +func (provider *Default) Name() string { + return types.DefaultStorageName +} + +// Uuid returns an unique identifier +func (provider *Default) Uuid() string { + return "" +} + +// MapKeys method 
returns a map with the key and value +func (provider *Default) MapKeys(prefix string) map[string]string { + provider.mu.Lock() + defer provider.mu.Unlock() + + now := time.Now() + keys := map[string]string{} + + provider.m.Range(func(key, value any) bool { + if strings.HasPrefix(key.(string), prefix) { + k, _ := strings.CutPrefix(key.(string), prefix) + if v, ok := value.(item); ok { + if v.invalidAt.After(now) { + keys[k] = string(v.value) + } + + return true + } + + if v, ok := value.(*core.StorageMapper); ok { + for _, v := range v.Mapping { + if v.StaleTime.AsTime().After(now) { + keys[v.RealKey] = string(provider.Get(v.RealKey)) + } + } + } + } + + return true + }) + + return keys +} + +// ListKeys method returns the list of existing keys +func (provider *Default) ListKeys() []string { + now := time.Now() + keys := []string{} + + provider.m.Range(func(key, value any) bool { + if strings.HasPrefix(key.(string), core.MappingKeyPrefix) { + mapping, err := core.DecodeMapping(value.([]byte)) + if err == nil { + for _, v := range mapping.Mapping { + if v.StaleTime.AsTime().After(now) { + keys = append(keys, v.RealKey) + } else { + provider.m.Delete(v.RealKey) + } + } + } + } + + return true + }) + + return keys +} + +// Get method returns the populated response if exists, empty response then +func (provider *Default) Get(key string) []byte { + result, ok := provider.m.Load(key) + if !ok || result == nil { + return nil + } + + res, ok := result.(item) + if !ok { + return nil + } + + if res.invalidAt.After(time.Now()) { + return res.value + } + + return nil +} + +// GetMultiLevel tries to load the key and check if one of linked keys is a fresh/stale candidate. +func (provider *Default) GetMultiLevel(key string, req *http.Request, validator *core.Revalidator) (fresh *http.Response, stale *http.Response) { + result, found := provider.m.Load(core.MappingKeyPrefix + key) + if !found { + return + } + + fresh, stale, _ = core.MappingElection(provider, result.([]byte), req, validator, provider.logger) + + return +} + +// SetMultiLevel tries to store the key with the given value and update the mapping key to store metadata. 
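The SetMultiLevel implementation that follows stores response bytes lz4-compressed through github.com/pierrec/lz4/v4. The round trip below is a standalone illustration of that frame API; the read side is an assumption about how consumers of the stored value decode it and is not taken from this patch.

package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/pierrec/lz4/v4"
)

func main() {
	payload := []byte("HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\nhello from the cache")

	// Write side, as in SetMultiLevel below: stream the value through an lz4 frame writer.
	compressed := new(bytes.Buffer)
	w := lz4.NewWriter(compressed)
	if _, err := w.ReadFrom(bytes.NewReader(payload)); err != nil {
		panic(err)
	}
	if err := w.Close(); err != nil { // finalize the frame in this standalone example
		panic(err)
	}

	// Read side, for illustration only: an lz4 frame reader restores the original bytes.
	restored, err := io.ReadAll(lz4.NewReader(compressed))
	if err != nil {
		panic(err)
	}

	fmt.Println(bytes.Equal(payload, restored)) // true
}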
+func (provider *Default) SetMultiLevel(baseKey, variedKey string, value []byte, variedHeaders http.Header, etag string, duration time.Duration, realKey string) error { + now := time.Now() + + var e error + compressed := new(bytes.Buffer) + if _, e = lz4.NewWriter(compressed).ReadFrom(bytes.NewReader(value)); e != nil { + provider.logger.Errorf("Impossible to compress the key %s into Badger, %v", variedKey, e) + return e + } + + provider.m.Store(variedKey, item{ + invalidAt: now.Add(duration + provider.stale), + value: compressed.Bytes(), + }) + + mappingKey := core.MappingKeyPrefix + baseKey + item, ok := provider.m.Load(mappingKey) + var val []byte + if ok { + val = item.([]byte) + } + + val, e = core.MappingUpdater(variedKey, val, provider.logger, now, now.Add(duration), now.Add(duration+provider.stale), variedHeaders, etag, realKey) + if e != nil { + return e + } + + provider.logger.Debugf("Store the new mapping for the key %s in Default", variedKey) + provider.m.Store(mappingKey, val) + return nil +} + +// Set method will store the response in Badger provider +func (provider *Default) Set(key string, value []byte, duration time.Duration) error { + provider.m.Store(key, item{ + invalidAt: time.Now().Add(duration), + value: value, + }) + + return nil +} + +// Delete method will delete the response in Badger provider if exists corresponding to key param +func (provider *Default) Delete(key string) { + provider.m.Delete(key) +} + +// DeleteMany method will delete the responses in Badger provider if exists corresponding to the regex key param +func (provider *Default) DeleteMany(key string) { + re, e := regexp.Compile(key) + + if e != nil { + provider.logger.Infof("Failed to compile key %s, %v", key, e) + } + + provider.m.Range(func(current, _ any) bool { + if (re != nil && re.MatchString(current.(string))) || strings.HasPrefix(current.(string), key) { + provider.m.Delete(key) + } + + return true + }) +} + +// Init method will +func (provider *Default) Init() error { + return nil +} + +// Reset method will reset or close provider +func (provider *Default) Reset() error { + provider.mu.Lock() + provider.m = new(sync.Map) + provider.mu.Unlock() + + return nil +} diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/types/types.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/types/types.go index e41007745..e4c8603fc 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/types/types.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/types/types.go @@ -3,26 +3,11 @@ package types import ( "net/http" "time" -) -type Revalidator struct { - Matched bool - IfNoneMatchPresent bool - IfMatchPresent bool - IfModifiedSincePresent bool - IfUnmodifiedSincePresent bool - IfUnmotModifiedSincePresent bool - NeedRevalidation bool - NotModified bool - IfModifiedSince time.Time - IfUnmodifiedSince time.Time - IfNoneMatch []string - IfMatch []string - RequestETags []string - ResponseETag string -} + "github.com/darkweak/storages/core" +) -const DefaultStorageName = "CACHE" +const DefaultStorageName = "DEFAULT" const OneYearDuration = 365 * 24 * time.Hour type Storer interface { @@ -38,6 +23,6 @@ type Storer interface { Reset() error // Multi level storer to handle fresh/stale at once - GetMultiLevel(key string, req *http.Request, validator *Revalidator) (fresh *http.Response, stale *http.Response) + GetMultiLevel(key string, req *http.Request, validator *core.Revalidator) (fresh *http.Response, stale *http.Response) 
SetMultiLevel(baseKey, variedKey string, value []byte, variedHeaders http.Header, etag string, duration time.Duration, realKey string) error } diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/common.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/common.go index 1fb560034..6a82dd04e 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/common.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/common.go @@ -1,7 +1,6 @@ package providers import ( - "fmt" "net/http" "net/url" "regexp" @@ -10,8 +9,8 @@ import ( "time" "github.com/darkweak/souin/configurationtypes" - "github.com/darkweak/souin/pkg/storage" "github.com/darkweak/souin/pkg/storage/types" + "github.com/darkweak/storages/core" ) const ( @@ -28,16 +27,38 @@ const ( cacheTags = "Cache-Tags" cacheTag = "Cache-Tag" - stalePrefix = "STALE_" surrogatePrefix = "SURROGATE_" ) var storageToInfiniteTTLMap = map[string]time.Duration{ - "CACHE": 365 * 24 * time.Hour, + "BADGER": types.OneYearDuration, + "ETCD": types.OneYearDuration, + "NUTS": 0, + "OLRIC": types.OneYearDuration, + "OTTER": types.OneYearDuration, + "REDIS": -1, + "SIMPLEFS": 0, + types.DefaultStorageName: types.OneYearDuration, } func (s *baseStorage) ParseHeaders(value string) []string { - return regexp.MustCompile(s.parent.getHeaderSeparator()+" *").Split(value, -1) + res := strings.Split(value, s.parent.getHeaderSeparator()) + for i, v := range res { + res[i] = strings.TrimSpace(v) + } + + return res +} + +func getCandidateHeader(header http.Header, getCandidates func() []string) (string, string) { + candidates := getCandidates() + for _, candidate := range candidates { + if h := header.Get(candidate); h != "" { + return candidate, h + } + } + + return candidates[len(candidates)-1], "" } func isSafeHTTPMethod(method string) bool { @@ -59,8 +80,11 @@ func uniqueTag(values []string) []string { } if _, found := tmp[item]; !found { tmp[item] = true - i, _ := url.QueryUnescape(item) - list = append(list, i) + + if strings.Contains(item, "%") { + item, _ = url.QueryUnescape(item) + } + list = append(list, item) } } @@ -79,17 +103,35 @@ type baseStorage struct { keysRegexp map[string]keysRegexpInner dynamic bool keepStale bool + logger core.Logger mu sync.Mutex duration time.Duration } -func (s *baseStorage) init(config configurationtypes.AbstractConfigurationInterface, _ string) { - storers, err := storage.NewStorages(config) - if err != nil { - panic(fmt.Sprintf("Impossible to instanciate the storer for the surrogate-keys: %v", err)) - } +func (s *baseStorage) init(config configurationtypes.AbstractConfigurationInterface, defaultStorerName string) { + if configuration, ok := config.GetSurrogateKeys()["_configuration"]; ok { + storer := core.GetRegisteredStorer(configuration.Storer) + if storer == nil { + storer = core.GetRegisteredStorer(types.DefaultStorageName + "-") + if storer == nil { + config.GetLogger().Errorf("Impossible to retrieve the storers %s for the surrogate-keys from it's configuration", configuration.Storer) + } + } + + s.Storage = storer + } else { + config.GetLogger().Debugf("Try to load the storer %s as surrogate backend", defaultStorerName) + storer := core.GetRegisteredStorer(defaultStorerName) + if storer == nil { + config.GetLogger().Errorf("Impossible to retrieve the storers %s for the surrogate-keys fallback to the default storage", configuration.Storer) + storer = core.GetRegisteredStorer(types.DefaultStorageName + "-") + if 
storer == nil { + config.GetLogger().Error("Impossible to retrieve the default storer") + } + } - s.Storage = storers[0] + s.Storage = storer + } s.Keys = config.GetSurrogateKeys() s.keepStale = config.GetDefaultCache().GetCDN().Strategy != "hard" @@ -115,8 +157,8 @@ func (s *baseStorage) init(config configurationtypes.AbstractConfigurationInterf } s.dynamic = config.GetDefaultCache().GetCDN().Dynamic + s.logger = config.GetLogger() s.keysRegexp = keysRegexp - s.mu = sync.Mutex{} s.duration = storageToInfiniteTTLMap[s.Storage.Name()] } @@ -125,8 +167,8 @@ func (s *baseStorage) storeTag(tag string, cacheKey string, re *regexp.Regexp) { s.mu.Lock() currentValue := string(s.Storage.Get(surrogatePrefix + tag)) if !re.MatchString(currentValue) { - fmt.Printf("Store the tag %s", tag) - _ = s.Storage.Set(surrogatePrefix+tag, []byte(currentValue+souinStorageSeparator+cacheKey), -1) + s.logger.Debugf("Store the tag %s", tag) + _ = s.Storage.Set(surrogatePrefix+tag, []byte(currentValue+souinStorageSeparator+cacheKey), s.duration) } } @@ -136,6 +178,7 @@ func (*baseStorage) candidateStore(tag string) bool { func (*baseStorage) getOrderedSurrogateKeyHeadersCandidate() []string { return []string{ + cacheGroupKey, surrogateKey, edgeCacheTag, cacheTags, @@ -152,14 +195,7 @@ func (*baseStorage) getOrderedSurrogateControlHeadersCandidate() []string { } func (s *baseStorage) GetSurrogateControl(header http.Header) (string, string) { - parent := s.parent.getOrderedSurrogateControlHeadersCandidate() - for _, candidate := range parent { - if h := header.Get(candidate); h != "" { - return candidate, h - } - } - - return parent[len(parent)-1], "" + return getCandidateHeader(header, s.parent.getOrderedSurrogateControlHeadersCandidate) } func (s *baseStorage) GetSurrogateControlName() string { @@ -167,23 +203,15 @@ func (s *baseStorage) GetSurrogateControlName() string { } func (s *baseStorage) getSurrogateKey(header http.Header) string { - for _, candidate := range s.parent.getOrderedSurrogateKeyHeadersCandidate() { - if h := header.Get(candidate); h != "" { - return h - } - } - - return "" + _, v := getCandidateHeader(header, s.parent.getOrderedSurrogateKeyHeadersCandidate) + return v } func (s *baseStorage) purgeTag(tag string) []string { - toInvalidate := string(s.Storage.Get(tag)) - fmt.Printf("Purge the tag %s", tag) - s.Storage.Delete(surrogatePrefix + tag) + toInvalidate := string(s.Storage.Get(surrogatePrefix + tag)) + s.logger.Debugf("Purge the tag %s", tag) if !s.keepStale { - toInvalidate = toInvalidate + "," + string(s.Storage.Get(stalePrefix+tag)) - fmt.Printf("Purge the tag %s", stalePrefix+tag) - s.Storage.Delete(surrogatePrefix + stalePrefix + tag) + s.Storage.Delete(surrogatePrefix + tag) } return strings.Split(toInvalidate, souinStorageSeparator) } @@ -193,11 +221,8 @@ func (s *baseStorage) Store(response *http.Response, cacheKey, uri string) error h := response.Header cacheKey = url.QueryEscape(cacheKey) - staleKey := stalePrefix + cacheKey urlRegexp := regexp.MustCompile("(^|" + regexp.QuoteMeta(souinStorageSeparator) + ")" + regexp.QuoteMeta(cacheKey) + "(" + regexp.QuoteMeta(souinStorageSeparator) + "|$)") - staleUrlRegexp := regexp.MustCompile("(^|" + regexp.QuoteMeta(souinStorageSeparator) + ")" + regexp.QuoteMeta(staleKey) + "(" + regexp.QuoteMeta(souinStorageSeparator) + "|$)") - keys := s.ParseHeaders(s.parent.getSurrogateKey(h)) for _, key := range keys { @@ -205,19 +230,21 @@ func (s *baseStorage) Store(response *http.Response, cacheKey, uri string) error if controls := 
s.ParseHeaders(v); len(controls) != 0 { if len(controls) == 1 && controls[0] == "" { s.storeTag(key, cacheKey, urlRegexp) - s.storeTag(stalePrefix+key, staleKey, staleUrlRegexp) + s.storeTag(uri, cacheKey, urlRegexp) continue } for _, control := range controls { if s.parent.candidateStore(control) { s.storeTag(key, cacheKey, urlRegexp) - s.storeTag(stalePrefix+key, staleKey, staleUrlRegexp) + s.storeTag(uri, cacheKey, urlRegexp) + + break } } } else { s.storeTag(key, cacheKey, urlRegexp) - s.storeTag(stalePrefix+key, staleKey, staleUrlRegexp) + s.storeTag(uri, cacheKey, urlRegexp) } } @@ -233,6 +260,8 @@ func (s *baseStorage) Purge(header http.Header) (cacheKeys []string, surrogateKe toInvalidate = append(toInvalidate, s.purgeTag(su)...) } + s.logger.Debugf("Purge the following tags: %+v", toInvalidate) + return uniqueTag(toInvalidate), surrogates } @@ -252,7 +281,5 @@ func (s *baseStorage) List() map[string]string { // Destruct method will shutdown properly the provider func (s *baseStorage) Destruct() error { - s.Storage.DeleteMany(surrogatePrefix + ".*") - - return nil + return s.Storage.Reset() } From 391cbeaa1399613060173557221c565c413872d4 Mon Sep 17 00:00:00 2001 From: Mohammed Al Sahaf Date: Fri, 1 Aug 2025 19:37:23 +0300 Subject: [PATCH 4/6] appease the linter Signed-off-by: Mohammed Al Sahaf --- pkg/middleware/middleware.go | 28 ++++++++++--------- .../souin/pkg/middleware/middleware.go | 28 ++++++++++--------- 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index 33e4f1e54..0b94bda18 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -633,11 +633,13 @@ func (s *SouinBaseHandler) HandleInternally(r *http.Request) (bool, http.Handler return false, nil } -type handlerFunc = func(http.ResponseWriter, *http.Request) error -type statusCodeLogger struct { - http.ResponseWriter - statusCode int -} +type ( + handlerFunc = func(http.ResponseWriter, *http.Request) error + statusCodeLogger struct { + http.ResponseWriter + statusCode int + } +) func (s *statusCodeLogger) WriteHeader(code int) { s.statusCode = code @@ -765,7 +767,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } customWriter.WriteHeader(response.StatusCode) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, _ = customWriter.Send() return nil @@ -791,7 +793,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } customWriter.WriteHeader(response.StatusCode) s.Configuration.GetLogger().Debugf("Serve from cache %+v", req) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() prometheus.Increment(prometheus.CachedResponseCounter) @@ -811,7 +813,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() customWriter = NewCustomWriter(req, rw, bufPool) go func(v *core.Revalidator, goCw *CustomWriter, goRq *http.Request, goNext func(http.ResponseWriter, *http.Request) error, goCc *cacheobject.RequestCacheDirectives, goCk string, goUri string) { @@ -835,7 +837,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n response.Header.Set("Cache-Status", response.Header.Get("Cache-Status")+code) 
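The maps.Copy calls that appear as context in this hunk replaced the earlier per-key Header().Set loops. Since http.Header is a map[string][]string, maps.Copy carries every value of a multi-valued header, whereas Set(k, Get(k)) kept only the first one. A standalone sketch of the difference, with illustrative header values:

package main

import (
	"fmt"
	"maps"
	"net/http"
)

func main() {
	response := http.Header{
		"Cache-Status": {"Souin; hit"},
		"Vary":         {"Accept", "Origin"}, // multi-valued on purpose
	}

	// Old approach: only the first "Vary" value survives.
	writer := http.Header{}
	for k := range response {
		writer.Set(k, response.Get(k))
	}
	fmt.Println(writer["Vary"]) // [Accept]

	// maps.Copy keeps the whole slice for each key.
	writer = http.Header{}
	maps.Copy(writer, response)
	fmt.Println(writer["Vary"]) // [Accept Origin]
}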
maps.Copy(customWriter.Header(), response.Header) customWriter.WriteHeader(response.StatusCode) - customWriter.resetAndCopyToBuffer(response.Body) + _, _ = customWriter.resetAndCopyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -852,7 +854,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n rfc.SetCacheStatusHeader(response, storerName) customWriter.WriteHeader(response.StatusCode) maps.Copy(customWriter.Header(), response.Header) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, _ = customWriter.Send() return err @@ -876,7 +878,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) maps.Copy(customWriter.Header(), response.Header) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -890,7 +892,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) maps.Copy(customWriter.Header(), response.Header) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -914,7 +916,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n response.Header.Set("Cache-Status", response.Header.Get("Cache-Status")+code) maps.Copy(customWriter.Header(), response.Header) customWriter.WriteHeader(response.StatusCode) - customWriter.resetAndCopyToBuffer(response.Body) + _, _ = customWriter.resetAndCopyToBuffer(response.Body) _, err := customWriter.Send() return err diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go index 33e4f1e54..0b94bda18 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go @@ -633,11 +633,13 @@ func (s *SouinBaseHandler) HandleInternally(r *http.Request) (bool, http.Handler return false, nil } -type handlerFunc = func(http.ResponseWriter, *http.Request) error -type statusCodeLogger struct { - http.ResponseWriter - statusCode int -} +type ( + handlerFunc = func(http.ResponseWriter, *http.Request) error + statusCodeLogger struct { + http.ResponseWriter + statusCode int + } +) func (s *statusCodeLogger) WriteHeader(code int) { s.statusCode = code @@ -765,7 +767,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } customWriter.WriteHeader(response.StatusCode) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, _ = customWriter.Send() return nil @@ -791,7 +793,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } customWriter.WriteHeader(response.StatusCode) s.Configuration.GetLogger().Debugf("Serve from cache %+v", req) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() prometheus.Increment(prometheus.CachedResponseCounter) @@ -811,7 +813,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n } customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) - customWriter.copyToBuffer(response.Body) + _, _ = 
customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() customWriter = NewCustomWriter(req, rw, bufPool) go func(v *core.Revalidator, goCw *CustomWriter, goRq *http.Request, goNext func(http.ResponseWriter, *http.Request) error, goCc *cacheobject.RequestCacheDirectives, goCk string, goUri string) { @@ -835,7 +837,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n response.Header.Set("Cache-Status", response.Header.Get("Cache-Status")+code) maps.Copy(customWriter.Header(), response.Header) customWriter.WriteHeader(response.StatusCode) - customWriter.resetAndCopyToBuffer(response.Body) + _, _ = customWriter.resetAndCopyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -852,7 +854,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n rfc.SetCacheStatusHeader(response, storerName) customWriter.WriteHeader(response.StatusCode) maps.Copy(customWriter.Header(), response.Header) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, _ = customWriter.Send() return err @@ -876,7 +878,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) maps.Copy(customWriter.Header(), response.Header) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -890,7 +892,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.WriteHeader(response.StatusCode) rfc.HitStaleCache(&response.Header) maps.Copy(customWriter.Header(), response.Header) - customWriter.copyToBuffer(response.Body) + _, _ = customWriter.copyToBuffer(response.Body) _, err := customWriter.Send() return err @@ -914,7 +916,7 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n response.Header.Set("Cache-Status", response.Header.Get("Cache-Status")+code) maps.Copy(customWriter.Header(), response.Header) customWriter.WriteHeader(response.StatusCode) - customWriter.resetAndCopyToBuffer(response.Body) + _, _ = customWriter.resetAndCopyToBuffer(response.Body) _, err := customWriter.Send() return err From 8029ca1e9d3fa6ed0f1c253f14a0f6a905cd47c9 Mon Sep 17 00:00:00 2001 From: Mohammed Al Sahaf Date: Thu, 2 Oct 2025 01:37:04 +0300 Subject: [PATCH 5/6] another fix at memory bloat by copilot (claude Sonnet 4) Signed-off-by: Mohammed Al Sahaf --- pkg/middleware/middleware.go | 15 +++++++++-- .../traefik/override/middleware/middleware.go | 27 ++++++++++++++----- .../souin/pkg/middleware/middleware.go | 15 +++++++++-- 3 files changed, 46 insertions(+), 11 deletions(-) diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index 18036b6ca..75178fa14 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -516,8 +516,13 @@ func (s *SouinBaseHandler) Upstream( err := s.Store(customWriter, rq, requestCc, cachedKey, uri) defer customWriter.resetBuffer() + // Create a copy of the buffer to prevent memory retention + // when the buffer is returned to the pool + bodyCopy := make([]byte, customWriter.Buf.Len()) + copy(bodyCopy, customWriter.Buf.Bytes()) + return singleflightValue{ - body: customWriter.Buf.Bytes(), + body: bodyCopy, headers: customWriter.Header().Clone(), requestHeaders: rq.Header, code: statusCode, @@ -602,8 +607,14 @@ func (s *SouinBaseHandler) Revalidate(validator *core.Revalidator, next handlerF ) defer 
+
+	// Create a copy of the buffer to prevent memory retention
+	// when the buffer is returned to the pool
+	bodyCopy := make([]byte, customWriter.Buf.Len())
+	copy(bodyCopy, customWriter.Buf.Bytes())
+
 	return singleflightValue{
-		body:    customWriter.Buf.Bytes(),
+		body:    bodyCopy,
 		headers: customWriter.Header().Clone(),
 		code:    statusCode,
 	}, err
diff --git a/plugins/traefik/override/middleware/middleware.go b/plugins/traefik/override/middleware/middleware.go
index e56fc8e70..9a73f709e 100644
--- a/plugins/traefik/override/middleware/middleware.go
+++ b/plugins/traefik/override/middleware/middleware.go
@@ -376,8 +376,13 @@ func (s *SouinBaseHandler) Upstream(
 	err := s.Store(customWriter, rq, requestCc, cachedKey)
 	defer customWriter.resetBuffer()
 
+	// Create a copy of the buffer to prevent memory retention
+	// when the buffer is returned to the pool
+	bodyCopy := make([]byte, customWriter.Buf.Len())
+	copy(bodyCopy, customWriter.Buf.Bytes())
+
 	return singleflightValue{
-		body:           customWriter.Buf.Bytes(),
+		body:           bodyCopy,
 		headers:        customWriter.Header().Clone(),
 		requestHeaders: rq.Header,
 		code:           statusCode,
@@ -457,8 +462,14 @@ func (s *SouinBaseHandler) Revalidate(validator *types.Revalidator, next handler
 	)
 
 	defer customWriter.resetBuffer()
+
+	// Create a copy of the buffer to prevent memory retention
+	// when the buffer is returned to the pool
+	bodyCopy := make([]byte, customWriter.Buf.Len())
+	copy(bodyCopy, customWriter.Buf.Bytes())
+
 	return singleflightValue{
-		body:    customWriter.Buf.Bytes(),
+		body:    bodyCopy,
 		headers: customWriter.Header().Clone(),
 		code:    statusCode,
 	}, err
@@ -489,11 +500,13 @@ func (s *SouinBaseHandler) HandleInternally(r *http.Request) (bool, http.Handler
 	return false, func(w http.ResponseWriter, r *http.Request) {}
 }
 
-type handlerFunc = func(http.ResponseWriter, *http.Request) error
-type statusCodeLogger struct {
-	http.ResponseWriter
-	statusCode int
-}
+type (
+	handlerFunc      = func(http.ResponseWriter, *http.Request) error
+	statusCodeLogger struct {
+		http.ResponseWriter
+		statusCode int
+	}
+)
 
 func (s *statusCodeLogger) WriteHeader(code int) {
 	s.statusCode = code
diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go
index 0b94bda18..00fe71465 100644
--- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go
+++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go
@@ -516,8 +516,13 @@ func (s *SouinBaseHandler) Upstream(
 	err := s.Store(customWriter, rq, requestCc, cachedKey, uri)
 	defer customWriter.resetBuffer()
 
+	// Create a copy of the buffer to prevent memory retention
+	// when the buffer is returned to the pool
+	bodyCopy := make([]byte, customWriter.Buf.Len())
+	copy(bodyCopy, customWriter.Buf.Bytes())
+
 	return singleflightValue{
-		body:           customWriter.Buf.Bytes(),
+		body:           bodyCopy,
 		headers:        customWriter.Header().Clone(),
 		requestHeaders: rq.Header,
 		code:           statusCode,
@@ -602,8 +607,14 @@ func (s *SouinBaseHandler) Revalidate(validator *core.Revalidator, next handlerF
 	)
 
 	defer customWriter.resetBuffer()
+
+	// Create a copy of the buffer to prevent memory retention
+	// when the buffer is returned to the pool
+	bodyCopy := make([]byte, customWriter.Buf.Len())
+	copy(bodyCopy, customWriter.Buf.Bytes())
+
 	return singleflightValue{
-		body:    customWriter.Buf.Bytes(),
+		body:    bodyCopy,
 		headers: customWriter.Header().Clone(),
 		code:    statusCode,
 	}, err

From 8911ffd5909cbbff304b5c313749ecf02811122e Mon Sep 17 00:00:00 2001
From: Mohammed Al Sahaf
Date: Thu, 2 Oct 2025 01:47:35 +0300
Subject: [PATCH 6/6] fix traefik build

Signed-off-by: Mohammed Al Sahaf
---
 plugins/traefik/configuration.go              |  15 ++-
 .../souin/pkg/middleware/middleware.go        |   8 +-
 .../darkweak/souin/pkg/middleware/writer.go   | 125 +++++++++++++++++-
 3 files changed, 141 insertions(+), 7 deletions(-)

diff --git a/plugins/traefik/configuration.go b/plugins/traefik/configuration.go
index 68a0c2cf1..ed61fc27f 100644
--- a/plugins/traefik/configuration.go
+++ b/plugins/traefik/configuration.go
@@ -2,6 +2,7 @@ package traefik
 
 import (
 	"github.com/darkweak/souin/configurationtypes"
+	"github.com/darkweak/storages/core"
 )
 
 // Configuration holder
@@ -13,7 +14,19 @@ type Configuration struct {
 	LogLevel             string                                       `json:"log_level" yaml:"log_level"`
 	Ykeys                map[string]configurationtypes.SurrogateKeys `json:"ykeys" yaml:"ykeys"`
 	SurrogateKeys        map[string]configurationtypes.SurrogateKeys `json:"surrogate_keys" yaml:"surrogate_keys"`
-	SurrogateKeyDisabled bool                                         `json:"disable_surrogate_key" yaml:"disable_surrogate_key"`
+	SurrogateKeyDisabled bool
+
+	logger core.Logger
+}
+
+// GetLogger implements configurationtypes.AbstractConfigurationInterface.
+func (c *Configuration) GetLogger() core.Logger {
+	return c.logger
+}
+
+// SetLogger implements configurationtypes.AbstractConfigurationInterface.
+func (c *Configuration) SetLogger(logger core.Logger) {
+	c.logger = logger
 }
 
 // GetUrls get the urls list in the configuration
diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go
index 00fe71465..75178fa14 100644
--- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go
+++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go
@@ -14,7 +14,7 @@ import (
 	"sync"
 	"time"
 
-	xxhash "github.com/cespare/xxhash/v2"
+	"github.com/cespare/xxhash/v2"
 	"github.com/darkweak/souin/configurationtypes"
 	"github.com/darkweak/souin/context"
 	"github.com/darkweak/souin/helpers"
@@ -350,7 +350,7 @@ func (s *SouinBaseHandler) Store(
 	}
 	res.Header.Set(rfc.StoredLengthHeader, res.Header.Get("Content-Length"))
 	response, err := httputil.DumpResponse(&res, true)
-	if err == nil && (bLen > 0 || canStatusCodeEmptyContent(statusCode) || s.hasAllowedAdditionalStatusCodesToCache(statusCode)) {
+	if err == nil && (bLen > 0 || rq.Method == http.MethodHead || canStatusCodeEmptyContent(statusCode) || s.hasAllowedAdditionalStatusCodesToCache(statusCode)) {
 		variedHeaders, isVaryStar := rfc.VariedHeaderAllCommaSepValues(res.Header)
 		if isVaryStar {
 			// "Implies that the response is uncacheable"
@@ -735,7 +735,11 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n
 	bufPool := s.bufPool.Get().(*bytes.Buffer)
 	bufPool.Reset()
 	defer s.bufPool.Put(bufPool)
+
 	customWriter := NewCustomWriter(req, rw, bufPool)
+	customWriter.Headers.Add("Range", req.Header.Get("Range"))
+	// req.Header.Del("Range")
+
 	go func(req *http.Request, crw *CustomWriter) {
 		<-req.Context().Done()
 		crw.mutex.Lock()
diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/writer.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/writer.go
index 021d5e999..6ba0fd619 100644
--- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/writer.go
+++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/writer.go
@@ -2,9 +2,11 @@ package middleware
 
 import (
 	"bytes"
+	"fmt"
 	"io"
 	"net/http"
 	"strconv"
+	"strings"
 	"sync"
 
 	"github.com/darkweak/go-esi/esi"
@@ -99,6 +101,59 @@ func (r *CustomWriter) Write(b []byte) (int, error) {
 	return len(b), nil
 }
 
+type rangeValue struct {
+	from, to int64
+}
+
+const separator = "--SOUIN-HTTP-CACHE-SEPARATOR"
+
+func parseRange(rangeHeaders []string, contentRange string) ([]rangeValue, rangeValue, int64) {
+	if len(rangeHeaders) == 0 {
+		return nil, rangeValue{}, -1
+	}
+
+	crv := rangeValue{from: 0, to: 0}
+	var total int64 = -1
+	if contentRange != "" {
+		crVal := strings.Split(strings.TrimPrefix(contentRange, "bytes "), "/")
+		total, _ = strconv.ParseInt(crVal[1], 10, 64)
+		total--
+
+		crSplit := strings.Split(crVal[0], "-")
+		crv.from, _ = strconv.ParseInt(crSplit[0], 10, 64)
+		crv.to, _ = strconv.ParseInt(crSplit[1], 10, 64)
+	}
+
+	values := make([]rangeValue, len(rangeHeaders))
+
+	for idx, header := range rangeHeaders {
+		ranges := strings.Split(header, "-")
+		rv := rangeValue{from: -1, to: total}
+
+		// e.g. Range: -5
+		if len(ranges) == 2 && ranges[0] == "" {
+			ranges[0] = "-" + ranges[1]
+			from, _ := strconv.ParseInt(ranges[0], 10, 64)
+			rv.from = total + from
+
+			values[idx] = rv
+
+			continue
+		}
+
+		rv.from, _ = strconv.ParseInt(ranges[0], 10, 64)
+
+		if ranges[1] != "" {
+			rv.to, _ = strconv.ParseInt(ranges[1], 10, 64)
+			rv.to++
+		}
+
+		values[idx] = rv
+	}
+
+	return values, crv, total + 1
+}
+
 // Send delays the response to handle Cache-Status
 func (r *CustomWriter) Send() (int, error) {
 	defer r.resetBuffer()
@@ -106,10 +161,72 @@ func (r *CustomWriter) Send() (int, error) {
 	if storedLength != "" {
 		r.Header().Set("Content-Length", storedLength)
 	}
-	b := esi.Parse(r.Buf.Bytes(), r.Req)
-	if len(b) != 0 {
-		r.Header().Set("Content-Length", strconv.Itoa(len(b)))
+
+	result := r.Buf.Bytes()
+
+	result = esi.Parse(result, r.Req)
+
+	if r.Headers.Get("Range") != "" {
+
+		var bufStr string
+		mimeType := r.Header().Get("Content-Type")
+
+		r.WriteHeader(http.StatusPartialContent)
+
+		rangeHeader, contentRangeValue, total := parseRange(
+			strings.Split(strings.TrimPrefix(r.Headers.Get("Range"), "bytes="), ", "),
+			r.Header().Get("Content-Range"),
+		)
+		bodyBytes := r.Buf.Bytes()
+		bufLen := int64(r.Buf.Len())
+		if total > 0 {
+			bufLen = total
+		}
+
+		if len(rangeHeader) == 1 {
+			header := rangeHeader[0]
+			internalFrom := (header.from - contentRangeValue.from) % bufLen
+			internalTo := (header.to - contentRangeValue.from) % bufLen
+
+			content := bodyBytes[internalFrom:]
+
+			r.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", contentRangeValue.from, contentRangeValue.to, bufLen))
+
+			if internalTo >= 0 {
+				content = content[:internalTo-internalFrom]
+				r.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", header.from, header.to, bufLen))
+			}
+
+			result = content
+		}
+
+		if len(rangeHeader) > 1 {
+			r.Header().Set("Content-Type", "multipart/byteranges; boundary="+separator)
+
+			for _, header := range rangeHeader {
+
+				content := bodyBytes[header.from:]
+				if header.to >= 0 {
+					content = content[:header.to-header.from]
+				}
+
+				bufStr += fmt.Sprintf(`
+%s
+Content-Type: %s
+Content-Range: bytes %d-%d/%d
+
+%s
+`, separator, mimeType, header.from, header.to, r.Buf.Len(), content)
+			}
+
+			result = []byte(bufStr + separator + "--")
+		}
+	}
+
+	if len(result) != 0 {
+		r.Header().Set("Content-Length", strconv.Itoa(len(result)))
 	}
+
 	r.Header().Del(rfc.StoredLengthHeader)
 	r.Header().Del(rfc.StoredTTLHeader)
 
@@ -118,5 +235,5 @@ func (r *CustomWriter) Send() (int, error) {
 		r.headersSent = true
 	}
 
-	return r.Rw.Write(b)
+	return r.Rw.Write(result)
 }