From 417315efd5130e441d9e2cf88e547d8020dbb7ab Mon Sep 17 00:00:00 2001 From: darkweak Date: Tue, 29 Aug 2023 18:38:48 +0200 Subject: [PATCH] fix(plugins): traefik and caddy to use Storers --- pkg/middleware/middleware.go | 11 - plugins/caddy/app.go | 2 +- plugins/caddy/httpcache.go | 2 +- plugins/traefik/override/api/main.go | 8 +- plugins/traefik/override/api/souin.go | 126 +++++- plugins/traefik/override/context/mode.go | 27 ++ plugins/traefik/override/context/now.go | 23 + plugins/traefik/override/context/types.go | 8 +- .../traefik/override/middleware/middleware.go | 399 ++++++++++++++--- .../override/storage/abstractProvider.go | 9 +- .../traefik/override/storage/cacheProvider.go | 29 +- .../override/surrogate/providers/common.go | 18 + .../github.com/darkweak/souin/api/main.go | 8 +- .../github.com/darkweak/souin/api/souin.go | 126 +++++- .../github.com/darkweak/souin/context/mode.go | 27 ++ .../github.com/darkweak/souin/context/now.go | 23 + .../darkweak/souin/context/types.go | 8 +- .../github.com/darkweak/souin/pkg/api/main.go | 8 +- .../darkweak/souin/pkg/api/souin.go | 126 +++++- .../souin/pkg/middleware/middleware.go | 400 +++++++++++++++--- .../souin/pkg/storage/abstractProvider.go | 9 +- .../souin/pkg/storage/cacheProvider.go | 29 +- .../souin/pkg/surrogate/providers/common.go | 18 + .../souin/pkg/surrogate/providers/types.go | 1 + 24 files changed, 1242 insertions(+), 203 deletions(-) create mode 100644 plugins/traefik/override/context/mode.go create mode 100644 plugins/traefik/override/context/now.go create mode 100644 plugins/traefik/vendor/github.com/darkweak/souin/context/mode.go create mode 100644 plugins/traefik/vendor/github.com/darkweak/souin/context/now.go diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index f47c71081..d393b81ac 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -101,7 +101,6 @@ func NewHTTPCacheHandler(c configurationtypes.AbstractConfigurationInterface) *S type SouinBaseHandler struct { Configuration configurationtypes.AbstractConfigurationInterface - Storer storage.Storer Storers []storage.Storer InternalEndpointHandlers *api.MapHandler ExcludeRegex *regexp.Regexp @@ -267,16 +266,6 @@ func (s *SouinBaseHandler) Store( if len(fails) > 0 { status += strings.Join(fails, "") } - - // if s.Storer.Set(cachedKey, response, currentMatchedURL, ma) == nil { - // s.Configuration.GetLogger().Sugar().Debugf("Store the cache key %s into the surrogate keys from the following headers %v", cachedKey, res) - // go func(rs http.Response, key string) { - // _ = s.SurrogateKeyStorer.Store(&rs, key) - // }(res, cachedKey) - // status += "; stored" - // } else { - // status += "; detail=STORAGE-INSERTION-ERROR" - // } } } else { status += "; detail=NO-STORE-DIRECTIVE" diff --git a/plugins/caddy/app.go b/plugins/caddy/app.go index 1a9c4237b..b2cb230ee 100644 --- a/plugins/caddy/app.go +++ b/plugins/caddy/app.go @@ -13,7 +13,7 @@ import ( type SouinApp struct { *DefaultCache // The provider to use. - Storer storage.Storer + Storers []storage.Storer // Surrogate storage to support th econfiguration reload without surrogate-key data loss. SurrogateStorage providers.SurrogateInterface // Cache-key tweaking. 
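Aside: a minimal, self-contained Go sketch of the multi-storer write pattern this patch introduces. It mirrors the WaitGroup fan-out added to SouinBaseHandler.Store later in the diff, but the Storer interface here is a trimmed-down stand-in (Set takes a plain TTL instead of the configurationtypes.URL the real interface receives), and memoryStorer/setEverywhere are hypothetical names used only for the illustration; none of this is part of the patch.

package main

import (
	"fmt"
	"sync"
	"time"
)

// Storer is a trimmed-down stand-in for storage.Storer: only the two methods
// this sketch needs, with a plain TTL where the real Set also receives a
// configurationtypes.URL.
type Storer interface {
	Name() string
	Set(key string, value []byte, ttl time.Duration) error
}

// memoryStorer is a toy in-memory implementation used only to exercise the sketch.
type memoryStorer struct {
	mu    sync.Mutex
	items map[string][]byte
}

func (m *memoryStorer) Name() string { return "MEMORY" }

func (m *memoryStorer) Set(key string, value []byte, _ time.Duration) error {
	m.mu.Lock()
	defer m.mu.Unlock()
	m.items[key] = value
	return nil
}

// setEverywhere writes the serialized response into every configured storer
// concurrently and returns one "; detail=<NAME>-INSERTION-ERROR" fragment per
// failing storer, mirroring how the patched Store method extends Cache-Status.
func setEverywhere(storers []Storer, key string, response []byte, ttl time.Duration) []string {
	var wg sync.WaitGroup
	var mu sync.Mutex
	fails := []string{}

	for _, storer := range storers {
		wg.Add(1)
		go func(current Storer) {
			defer wg.Done()
			if err := current.Set(key, response, ttl); err != nil {
				mu.Lock()
				fails = append(fails, fmt.Sprintf("; detail=%s-INSERTION-ERROR", current.Name()))
				mu.Unlock()
			}
		}(storer)
	}

	wg.Wait()
	return fails
}

func main() {
	storers := []Storer{&memoryStorer{items: map[string][]byte{}}}
	fails := setEverywhere(storers, "example.com-/products", []byte("HTTP/1.1 200 OK\r\n\r\nbody"), time.Minute)
	fmt.Printf("stored with %d failure(s)\n", len(fails))
}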
diff --git a/plugins/caddy/httpcache.go b/plugins/caddy/httpcache.go index bafaab84c..80184a37d 100644 --- a/plugins/caddy/httpcache.go +++ b/plugins/caddy/httpcache.go @@ -269,7 +269,7 @@ func (s *SouinCaddyMiddleware) Provision(ctx caddy.Context) error { }) if l && e == nil { - s.SouinBaseHandler.Storer = v.(storage.Storer) + s.SouinBaseHandler.Storers = append(s.SouinBaseHandler.Storers, v.(storage.Storer)) } } else { s.logger.Sugar().Debug("Store the olric instance.") diff --git a/plugins/traefik/override/api/main.go b/plugins/traefik/override/api/main.go index a5d66938a..157fd4b67 100644 --- a/plugins/traefik/override/api/main.go +++ b/plugins/traefik/override/api/main.go @@ -16,7 +16,7 @@ type MapHandler struct { // GenerateHandlerMap generate the MapHandler func GenerateHandlerMap( configuration configurationtypes.AbstractConfigurationInterface, - storer storage.Storer, + storers []storage.Storer, surrogateStorage providers.SurrogateInterface, ) *MapHandler { hm := make(map[string]http.HandlerFunc) @@ -28,7 +28,7 @@ func GenerateHandlerMap( basePathAPIS = "/souin-api" } - for _, endpoint := range Initialize(configuration, storer, surrogateStorage) { + for _, endpoint := range Initialize(configuration, storers, surrogateStorage) { if endpoint.IsEnabled() { shouldEnable = true if e, ok := endpoint.(*SouinAPI); ok { @@ -45,6 +45,6 @@ func GenerateHandlerMap( } // Initialize contains all apis that should be enabled -func Initialize(c configurationtypes.AbstractConfigurationInterface, storer storage.Storer, surrogateStorage providers.SurrogateInterface) []EndpointInterface { - return []EndpointInterface{initializeSouin(c, storer, surrogateStorage)} +func Initialize(c configurationtypes.AbstractConfigurationInterface, storers []storage.Storer, surrogateStorage providers.SurrogateInterface) []EndpointInterface { + return []EndpointInterface{initializeSouin(c, storers, surrogateStorage)} } diff --git a/plugins/traefik/override/api/souin.go b/plugins/traefik/override/api/souin.go index 81c72f68d..5a5fb3279 100644 --- a/plugins/traefik/override/api/souin.go +++ b/plugins/traefik/override/api/souin.go @@ -5,6 +5,7 @@ import ( "fmt" "net/http" "regexp" + "strings" "github.com/darkweak/souin/configurationtypes" "github.com/darkweak/souin/pkg/storage" @@ -15,40 +16,73 @@ import ( type SouinAPI struct { basePath string enabled bool - storer storage.Storer + storers []storage.Storer surrogateStorage providers.SurrogateInterface + allowedMethods []string +} + +type invalidationType string + +const ( + uriInvalidationType invalidationType = "uri" + uriPrefixInvalidationType invalidationType = "uri-prefix" + originInvalidationType invalidationType = "origin" + groupInvalidationType invalidationType = "group" +) + +type invalidation struct { + Type invalidationType `json:"type"` + Selectors []string `json:"selectors"` + Groups []string `json:"groups"` + Purge bool `json:"purge"` } func initializeSouin( configuration configurationtypes.AbstractConfigurationInterface, - storer storage.Storer, + storers []storage.Storer, surrogateStorage providers.SurrogateInterface, ) *SouinAPI { basePath := configuration.GetAPI().Souin.BasePath if basePath == "" { basePath = "/souin" } + + allowedMethods := configuration.GetDefaultCache().GetAllowedHTTPVerbs() + if len(allowedMethods) == 0 { + allowedMethods = []string{http.MethodGet, http.MethodHead} + } + return &SouinAPI{ basePath, configuration.GetAPI().Souin.Enable, - storer, + storers, surrogateStorage, + allowedMethods, } } // BulkDelete allow user to delete 
multiple items with regexp func (s *SouinAPI) BulkDelete(key string) { - s.storer.DeleteMany(key) + for _, current := range s.storers { + current.DeleteMany(key) + } } // Delete will delete a record into the provider cache system and will update the Souin API if enabled func (s *SouinAPI) Delete(key string) { - s.storer.Delete(key) + for _, current := range s.storers { + current.Delete(key) + } } // GetAll will retrieve all stored keys in the provider func (s *SouinAPI) GetAll() []string { - return s.storer.ListKeys() + keys := []string{} + for _, current := range s.storers { + keys = append(keys, current.ListKeys()...) + } + + return keys } // GetBasePath will return the basepath for this resource @@ -94,13 +128,87 @@ func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { res, _ = json.Marshal(s.GetAll()) } w.Header().Set("Content-Type", "application/json") + case http.MethodPost: + var invalidator invalidation + err := json.NewDecoder(r.Body).Decode(&invalidator) + if err != nil { + w.WriteHeader(http.StatusBadRequest) + return + } + + keysToInvalidate := []string{} + switch invalidator.Type { + case groupInvalidationType: + keysToInvalidate, _ = s.surrogateStorage.Purge(http.Header{"Surrogate-Key": invalidator.Groups}) + case uriPrefixInvalidationType, uriInvalidationType: + bodyKeys := []string{} + listedKeys := s.GetAll() + for _, k := range invalidator.Selectors { + if !strings.Contains(k, "//") { + rq, err := http.NewRequest(http.MethodGet, "//"+k, nil) + if err != nil { + continue + } + + bodyKeys = append(bodyKeys, rq.Host+"-"+rq.URL.Path) + } + } + + for _, allKey := range listedKeys { + for _, bk := range bodyKeys { + if invalidator.Type == uriInvalidationType { + if strings.Contains(allKey, bk) && strings.Contains(allKey, bk+"-") && strings.HasSuffix(allKey, bk) { + keysToInvalidate = append(keysToInvalidate, allKey) + break + } + } else { + if strings.Contains(allKey, bk) && + (strings.Contains(allKey, bk+"-") || strings.Contains(allKey, bk+"?") || strings.Contains(allKey, bk+"/") || strings.HasSuffix(allKey, bk)) { + keysToInvalidate = append(keysToInvalidate, allKey) + break + } + } + } + } + case originInvalidationType: + bodyKeys := []string{} + listedKeys := s.GetAll() + for _, k := range invalidator.Selectors { + if !strings.Contains(k, "//") { + rq, err := http.NewRequest(http.MethodGet, "//"+k, nil) + if err != nil { + continue + } + + bodyKeys = append(bodyKeys, rq.Host) + } + } + + for _, allKey := range listedKeys { + for _, bk := range bodyKeys { + if strings.Contains(allKey, bk) { + keysToInvalidate = append(keysToInvalidate, allKey) + break + } + } + } + } + + for _, k := range keysToInvalidate { + for _, current := range s.storers { + current.Delete(k) + } + } + w.WriteHeader(http.StatusOK) case "PURGE": if compile { keysRg := regexp.MustCompile(s.GetBasePath() + "/(.+)") flushRg := regexp.MustCompile(s.GetBasePath() + "/flush$") if flushRg.FindString(r.RequestURI) != "" { - s.storer.DeleteMany(".+") + for _, current := range s.storers { + current.DeleteMany(".+") + } e := s.surrogateStorage.Destruct() if e != nil { fmt.Printf("Error while purging the surrogate keys: %+v.", e) @@ -113,7 +221,9 @@ func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { } else { ck, _ := s.surrogateStorage.Purge(r.Header) for _, k := range ck { - s.storer.Delete(k) + for _, current := range s.storers { + current.Delete(k) + } } } w.WriteHeader(http.StatusNoContent) diff --git a/plugins/traefik/override/context/mode.go 
b/plugins/traefik/override/context/mode.go new file mode 100644 index 000000000..b041abb15 --- /dev/null +++ b/plugins/traefik/override/context/mode.go @@ -0,0 +1,27 @@ +package context + +import ( + "context" + "net/http" + + "github.com/darkweak/souin/configurationtypes" +) + +const Mode ctxKey = "souin_ctx.MODE" + +type ModeContext struct { + Strict, Bypass_request, Bypass_response bool +} + +func (mc *ModeContext) SetupContext(c configurationtypes.AbstractConfigurationInterface) { + mode := c.GetDefaultCache().GetMode() + mc.Bypass_request = mode == "bypass" || mode == "bypass_request" + mc.Bypass_response = mode == "bypass" || mode == "bypass_response" + mc.Strict = !mc.Bypass_request && !mc.Bypass_response +} + +func (mc *ModeContext) SetContext(req *http.Request) *http.Request { + return req.WithContext(context.WithValue(req.Context(), Mode, mc)) +} + +var _ ctx = (*cacheContext)(nil) diff --git a/plugins/traefik/override/context/now.go b/plugins/traefik/override/context/now.go new file mode 100644 index 000000000..898cc18fe --- /dev/null +++ b/plugins/traefik/override/context/now.go @@ -0,0 +1,23 @@ +package context + +import ( + "context" + "net/http" + "time" + + "github.com/darkweak/souin/configurationtypes" +) + +const Now ctxKey = "souin_ctx.NOW" + +type nowContext struct{} + +func (cc *nowContext) SetupContext(_ configurationtypes.AbstractConfigurationInterface) {} + +func (cc *nowContext) SetContext(req *http.Request) *http.Request { + now := time.Now().UTC() + req.Header.Set("Date", now.Format(time.RFC1123)) + return req.WithContext(context.WithValue(req.Context(), Now, now)) +} + +var _ ctx = (*nowContext)(nil) diff --git a/plugins/traefik/override/context/types.go b/plugins/traefik/override/context/types.go index e788c3854..38bf5ed19 100644 --- a/plugins/traefik/override/context/types.go +++ b/plugins/traefik/override/context/types.go @@ -19,6 +19,8 @@ type ( GraphQL ctx Key ctx Method ctx + Mode ctx + Now ctx Timeout ctx } ) @@ -31,6 +33,8 @@ func GetContext() *Context { GraphQL: &graphQLContext{}, Key: &keyContext{}, Method: &methodContext{}, + Mode: &ModeContext{}, + Now: &nowContext{}, Timeout: &timeoutContext{}, } } @@ -40,11 +44,13 @@ func (c *Context) Init(co configurationtypes.AbstractConfigurationInterface) { c.GraphQL.SetupContext(co) c.Key.SetupContext(co) c.Method.SetupContext(co) + c.Mode.SetupContext(co) + c.Now.SetupContext(co) c.Timeout.SetupContext(co) } func (c *Context) SetBaseContext(req *http.Request) *http.Request { - return c.Timeout.SetContext(c.Method.SetContext(c.CacheName.SetContext(req))) + return c.Mode.SetContext(c.Timeout.SetContext(c.Method.SetContext(c.CacheName.SetContext(c.Now.SetContext(req))))) } func (c *Context) SetContext(req *http.Request) *http.Request { diff --git a/plugins/traefik/override/middleware/middleware.go b/plugins/traefik/override/middleware/middleware.go index 1983a404b..9fbc90e5b 100644 --- a/plugins/traefik/override/middleware/middleware.go +++ b/plugins/traefik/override/middleware/middleware.go @@ -1,8 +1,9 @@ package middleware import ( - "bufio" "bytes" + baseCtx "context" + "errors" "fmt" "io" "net/http" @@ -16,7 +17,6 @@ import ( "github.com/darkweak/souin/context" "github.com/darkweak/souin/helpers" "github.com/darkweak/souin/pkg/api" - "github.com/darkweak/souin/pkg/api/prometheus" "github.com/darkweak/souin/pkg/rfc" "github.com/darkweak/souin/pkg/storage" "github.com/darkweak/souin/pkg/surrogate" @@ -25,11 +25,11 @@ import ( ) func NewHTTPCacheHandler(c configurationtypes.AbstractConfigurationInterface) 
*SouinBaseHandler { - storer, err := storage.NewStorage(c) + storers, err := storage.NewStorages(c) if err != nil { panic(err) } - fmt.Println("Storer initialized.") + fmt.Println("Storers initialized.") regexpUrls := helpers.InitializeRegexp(c) surrogateStorage := surrogate.InitializeSurrogate(c) fmt.Println("Surrogate storage initialized.") @@ -55,20 +55,22 @@ func NewHTTPCacheHandler(c configurationtypes.AbstractConfigurationInterface) *S return &SouinBaseHandler{ Configuration: c, - Storer: storer, - InternalEndpointHandlers: api.GenerateHandlerMap(c, storer, surrogateStorage), + Storers: storers, + InternalEndpointHandlers: api.GenerateHandlerMap(c, storers, surrogateStorage), ExcludeRegex: excludedRegexp, RegexpUrls: regexpUrls, DefaultMatchedUrl: defaultMatchedUrl, SurrogateKeyStorer: surrogateStorage, context: ctx, bufPool: bufPool, + storersLen: len(storers), } } type SouinBaseHandler struct { Configuration configurationtypes.AbstractConfigurationInterface Storer storage.Storer + Storers []storage.Storer InternalEndpointHandlers *api.MapHandler ExcludeRegex *regexp.Regexp RegexpUrls regexp.Regexp @@ -77,6 +79,7 @@ type SouinBaseHandler struct { DefaultMatchedUrl configurationtypes.URL context *context.Context bufPool *sync.Pool + storersLen int } type upsreamError struct{} @@ -85,27 +88,62 @@ func (upsreamError) Error() string { return "Upstream error" } -func (s *SouinBaseHandler) Upstream( +func isCacheableCode(code int) bool { + switch code { + case 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501: + return true + } + + return false +} + +func canBypassAuthorizationRestriction(headers http.Header, bypassed []string) bool { + for _, header := range bypassed { + if strings.ToLower(header) == "authorization" { + return true + } + } + + return strings.Contains(strings.ToLower(headers.Get("Vary")), "authorization") +} + +func (s *SouinBaseHandler) Store( customWriter *CustomWriter, rq *http.Request, - next handlerFunc, requestCc *cacheobject.RequestCacheDirectives, cachedKey string, ) error { - now := time.Now().UTC() - rq.Header.Set("Date", now.Format(time.RFC1123)) - prometheus.Increment(prometheus.RequestCounter) - if err := next(customWriter, rq); err != nil { - customWriter.Header().Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=SERVE-HTTP-ERROR", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) - return err + if !isCacheableCode(customWriter.statusCode) { + customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=UNCACHEABLE-STATUS-CODE", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + + switch customWriter.statusCode { + case 500, 502, 503, 504: + return new(upsreamError) + } + + return nil } - switch customWriter.statusCode { - case 500, 502, 503, 504: - return new(upsreamError) + if customWriter.Header().Get("Cache-Control") == "" { + // TODO see with @mnot if mandatory to not store the response when no Cache-Control given. 
+ // if s.DefaultMatchedUrl.DefaultCacheControl == "" { + // customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=EMPTY-RESPONSE-CACHE-CONTROL", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + // return nil + // } + customWriter.Header().Set("Cache-Control", s.DefaultMatchedUrl.DefaultCacheControl) } responseCc, _ := cacheobject.ParseResponseCacheControl(customWriter.Header().Get("Cache-Control")) + if responseCc == nil { + customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=INVALID-RESPONSE-CACHE-CONTROL", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + return nil + } + + modeContext := rq.Context().Value(context.Mode).(*context.ModeContext) + if !modeContext.Bypass_request && (responseCc.PrivatePresent || rq.Header.Get("Authorization") != "") && !canBypassAuthorizationRestriction(customWriter.Header(), rq.Context().Value(context.IgnoredHeaders).([]string)) { + customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=PRIVATE-OR-AUTHENTICATED-RESPONSE", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + return nil + } currentMatchedURL := s.DefaultMatchedUrl if regexpURL := s.RegexpUrls.FindString(rq.Host + rq.URL.Path); regexpURL != "" { @@ -119,39 +157,80 @@ func (s *SouinBaseHandler) Upstream( } ma := currentMatchedURL.TTL.Duration - if responseCc.MaxAge > 0 { - ma = time.Duration(responseCc.MaxAge) * time.Second - } else if responseCc.SMaxAge > 0 { + if responseCc.SMaxAge >= 0 { ma = time.Duration(responseCc.SMaxAge) * time.Second + } else if responseCc.MaxAge >= 0 { + ma = time.Duration(responseCc.MaxAge) * time.Second } if ma > currentMatchedURL.TTL.Duration { ma = currentMatchedURL.TTL.Duration } + + now := rq.Context().Value(context.Now).(time.Time) date, _ := http.ParseTime(now.Format(http.TimeFormat)) customWriter.Headers.Set(rfc.StoredTTLHeader, ma.String()) ma = ma - time.Since(date) + if exp := customWriter.Header().Get("Expires"); exp != "" { + delta, _ := time.Parse(exp, time.RFC1123) + if sub := delta.Sub(now); sub > 0 { + ma = sub + } + } + status := fmt.Sprintf("%s; fwd=uri-miss", rq.Context().Value(context.CacheName)) - if !requestCc.NoStore && !responseCc.NoStore { + if (modeContext.Bypass_request || !requestCc.NoStore) && + (modeContext.Bypass_response || !responseCc.NoStore) { + headers := customWriter.Headers.Clone() + for hname, shouldDelete := range responseCc.NoCache { + if shouldDelete { + headers.Del(hname) + } + } res := http.Response{ StatusCode: customWriter.statusCode, Body: io.NopCloser(bytes.NewBuffer(customWriter.Buf.Bytes())), - Header: customWriter.Headers, + Header: headers, } - res.Header.Set("Date", time.Now().UTC().Format(http.TimeFormat)) - res.Request = rq + if res.Header.Get("Date") == "" { + res.Header.Set("Date", now.Format(http.TimeFormat)) + } + if res.Header.Get("Content-Length") == "" { + res.Header.Set("Content-Length", fmt.Sprint(customWriter.Buf.Len())) + } + res.Header.Set(rfc.StoredLengthHeader, res.Header.Get("Content-Length")) response, err := httputil.DumpResponse(&res, true) if err == nil { variedHeaders := rfc.HeaderAllCommaSepValues(res.Header) cachedKey += rfc.GetVariedCacheKey(rq, variedHeaders) - if s.Storer.Set(cachedKey, response, currentMatchedURL, ma) == nil { + + var wg sync.WaitGroup + mu := sync.Mutex{} + fails := []string{} + for _, storer := range s.Storers { + wg.Add(1) + go func(currentStorer storage.Storer) { + defer wg.Done() 
+ if currentStorer.Set(cachedKey, response, currentMatchedURL, ma) == nil { + } else { + mu.Lock() + fails = append(fails, fmt.Sprintf("; detail=%s-INSERTION-ERROR", currentStorer.Name())) + mu.Unlock() + } + }(storer) + } + + wg.Wait() + if len(fails) < s.storersLen { go func(rs http.Response, key string) { _ = s.SurrogateKeyStorer.Store(&rs, key) }(res, cachedKey) status += "; stored" - } else { - status += "; detail=STORAGE-INSERTION-ERROR" + } + + if len(fails) > 0 { + status += strings.Join(fails, "") } } } else { @@ -162,6 +241,81 @@ func (s *SouinBaseHandler) Upstream( return nil } +func (s *SouinBaseHandler) Upstream( + customWriter *CustomWriter, + rq *http.Request, + next handlerFunc, + requestCc *cacheobject.RequestCacheDirectives, + cachedKey string, +) error { + if err := next(customWriter, rq); err != nil { + customWriter.Header().Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=SERVE-HTTP-ERROR", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + return err + } + + s.SurrogateKeyStorer.Invalidate(rq.Method, customWriter.Header()) + if !isCacheableCode(customWriter.statusCode) { + customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=UNCACHEABLE-STATUS-CODE", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + + switch customWriter.statusCode { + case 500, 502, 503, 504: + return new(upsreamError) + } + + return nil + } + + if customWriter.Header().Get("Cache-Control") == "" { + // TODO see with @mnot if mandatory to not store the response when no Cache-Control given. + // if s.DefaultMatchedUrl.DefaultCacheControl == "" { + // customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=EMPTY-RESPONSE-CACHE-CONTROL", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + // return nil + // } + customWriter.Header().Set("Cache-Control", s.DefaultMatchedUrl.DefaultCacheControl) + } + + select { + case <-rq.Context().Done(): + return baseCtx.Canceled + default: + return s.Store(customWriter, rq, requestCc, cachedKey) + } +} + +func (s *SouinBaseHandler) Revalidate(validator *rfc.Revalidator, next handlerFunc, customWriter *CustomWriter, rq *http.Request, requestCc *cacheobject.RequestCacheDirectives, cachedKey string) error { + err := next(customWriter, rq) + s.SurrogateKeyStorer.Invalidate(rq.Method, customWriter.Header()) + + if err == nil { + if validator.IfUnmodifiedSincePresent && customWriter.statusCode != http.StatusNotModified { + customWriter.Buf.Reset() + for h, v := range customWriter.Headers { + if len(v) > 0 { + customWriter.Rw.Header().Set(h, strings.Join(v, ", ")) + } + } + customWriter.Rw.WriteHeader(http.StatusPreconditionFailed) + + return errors.New("") + } + + if customWriter.statusCode != http.StatusNotModified { + err = s.Store(customWriter, rq, requestCc, cachedKey) + } + } + + customWriter.Header().Set( + "Cache-Status", + fmt.Sprintf( + "%s; fwd=request; fwd-status=%d; key=%s; detail=REQUEST-REVALIDATION", + rq.Context().Value(context.CacheName), + customWriter.statusCode, + rfc.GetCacheKeyFromCtx(rq.Context()), + ), + ) + return err +} + func (s *SouinBaseHandler) HandleInternally(r *http.Request) (bool, http.HandlerFunc) { if s.InternalEndpointHandlers != nil { for k, handler := range *s.InternalEndpointHandlers.Handlers { @@ -194,42 +348,106 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n if !rq.Context().Value(context.SupportedMethod).(bool) { 
rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=UNSUPPORTED-METHOD") - return next(rw, rq) + err := next(rw, rq) + s.SurrogateKeyStorer.Invalidate(rq.Method, rw.Header()) + + return err } requestCc, coErr := cacheobject.ParseRequestCacheControl(rq.Header.Get("Cache-Control")) - if coErr != nil || requestCc == nil { + modeContext := rq.Context().Value(context.Mode).(*context.ModeContext) + if !modeContext.Bypass_request && (coErr != nil || requestCc == nil) { rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=CACHE-CONTROL-EXTRACTION-ERROR") - return next(rw, rq) + err := next(rw, rq) + s.SurrogateKeyStorer.Invalidate(rq.Method, rw.Header()) + + return err } rq = s.context.SetContext(rq) + + // Yaegi sucks again, it considers false as true + isMutationRequest := rq.Context().Value(context.IsMutationRequest).(bool) + if isMutationRequest { + rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=IS-MUTATION-REQUEST") + + err := next(rw, rq) + s.SurrogateKeyStorer.Invalidate(rq.Method, rw.Header()) + + return err + } cachedKey := rq.Context().Value(context.Key).(string) bufPool := s.bufPool.Get().(*bytes.Buffer) bufPool.Reset() defer s.bufPool.Put(bufPool) customWriter := NewCustomWriter(rq, rw, bufPool) - if !requestCc.NoCache { - cachedVal := s.Storer.Prefix(cachedKey, rq) - response, _ := http.ReadResponse(bufio.NewReader(bytes.NewBuffer(cachedVal)), rq) + go func(req *http.Request, crw *CustomWriter) { + <-req.Context().Done() + crw.mutex.Lock() + crw.headersSent = true + crw.mutex.Unlock() + }(rq, customWriter) + if modeContext.Bypass_request || !requestCc.NoCache { + validator := rfc.ParseRequest(rq) + var response *http.Response + for _, currentStorer := range s.Storers { + response = currentStorer.Prefix(cachedKey, rq, validator) + if response != nil { + break + } + } - if response != nil && rfc.ValidateCacheControl(response, requestCc) { - rfc.SetCacheStatusHeader(response) - if rfc.ValidateMaxAgeCachedResponse(requestCc, response) != nil { + if response != nil && (!modeContext.Strict || rfc.ValidateCacheControl(response, requestCc)) { + if validator.ResponseETag != "" && validator.Matched { + rfc.SetCacheStatusHeader(response) customWriter.Headers = response.Header + if validator.NotModified { + customWriter.statusCode = http.StatusNotModified + customWriter.Buf.Reset() + _, _ = customWriter.Send() + + return nil + } + customWriter.statusCode = response.StatusCode - io.Copy(customWriter.Buf, response.Body) - customWriter.Send() + _, _ = io.Copy(customWriter.Buf, response.Body) + _, _ = customWriter.Send() return nil } - } else if response == nil && (requestCc.MaxStaleSet || requestCc.MaxStale > -1) { - staleCachedVal := s.Storer.Prefix(storage.StalePrefix+cachedKey, rq) - response, _ = http.ReadResponse(bufio.NewReader(bytes.NewBuffer(staleCachedVal)), rq) - if nil != response && rfc.ValidateCacheControl(response, requestCc) { + + if validator.NeedRevalidation { + err := s.Revalidate(validator, next, customWriter, rq, requestCc, cachedKey) + _, _ = customWriter.Send() + + return err + } + if resCc, _ := cacheobject.ParseResponseCacheControl(response.Header.Get("Cache-Control")); resCc.NoCachePresent { + err := s.Revalidate(validator, next, customWriter, rq, requestCc, cachedKey) + _, _ = customWriter.Send() + + return err + } + rfc.SetCacheStatusHeader(response) + if !modeContext.Strict || rfc.ValidateMaxAgeCachedResponse(requestCc, response) != nil { + customWriter.Headers = response.Header + customWriter.statusCode = response.StatusCode + _, _ 
= io.Copy(customWriter.Buf, response.Body) + _, err := customWriter.Send() + + return err + } + } else if response == nil && !requestCc.OnlyIfCached && (requestCc.MaxStaleSet || requestCc.MaxStale > -1) { + for _, currentStorer := range s.Storers { + response = currentStorer.Prefix(storage.StalePrefix+cachedKey, rq, validator) + if response != nil { + break + } + } + if nil != response && (!modeContext.Strict || rfc.ValidateCacheControl(response, requestCc)) { addTime, _ := time.ParseDuration(response.Header.Get(rfc.StoredTTLHeader)) rfc.SetCacheStatusHeader(response) @@ -238,44 +456,101 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.Headers = response.Header customWriter.statusCode = response.StatusCode rfc.HitStaleCache(&response.Header) - io.Copy(customWriter.Buf, response.Body) - customWriter.Send() + _, _ = io.Copy(customWriter.Buf, response.Body) + _, err := customWriter.Send() customWriter = NewCustomWriter(rq, rw, bufPool) - go s.Upstream(customWriter, rq, next, requestCc, cachedKey) + go func(v *rfc.Revalidator, goCw *CustomWriter, goRq *http.Request, goNext func(http.ResponseWriter, *http.Request) error, goCc *cacheobject.RequestCacheDirectives, goCk string) { + _ = s.Revalidate(v, goNext, goCw, goRq, goCc, goCk) + }(validator, customWriter, rq, next, requestCc, cachedKey) buf := s.bufPool.Get().(*bytes.Buffer) buf.Reset() defer s.bufPool.Put(buf) - return nil + return err } - if responseCc.StaleIfError > 0 && s.Upstream(customWriter, rq, next, requestCc, cachedKey) != nil { - customWriter.Headers = response.Header - customWriter.statusCode = response.StatusCode - rfc.HitStaleCache(&response.Header) - io.Copy(customWriter.Buf, response.Body) - customWriter.Send() - - return nil + if responseCc.MustRevalidate || responseCc.NoCachePresent || validator.NeedRevalidation { + rq.Header["If-None-Match"] = append(rq.Header["If-None-Match"], validator.ResponseETag) + err := s.Revalidate(validator, next, customWriter, rq, requestCc, cachedKey) + if err != nil { + if responseCc.StaleIfError > -1 || requestCc.StaleIfError > 0 { + code := fmt.Sprintf("; fwd-status=%d", customWriter.statusCode) + customWriter.Headers = response.Header + customWriter.statusCode = response.StatusCode + rfc.HitStaleCache(&response.Header) + response.Header.Set("Cache-Status", response.Header.Get("Cache-Status")+code) + _, _ = io.Copy(customWriter.Buf, response.Body) + _, err := customWriter.Send() + + return err + } + rw.WriteHeader(http.StatusGatewayTimeout) + customWriter.Buf.Reset() + _, err := customWriter.Send() + + return err + } + + if customWriter.statusCode == http.StatusNotModified { + if !validator.Matched { + rfc.SetCacheStatusHeader(response) + customWriter.statusCode = response.StatusCode + customWriter.Headers = response.Header + _, _ = io.Copy(customWriter.Buf, response.Body) + _, _ = customWriter.Send() + + return err + } + } + + if customWriter.statusCode != http.StatusNotModified && validator.Matched { + customWriter.statusCode = http.StatusNotModified + customWriter.Buf.Reset() + _, _ = customWriter.Send() + + return err + } + + _, _ = customWriter.Send() + + return err } if rfc.ValidateMaxAgeCachedStaleResponse(requestCc, response, int(addTime.Seconds())) != nil { customWriter.Headers = response.Header customWriter.statusCode = response.StatusCode rfc.HitStaleCache(&response.Header) - io.Copy(customWriter.Buf, response.Body) - customWriter.Send() + _, _ = io.Copy(customWriter.Buf, response.Body) + _, err := customWriter.Send() - return nil + 
return err } } } } - if err := s.Upstream(customWriter, rq, next, requestCc, cachedKey); err != nil { - return err + errorCacheCh := make(chan error) + go func() { + errorCacheCh <- s.Upstream(customWriter, rq, next, requestCc, cachedKey) + }() + + select { + case <-rq.Context().Done(): + switch rq.Context().Err() { + case baseCtx.DeadlineExceeded: + customWriter.WriteHeader(http.StatusGatewayTimeout) + rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=DEADLINE-EXCEEDED") + _, _ = customWriter.Rw.Write([]byte("Internal server error")) + return baseCtx.DeadlineExceeded + case baseCtx.Canceled: + return baseCtx.Canceled + default: + return nil + } + case v := <-errorCacheCh: + if v == nil { + _, _ = customWriter.Send() + } + return v } - - _, _ = customWriter.Send() - return nil } diff --git a/plugins/traefik/override/storage/abstractProvider.go b/plugins/traefik/override/storage/abstractProvider.go index 00f325f60..54578aa68 100644 --- a/plugins/traefik/override/storage/abstractProvider.go +++ b/plugins/traefik/override/storage/abstractProvider.go @@ -7,6 +7,7 @@ import ( "time" "github.com/darkweak/souin/configurationtypes" + "github.com/darkweak/souin/pkg/rfc" ) const ( @@ -19,19 +20,21 @@ const ( type Storer interface { ListKeys() []string - Prefix(key string, req *http.Request) []byte + Prefix(key string, req *http.Request, validator *rfc.Revalidator) *http.Response Get(key string) []byte Set(key string, value []byte, url configurationtypes.URL, duration time.Duration) error Delete(key string) DeleteMany(key string) Init() error + Name() string Reset() error } type StorerInstanciator func(configurationtypes.AbstractConfigurationInterface) (Storer, error) -func NewStorage(configuration configurationtypes.AbstractConfigurationInterface) (Storer, error) { - return CacheConnectionFactory(configuration) +func NewStorages(configuration configurationtypes.AbstractConfigurationInterface) ([]Storer, error) { + s, err := CacheConnectionFactory(configuration) + return []Storer{s}, err } func varyVoter(baseKey string, req *http.Request, currentKey string) bool { diff --git a/plugins/traefik/override/storage/cacheProvider.go b/plugins/traefik/override/storage/cacheProvider.go index c590f355a..c66ab8948 100644 --- a/plugins/traefik/override/storage/cacheProvider.go +++ b/plugins/traefik/override/storage/cacheProvider.go @@ -1,6 +1,8 @@ package storage import ( + "bufio" + "bytes" "net/http" "regexp" "strings" @@ -8,6 +10,7 @@ import ( "github.com/akyoto/cache" t "github.com/darkweak/souin/configurationtypes" + "github.com/darkweak/souin/pkg/rfc" ) // Cache provider type @@ -22,6 +25,11 @@ func CacheConnectionFactory(c t.AbstractConfigurationInterface) (Storer, error) return &Cache{Cache: provider, stale: c.GetDefaultCache().GetStale()}, nil } +// Name returns the storer name +func (provider *Cache) Name() string { + return "CACHE" +} + // ListKeys method returns the list of existing keys func (provider *Cache) ListKeys() []string { var keys []string @@ -45,22 +53,23 @@ func (provider *Cache) Get(key string) []byte { } // Prefix method returns the populated response if exists, empty response then -func (provider *Cache) Prefix(key string, req *http.Request) []byte { - var result []byte +func (provider *Cache) Prefix(key string, req *http.Request, validator *rfc.Revalidator) *http.Response { + var result *http.Response provider.Cache.Range(func(k, v interface{}) bool { - if k == key { - result = v.([]byte) - return false - } - if !strings.HasPrefix(k.(string), key) { return true } - if 
varyVoter(key, req, k.(string)) { - result = v.([]byte) - return false + if k == key || varyVoter(key, req, k.(string)) { + if res, err := http.ReadResponse(bufio.NewReader(bytes.NewBuffer(v.([]byte))), req); err == nil { + rfc.ValidateETag(res, validator) + if validator.Matched { + result = res + return false + } + } + return true } return true diff --git a/plugins/traefik/override/surrogate/providers/common.go b/plugins/traefik/override/surrogate/providers/common.go index 3c3051621..d6869434c 100644 --- a/plugins/traefik/override/surrogate/providers/common.go +++ b/plugins/traefik/override/surrogate/providers/common.go @@ -31,6 +31,15 @@ func (s *baseStorage) ParseHeaders(value string) []string { return regexp.MustCompile(s.parent.getHeaderSeparator()+" *").Split(value, -1) } +func isSafeHTTPMethod(method string) bool { + switch method { + case http.MethodGet, http.MethodHead, http.MethodOptions, http.MethodTrace: + return true + } + + return false +} + func uniqueTag(values []string) []string { tmp := make(map[string]bool) list := []string{} @@ -204,6 +213,15 @@ func (s *baseStorage) Purge(header http.Header) (cacheKeys []string, surrogateKe return uniqueTag(toInvalidate), surrogates } +// Invalidate the grouped responses from the Cache-Group-Invalidation HTTP response header +func (s *baseStorage) Invalidate(method string, headers http.Header) { + if !isSafeHTTPMethod(method) { + for _, group := range headers["Cache-Group-Invalidation"] { + s.purgeTag(group) + } + } +} + // List returns the stored keys associated to resources func (s *baseStorage) List() map[string]string { return s.Storage diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/api/main.go b/plugins/traefik/vendor/github.com/darkweak/souin/api/main.go index a5d66938a..157fd4b67 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/api/main.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/api/main.go @@ -16,7 +16,7 @@ type MapHandler struct { // GenerateHandlerMap generate the MapHandler func GenerateHandlerMap( configuration configurationtypes.AbstractConfigurationInterface, - storer storage.Storer, + storers []storage.Storer, surrogateStorage providers.SurrogateInterface, ) *MapHandler { hm := make(map[string]http.HandlerFunc) @@ -28,7 +28,7 @@ func GenerateHandlerMap( basePathAPIS = "/souin-api" } - for _, endpoint := range Initialize(configuration, storer, surrogateStorage) { + for _, endpoint := range Initialize(configuration, storers, surrogateStorage) { if endpoint.IsEnabled() { shouldEnable = true if e, ok := endpoint.(*SouinAPI); ok { @@ -45,6 +45,6 @@ func GenerateHandlerMap( } // Initialize contains all apis that should be enabled -func Initialize(c configurationtypes.AbstractConfigurationInterface, storer storage.Storer, surrogateStorage providers.SurrogateInterface) []EndpointInterface { - return []EndpointInterface{initializeSouin(c, storer, surrogateStorage)} +func Initialize(c configurationtypes.AbstractConfigurationInterface, storers []storage.Storer, surrogateStorage providers.SurrogateInterface) []EndpointInterface { + return []EndpointInterface{initializeSouin(c, storers, surrogateStorage)} } diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/api/souin.go b/plugins/traefik/vendor/github.com/darkweak/souin/api/souin.go index 81c72f68d..5a5fb3279 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/api/souin.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/api/souin.go @@ -5,6 +5,7 @@ import ( "fmt" "net/http" "regexp" + "strings" 
"github.com/darkweak/souin/configurationtypes" "github.com/darkweak/souin/pkg/storage" @@ -15,40 +16,73 @@ import ( type SouinAPI struct { basePath string enabled bool - storer storage.Storer + storers []storage.Storer surrogateStorage providers.SurrogateInterface + allowedMethods []string +} + +type invalidationType string + +const ( + uriInvalidationType invalidationType = "uri" + uriPrefixInvalidationType invalidationType = "uri-prefix" + originInvalidationType invalidationType = "origin" + groupInvalidationType invalidationType = "group" +) + +type invalidation struct { + Type invalidationType `json:"type"` + Selectors []string `json:"selectors"` + Groups []string `json:"groups"` + Purge bool `json:"purge"` } func initializeSouin( configuration configurationtypes.AbstractConfigurationInterface, - storer storage.Storer, + storers []storage.Storer, surrogateStorage providers.SurrogateInterface, ) *SouinAPI { basePath := configuration.GetAPI().Souin.BasePath if basePath == "" { basePath = "/souin" } + + allowedMethods := configuration.GetDefaultCache().GetAllowedHTTPVerbs() + if len(allowedMethods) == 0 { + allowedMethods = []string{http.MethodGet, http.MethodHead} + } + return &SouinAPI{ basePath, configuration.GetAPI().Souin.Enable, - storer, + storers, surrogateStorage, + allowedMethods, } } // BulkDelete allow user to delete multiple items with regexp func (s *SouinAPI) BulkDelete(key string) { - s.storer.DeleteMany(key) + for _, current := range s.storers { + current.DeleteMany(key) + } } // Delete will delete a record into the provider cache system and will update the Souin API if enabled func (s *SouinAPI) Delete(key string) { - s.storer.Delete(key) + for _, current := range s.storers { + current.Delete(key) + } } // GetAll will retrieve all stored keys in the provider func (s *SouinAPI) GetAll() []string { - return s.storer.ListKeys() + keys := []string{} + for _, current := range s.storers { + keys = append(keys, current.ListKeys()...) 
+ } + + return keys } // GetBasePath will return the basepath for this resource @@ -94,13 +128,87 @@ func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { res, _ = json.Marshal(s.GetAll()) } w.Header().Set("Content-Type", "application/json") + case http.MethodPost: + var invalidator invalidation + err := json.NewDecoder(r.Body).Decode(&invalidator) + if err != nil { + w.WriteHeader(http.StatusBadRequest) + return + } + + keysToInvalidate := []string{} + switch invalidator.Type { + case groupInvalidationType: + keysToInvalidate, _ = s.surrogateStorage.Purge(http.Header{"Surrogate-Key": invalidator.Groups}) + case uriPrefixInvalidationType, uriInvalidationType: + bodyKeys := []string{} + listedKeys := s.GetAll() + for _, k := range invalidator.Selectors { + if !strings.Contains(k, "//") { + rq, err := http.NewRequest(http.MethodGet, "//"+k, nil) + if err != nil { + continue + } + + bodyKeys = append(bodyKeys, rq.Host+"-"+rq.URL.Path) + } + } + + for _, allKey := range listedKeys { + for _, bk := range bodyKeys { + if invalidator.Type == uriInvalidationType { + if strings.Contains(allKey, bk) && strings.Contains(allKey, bk+"-") && strings.HasSuffix(allKey, bk) { + keysToInvalidate = append(keysToInvalidate, allKey) + break + } + } else { + if strings.Contains(allKey, bk) && + (strings.Contains(allKey, bk+"-") || strings.Contains(allKey, bk+"?") || strings.Contains(allKey, bk+"/") || strings.HasSuffix(allKey, bk)) { + keysToInvalidate = append(keysToInvalidate, allKey) + break + } + } + } + } + case originInvalidationType: + bodyKeys := []string{} + listedKeys := s.GetAll() + for _, k := range invalidator.Selectors { + if !strings.Contains(k, "//") { + rq, err := http.NewRequest(http.MethodGet, "//"+k, nil) + if err != nil { + continue + } + + bodyKeys = append(bodyKeys, rq.Host) + } + } + + for _, allKey := range listedKeys { + for _, bk := range bodyKeys { + if strings.Contains(allKey, bk) { + keysToInvalidate = append(keysToInvalidate, allKey) + break + } + } + } + } + + for _, k := range keysToInvalidate { + for _, current := range s.storers { + current.Delete(k) + } + } + w.WriteHeader(http.StatusOK) case "PURGE": if compile { keysRg := regexp.MustCompile(s.GetBasePath() + "/(.+)") flushRg := regexp.MustCompile(s.GetBasePath() + "/flush$") if flushRg.FindString(r.RequestURI) != "" { - s.storer.DeleteMany(".+") + for _, current := range s.storers { + current.DeleteMany(".+") + } e := s.surrogateStorage.Destruct() if e != nil { fmt.Printf("Error while purging the surrogate keys: %+v.", e) @@ -113,7 +221,9 @@ func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { } else { ck, _ := s.surrogateStorage.Purge(r.Header) for _, k := range ck { - s.storer.Delete(k) + for _, current := range s.storers { + current.Delete(k) + } } } w.WriteHeader(http.StatusNoContent) diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/context/mode.go b/plugins/traefik/vendor/github.com/darkweak/souin/context/mode.go new file mode 100644 index 000000000..b041abb15 --- /dev/null +++ b/plugins/traefik/vendor/github.com/darkweak/souin/context/mode.go @@ -0,0 +1,27 @@ +package context + +import ( + "context" + "net/http" + + "github.com/darkweak/souin/configurationtypes" +) + +const Mode ctxKey = "souin_ctx.MODE" + +type ModeContext struct { + Strict, Bypass_request, Bypass_response bool +} + +func (mc *ModeContext) SetupContext(c configurationtypes.AbstractConfigurationInterface) { + mode := c.GetDefaultCache().GetMode() + mc.Bypass_request = mode == "bypass" || mode == 
"bypass_request" + mc.Bypass_response = mode == "bypass" || mode == "bypass_response" + mc.Strict = !mc.Bypass_request && !mc.Bypass_response +} + +func (mc *ModeContext) SetContext(req *http.Request) *http.Request { + return req.WithContext(context.WithValue(req.Context(), Mode, mc)) +} + +var _ ctx = (*cacheContext)(nil) diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/context/now.go b/plugins/traefik/vendor/github.com/darkweak/souin/context/now.go new file mode 100644 index 000000000..898cc18fe --- /dev/null +++ b/plugins/traefik/vendor/github.com/darkweak/souin/context/now.go @@ -0,0 +1,23 @@ +package context + +import ( + "context" + "net/http" + "time" + + "github.com/darkweak/souin/configurationtypes" +) + +const Now ctxKey = "souin_ctx.NOW" + +type nowContext struct{} + +func (cc *nowContext) SetupContext(_ configurationtypes.AbstractConfigurationInterface) {} + +func (cc *nowContext) SetContext(req *http.Request) *http.Request { + now := time.Now().UTC() + req.Header.Set("Date", now.Format(time.RFC1123)) + return req.WithContext(context.WithValue(req.Context(), Now, now)) +} + +var _ ctx = (*nowContext)(nil) diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/context/types.go b/plugins/traefik/vendor/github.com/darkweak/souin/context/types.go index e788c3854..38bf5ed19 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/context/types.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/context/types.go @@ -19,6 +19,8 @@ type ( GraphQL ctx Key ctx Method ctx + Mode ctx + Now ctx Timeout ctx } ) @@ -31,6 +33,8 @@ func GetContext() *Context { GraphQL: &graphQLContext{}, Key: &keyContext{}, Method: &methodContext{}, + Mode: &ModeContext{}, + Now: &nowContext{}, Timeout: &timeoutContext{}, } } @@ -40,11 +44,13 @@ func (c *Context) Init(co configurationtypes.AbstractConfigurationInterface) { c.GraphQL.SetupContext(co) c.Key.SetupContext(co) c.Method.SetupContext(co) + c.Mode.SetupContext(co) + c.Now.SetupContext(co) c.Timeout.SetupContext(co) } func (c *Context) SetBaseContext(req *http.Request) *http.Request { - return c.Timeout.SetContext(c.Method.SetContext(c.CacheName.SetContext(req))) + return c.Mode.SetContext(c.Timeout.SetContext(c.Method.SetContext(c.CacheName.SetContext(c.Now.SetContext(req))))) } func (c *Context) SetContext(req *http.Request) *http.Request { diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/main.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/main.go index a5d66938a..157fd4b67 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/main.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/main.go @@ -16,7 +16,7 @@ type MapHandler struct { // GenerateHandlerMap generate the MapHandler func GenerateHandlerMap( configuration configurationtypes.AbstractConfigurationInterface, - storer storage.Storer, + storers []storage.Storer, surrogateStorage providers.SurrogateInterface, ) *MapHandler { hm := make(map[string]http.HandlerFunc) @@ -28,7 +28,7 @@ func GenerateHandlerMap( basePathAPIS = "/souin-api" } - for _, endpoint := range Initialize(configuration, storer, surrogateStorage) { + for _, endpoint := range Initialize(configuration, storers, surrogateStorage) { if endpoint.IsEnabled() { shouldEnable = true if e, ok := endpoint.(*SouinAPI); ok { @@ -45,6 +45,6 @@ func GenerateHandlerMap( } // Initialize contains all apis that should be enabled -func Initialize(c configurationtypes.AbstractConfigurationInterface, storer storage.Storer, surrogateStorage 
providers.SurrogateInterface) []EndpointInterface { - return []EndpointInterface{initializeSouin(c, storer, surrogateStorage)} +func Initialize(c configurationtypes.AbstractConfigurationInterface, storers []storage.Storer, surrogateStorage providers.SurrogateInterface) []EndpointInterface { + return []EndpointInterface{initializeSouin(c, storers, surrogateStorage)} } diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/souin.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/souin.go index 81c72f68d..5a5fb3279 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/souin.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/api/souin.go @@ -5,6 +5,7 @@ import ( "fmt" "net/http" "regexp" + "strings" "github.com/darkweak/souin/configurationtypes" "github.com/darkweak/souin/pkg/storage" @@ -15,40 +16,73 @@ import ( type SouinAPI struct { basePath string enabled bool - storer storage.Storer + storers []storage.Storer surrogateStorage providers.SurrogateInterface + allowedMethods []string +} + +type invalidationType string + +const ( + uriInvalidationType invalidationType = "uri" + uriPrefixInvalidationType invalidationType = "uri-prefix" + originInvalidationType invalidationType = "origin" + groupInvalidationType invalidationType = "group" +) + +type invalidation struct { + Type invalidationType `json:"type"` + Selectors []string `json:"selectors"` + Groups []string `json:"groups"` + Purge bool `json:"purge"` } func initializeSouin( configuration configurationtypes.AbstractConfigurationInterface, - storer storage.Storer, + storers []storage.Storer, surrogateStorage providers.SurrogateInterface, ) *SouinAPI { basePath := configuration.GetAPI().Souin.BasePath if basePath == "" { basePath = "/souin" } + + allowedMethods := configuration.GetDefaultCache().GetAllowedHTTPVerbs() + if len(allowedMethods) == 0 { + allowedMethods = []string{http.MethodGet, http.MethodHead} + } + return &SouinAPI{ basePath, configuration.GetAPI().Souin.Enable, - storer, + storers, surrogateStorage, + allowedMethods, } } // BulkDelete allow user to delete multiple items with regexp func (s *SouinAPI) BulkDelete(key string) { - s.storer.DeleteMany(key) + for _, current := range s.storers { + current.DeleteMany(key) + } } // Delete will delete a record into the provider cache system and will update the Souin API if enabled func (s *SouinAPI) Delete(key string) { - s.storer.Delete(key) + for _, current := range s.storers { + current.Delete(key) + } } // GetAll will retrieve all stored keys in the provider func (s *SouinAPI) GetAll() []string { - return s.storer.ListKeys() + keys := []string{} + for _, current := range s.storers { + keys = append(keys, current.ListKeys()...) 
+ } + + return keys } // GetBasePath will return the basepath for this resource @@ -94,13 +128,87 @@ func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { res, _ = json.Marshal(s.GetAll()) } w.Header().Set("Content-Type", "application/json") + case http.MethodPost: + var invalidator invalidation + err := json.NewDecoder(r.Body).Decode(&invalidator) + if err != nil { + w.WriteHeader(http.StatusBadRequest) + return + } + + keysToInvalidate := []string{} + switch invalidator.Type { + case groupInvalidationType: + keysToInvalidate, _ = s.surrogateStorage.Purge(http.Header{"Surrogate-Key": invalidator.Groups}) + case uriPrefixInvalidationType, uriInvalidationType: + bodyKeys := []string{} + listedKeys := s.GetAll() + for _, k := range invalidator.Selectors { + if !strings.Contains(k, "//") { + rq, err := http.NewRequest(http.MethodGet, "//"+k, nil) + if err != nil { + continue + } + + bodyKeys = append(bodyKeys, rq.Host+"-"+rq.URL.Path) + } + } + + for _, allKey := range listedKeys { + for _, bk := range bodyKeys { + if invalidator.Type == uriInvalidationType { + if strings.Contains(allKey, bk) && strings.Contains(allKey, bk+"-") && strings.HasSuffix(allKey, bk) { + keysToInvalidate = append(keysToInvalidate, allKey) + break + } + } else { + if strings.Contains(allKey, bk) && + (strings.Contains(allKey, bk+"-") || strings.Contains(allKey, bk+"?") || strings.Contains(allKey, bk+"/") || strings.HasSuffix(allKey, bk)) { + keysToInvalidate = append(keysToInvalidate, allKey) + break + } + } + } + } + case originInvalidationType: + bodyKeys := []string{} + listedKeys := s.GetAll() + for _, k := range invalidator.Selectors { + if !strings.Contains(k, "//") { + rq, err := http.NewRequest(http.MethodGet, "//"+k, nil) + if err != nil { + continue + } + + bodyKeys = append(bodyKeys, rq.Host) + } + } + + for _, allKey := range listedKeys { + for _, bk := range bodyKeys { + if strings.Contains(allKey, bk) { + keysToInvalidate = append(keysToInvalidate, allKey) + break + } + } + } + } + + for _, k := range keysToInvalidate { + for _, current := range s.storers { + current.Delete(k) + } + } + w.WriteHeader(http.StatusOK) case "PURGE": if compile { keysRg := regexp.MustCompile(s.GetBasePath() + "/(.+)") flushRg := regexp.MustCompile(s.GetBasePath() + "/flush$") if flushRg.FindString(r.RequestURI) != "" { - s.storer.DeleteMany(".+") + for _, current := range s.storers { + current.DeleteMany(".+") + } e := s.surrogateStorage.Destruct() if e != nil { fmt.Printf("Error while purging the surrogate keys: %+v.", e) @@ -113,7 +221,9 @@ func (s *SouinAPI) HandleRequest(w http.ResponseWriter, r *http.Request) { } else { ck, _ := s.surrogateStorage.Purge(r.Header) for _, k := range ck { - s.storer.Delete(k) + for _, current := range s.storers { + current.Delete(k) + } } } w.WriteHeader(http.StatusNoContent) diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go index 1983a404b..d4feddd91 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/middleware/middleware.go @@ -1,8 +1,9 @@ package middleware import ( - "bufio" "bytes" + baseCtx "context" + "errors" "fmt" "io" "net/http" @@ -16,7 +17,6 @@ import ( "github.com/darkweak/souin/context" "github.com/darkweak/souin/helpers" "github.com/darkweak/souin/pkg/api" - "github.com/darkweak/souin/pkg/api/prometheus" 
"github.com/darkweak/souin/pkg/rfc" "github.com/darkweak/souin/pkg/storage" "github.com/darkweak/souin/pkg/surrogate" @@ -25,14 +25,12 @@ import ( ) func NewHTTPCacheHandler(c configurationtypes.AbstractConfigurationInterface) *SouinBaseHandler { - storer, err := storage.NewStorage(c) + storers, err := storage.NewStorages(c) if err != nil { panic(err) } - fmt.Println("Storer initialized.") regexpUrls := helpers.InitializeRegexp(c) surrogateStorage := surrogate.InitializeSurrogate(c) - fmt.Println("Surrogate storage initialized.") var excludedRegexp *regexp.Regexp = nil if c.GetDefaultCache().GetRegex().Exclude != "" { excludedRegexp = regexp.MustCompile(c.GetDefaultCache().GetRegex().Exclude) @@ -51,24 +49,25 @@ func NewHTTPCacheHandler(c configurationtypes.AbstractConfigurationInterface) *S Headers: c.GetDefaultCache().GetHeaders(), DefaultCacheControl: c.GetDefaultCache().GetDefaultCacheControl(), } - fmt.Println("Souin configuration is now loaded.") return &SouinBaseHandler{ Configuration: c, - Storer: storer, - InternalEndpointHandlers: api.GenerateHandlerMap(c, storer, surrogateStorage), + Storers: storers, + InternalEndpointHandlers: api.GenerateHandlerMap(c, storers, surrogateStorage), ExcludeRegex: excludedRegexp, RegexpUrls: regexpUrls, DefaultMatchedUrl: defaultMatchedUrl, SurrogateKeyStorer: surrogateStorage, context: ctx, bufPool: bufPool, + storersLen: len(storers), } } type SouinBaseHandler struct { Configuration configurationtypes.AbstractConfigurationInterface Storer storage.Storer + Storers []storage.Storer InternalEndpointHandlers *api.MapHandler ExcludeRegex *regexp.Regexp RegexpUrls regexp.Regexp @@ -77,6 +76,7 @@ type SouinBaseHandler struct { DefaultMatchedUrl configurationtypes.URL context *context.Context bufPool *sync.Pool + storersLen int } type upsreamError struct{} @@ -85,27 +85,62 @@ func (upsreamError) Error() string { return "Upstream error" } -func (s *SouinBaseHandler) Upstream( +func isCacheableCode(code int) bool { + switch code { + case 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501: + return true + } + + return false +} + +func canBypassAuthorizationRestriction(headers http.Header, bypassed []string) bool { + for _, header := range bypassed { + if strings.ToLower(header) == "authorization" { + return true + } + } + + return strings.Contains(strings.ToLower(headers.Get("Vary")), "authorization") +} + +func (s *SouinBaseHandler) Store( customWriter *CustomWriter, rq *http.Request, - next handlerFunc, requestCc *cacheobject.RequestCacheDirectives, cachedKey string, ) error { - now := time.Now().UTC() - rq.Header.Set("Date", now.Format(time.RFC1123)) - prometheus.Increment(prometheus.RequestCounter) - if err := next(customWriter, rq); err != nil { - customWriter.Header().Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=SERVE-HTTP-ERROR", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) - return err + if !isCacheableCode(customWriter.statusCode) { + customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=UNCACHEABLE-STATUS-CODE", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + + switch customWriter.statusCode { + case 500, 502, 503, 504: + return new(upsreamError) + } + + return nil } - switch customWriter.statusCode { - case 500, 502, 503, 504: - return new(upsreamError) + if customWriter.Header().Get("Cache-Control") == "" { + // TODO see with @mnot if mandatory to not store the response when no Cache-Control given. 
+ // if s.DefaultMatchedUrl.DefaultCacheControl == "" { + // customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=EMPTY-RESPONSE-CACHE-CONTROL", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + // return nil + // } + customWriter.Header().Set("Cache-Control", s.DefaultMatchedUrl.DefaultCacheControl) } responseCc, _ := cacheobject.ParseResponseCacheControl(customWriter.Header().Get("Cache-Control")) + if responseCc == nil { + customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=INVALID-RESPONSE-CACHE-CONTROL", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + return nil + } + + modeContext := rq.Context().Value(context.Mode).(*context.ModeContext) + if !modeContext.Bypass_request && (responseCc.PrivatePresent || rq.Header.Get("Authorization") != "") && !canBypassAuthorizationRestriction(customWriter.Header(), rq.Context().Value(context.IgnoredHeaders).([]string)) { + customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=PRIVATE-OR-AUTHENTICATED-RESPONSE", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + return nil + } currentMatchedURL := s.DefaultMatchedUrl if regexpURL := s.RegexpUrls.FindString(rq.Host + rq.URL.Path); regexpURL != "" { @@ -119,39 +154,80 @@ func (s *SouinBaseHandler) Upstream( } ma := currentMatchedURL.TTL.Duration - if responseCc.MaxAge > 0 { - ma = time.Duration(responseCc.MaxAge) * time.Second - } else if responseCc.SMaxAge > 0 { + if responseCc.SMaxAge >= 0 { ma = time.Duration(responseCc.SMaxAge) * time.Second + } else if responseCc.MaxAge >= 0 { + ma = time.Duration(responseCc.MaxAge) * time.Second } if ma > currentMatchedURL.TTL.Duration { ma = currentMatchedURL.TTL.Duration } + + now := rq.Context().Value(context.Now).(time.Time) date, _ := http.ParseTime(now.Format(http.TimeFormat)) customWriter.Headers.Set(rfc.StoredTTLHeader, ma.String()) ma = ma - time.Since(date) + if exp := customWriter.Header().Get("Expires"); exp != "" { + delta, _ := time.Parse(exp, time.RFC1123) + if sub := delta.Sub(now); sub > 0 { + ma = sub + } + } + status := fmt.Sprintf("%s; fwd=uri-miss", rq.Context().Value(context.CacheName)) - if !requestCc.NoStore && !responseCc.NoStore { + if (modeContext.Bypass_request || !requestCc.NoStore) && + (modeContext.Bypass_response || !responseCc.NoStore) { + headers := customWriter.Headers.Clone() + for hname, shouldDelete := range responseCc.NoCache { + if shouldDelete { + headers.Del(hname) + } + } res := http.Response{ StatusCode: customWriter.statusCode, Body: io.NopCloser(bytes.NewBuffer(customWriter.Buf.Bytes())), - Header: customWriter.Headers, + Header: headers, } - res.Header.Set("Date", time.Now().UTC().Format(http.TimeFormat)) - res.Request = rq + if res.Header.Get("Date") == "" { + res.Header.Set("Date", now.Format(http.TimeFormat)) + } + if res.Header.Get("Content-Length") == "" { + res.Header.Set("Content-Length", fmt.Sprint(customWriter.Buf.Len())) + } + res.Header.Set(rfc.StoredLengthHeader, res.Header.Get("Content-Length")) response, err := httputil.DumpResponse(&res, true) if err == nil { variedHeaders := rfc.HeaderAllCommaSepValues(res.Header) cachedKey += rfc.GetVariedCacheKey(rq, variedHeaders) - if s.Storer.Set(cachedKey, response, currentMatchedURL, ma) == nil { + + var wg sync.WaitGroup + mu := sync.Mutex{} + fails := []string{} + for _, storer := range s.Storers { + wg.Add(1) + go func(currentStorer storage.Storer) { + defer wg.Done() 
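+					// Write the response into this storer; a failure is recorded
+					// under the mutex so the Cache-Status detail can report which
+					// backend rejected the entry.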
+ if currentStorer.Set(cachedKey, response, currentMatchedURL, ma) == nil { + } else { + mu.Lock() + fails = append(fails, fmt.Sprintf("; detail=%s-INSERTION-ERROR", currentStorer.Name())) + mu.Unlock() + } + }(storer) + } + + wg.Wait() + if len(fails) < s.storersLen { go func(rs http.Response, key string) { _ = s.SurrogateKeyStorer.Store(&rs, key) }(res, cachedKey) status += "; stored" - } else { - status += "; detail=STORAGE-INSERTION-ERROR" + } + + if len(fails) > 0 { + status += strings.Join(fails, "") } } } else { @@ -162,6 +238,81 @@ func (s *SouinBaseHandler) Upstream( return nil } +func (s *SouinBaseHandler) Upstream( + customWriter *CustomWriter, + rq *http.Request, + next handlerFunc, + requestCc *cacheobject.RequestCacheDirectives, + cachedKey string, +) error { + if err := next(customWriter, rq); err != nil { + customWriter.Header().Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=SERVE-HTTP-ERROR", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + return err + } + + s.SurrogateKeyStorer.Invalidate(rq.Method, customWriter.Header()) + if !isCacheableCode(customWriter.statusCode) { + customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=UNCACHEABLE-STATUS-CODE", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + + switch customWriter.statusCode { + case 500, 502, 503, 504: + return new(upsreamError) + } + + return nil + } + + if customWriter.Header().Get("Cache-Control") == "" { + // TODO see with @mnot if mandatory to not store the response when no Cache-Control given. + // if s.DefaultMatchedUrl.DefaultCacheControl == "" { + // customWriter.Headers.Set("Cache-Status", fmt.Sprintf("%s; fwd=uri-miss; key=%s; detail=EMPTY-RESPONSE-CACHE-CONTROL", rq.Context().Value(context.CacheName), rfc.GetCacheKeyFromCtx(rq.Context()))) + // return nil + // } + customWriter.Header().Set("Cache-Control", s.DefaultMatchedUrl.DefaultCacheControl) + } + + select { + case <-rq.Context().Done(): + return baseCtx.Canceled + default: + return s.Store(customWriter, rq, requestCc, cachedKey) + } +} + +func (s *SouinBaseHandler) Revalidate(validator *rfc.Revalidator, next handlerFunc, customWriter *CustomWriter, rq *http.Request, requestCc *cacheobject.RequestCacheDirectives, cachedKey string) error { + err := next(customWriter, rq) + s.SurrogateKeyStorer.Invalidate(rq.Method, customWriter.Header()) + + if err == nil { + if validator.IfUnmodifiedSincePresent && customWriter.statusCode != http.StatusNotModified { + customWriter.Buf.Reset() + for h, v := range customWriter.Headers { + if len(v) > 0 { + customWriter.Rw.Header().Set(h, strings.Join(v, ", ")) + } + } + customWriter.Rw.WriteHeader(http.StatusPreconditionFailed) + + return errors.New("") + } + + if customWriter.statusCode != http.StatusNotModified { + err = s.Store(customWriter, rq, requestCc, cachedKey) + } + } + + customWriter.Header().Set( + "Cache-Status", + fmt.Sprintf( + "%s; fwd=request; fwd-status=%d; key=%s; detail=REQUEST-REVALIDATION", + rq.Context().Value(context.CacheName), + customWriter.statusCode, + rfc.GetCacheKeyFromCtx(rq.Context()), + ), + ) + return err +} + func (s *SouinBaseHandler) HandleInternally(r *http.Request) (bool, http.HandlerFunc) { if s.InternalEndpointHandlers != nil { for k, handler := range *s.InternalEndpointHandlers.Handlers { @@ -194,42 +345,106 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n if !rq.Context().Value(context.SupportedMethod).(bool) { 
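+		// Unsupported methods bypass the cache, but the response headers are
+		// still handed to the surrogate storage so Cache-Group-Invalidation
+		// keeps working for mutating verbs.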
rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=UNSUPPORTED-METHOD") - return next(rw, rq) + err := next(rw, rq) + s.SurrogateKeyStorer.Invalidate(rq.Method, rw.Header()) + + return err } requestCc, coErr := cacheobject.ParseRequestCacheControl(rq.Header.Get("Cache-Control")) - if coErr != nil || requestCc == nil { + modeContext := rq.Context().Value(context.Mode).(*context.ModeContext) + if !modeContext.Bypass_request && (coErr != nil || requestCc == nil) { rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=CACHE-CONTROL-EXTRACTION-ERROR") - return next(rw, rq) + err := next(rw, rq) + s.SurrogateKeyStorer.Invalidate(rq.Method, rw.Header()) + + return err } rq = s.context.SetContext(rq) + + // Yaegi sucks again, it considers false as true + isMutationRequest := rq.Context().Value(context.IsMutationRequest).(bool) + if isMutationRequest { + rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=IS-MUTATION-REQUEST") + + err := next(rw, rq) + s.SurrogateKeyStorer.Invalidate(rq.Method, rw.Header()) + + return err + } cachedKey := rq.Context().Value(context.Key).(string) bufPool := s.bufPool.Get().(*bytes.Buffer) bufPool.Reset() defer s.bufPool.Put(bufPool) customWriter := NewCustomWriter(rq, rw, bufPool) - if !requestCc.NoCache { - cachedVal := s.Storer.Prefix(cachedKey, rq) - response, _ := http.ReadResponse(bufio.NewReader(bytes.NewBuffer(cachedVal)), rq) + go func(req *http.Request, crw *CustomWriter) { + <-req.Context().Done() + crw.mutex.Lock() + crw.headersSent = true + crw.mutex.Unlock() + }(rq, customWriter) + if modeContext.Bypass_request || !requestCc.NoCache { + validator := rfc.ParseRequest(rq) + var response *http.Response + for _, currentStorer := range s.Storers { + response = currentStorer.Prefix(cachedKey, rq, validator) + if response != nil { + break + } + } - if response != nil && rfc.ValidateCacheControl(response, requestCc) { - rfc.SetCacheStatusHeader(response) - if rfc.ValidateMaxAgeCachedResponse(requestCc, response) != nil { + if response != nil && (!modeContext.Strict || rfc.ValidateCacheControl(response, requestCc)) { + if validator.ResponseETag != "" && validator.Matched { + rfc.SetCacheStatusHeader(response) customWriter.Headers = response.Header + if validator.NotModified { + customWriter.statusCode = http.StatusNotModified + customWriter.Buf.Reset() + _, _ = customWriter.Send() + + return nil + } + customWriter.statusCode = response.StatusCode - io.Copy(customWriter.Buf, response.Body) - customWriter.Send() + _, _ = io.Copy(customWriter.Buf, response.Body) + _, _ = customWriter.Send() return nil } - } else if response == nil && (requestCc.MaxStaleSet || requestCc.MaxStale > -1) { - staleCachedVal := s.Storer.Prefix(storage.StalePrefix+cachedKey, rq) - response, _ = http.ReadResponse(bufio.NewReader(bytes.NewBuffer(staleCachedVal)), rq) - if nil != response && rfc.ValidateCacheControl(response, requestCc) { + + if validator.NeedRevalidation { + err := s.Revalidate(validator, next, customWriter, rq, requestCc, cachedKey) + _, _ = customWriter.Send() + + return err + } + if resCc, _ := cacheobject.ParseResponseCacheControl(response.Header.Get("Cache-Control")); resCc.NoCachePresent { + err := s.Revalidate(validator, next, customWriter, rq, requestCc, cachedKey) + _, _ = customWriter.Send() + + return err + } + rfc.SetCacheStatusHeader(response) + if !modeContext.Strict || rfc.ValidateMaxAgeCachedResponse(requestCc, response) != nil { + customWriter.Headers = response.Header + customWriter.statusCode = response.StatusCode + _, _ 
= io.Copy(customWriter.Buf, response.Body) + _, err := customWriter.Send() + + return err + } + } else if response == nil && !requestCc.OnlyIfCached && (requestCc.MaxStaleSet || requestCc.MaxStale > -1) { + for _, currentStorer := range s.Storers { + response = currentStorer.Prefix(storage.StalePrefix+cachedKey, rq, validator) + if response != nil { + break + } + } + if nil != response && (!modeContext.Strict || rfc.ValidateCacheControl(response, requestCc)) { addTime, _ := time.ParseDuration(response.Header.Get(rfc.StoredTTLHeader)) rfc.SetCacheStatusHeader(response) @@ -238,44 +453,101 @@ func (s *SouinBaseHandler) ServeHTTP(rw http.ResponseWriter, rq *http.Request, n customWriter.Headers = response.Header customWriter.statusCode = response.StatusCode rfc.HitStaleCache(&response.Header) - io.Copy(customWriter.Buf, response.Body) - customWriter.Send() + _, _ = io.Copy(customWriter.Buf, response.Body) + _, err := customWriter.Send() customWriter = NewCustomWriter(rq, rw, bufPool) - go s.Upstream(customWriter, rq, next, requestCc, cachedKey) + go func(v *rfc.Revalidator, goCw *CustomWriter, goRq *http.Request, goNext func(http.ResponseWriter, *http.Request) error, goCc *cacheobject.RequestCacheDirectives, goCk string) { + _ = s.Revalidate(v, goNext, goCw, goRq, goCc, goCk) + }(validator, customWriter, rq, next, requestCc, cachedKey) buf := s.bufPool.Get().(*bytes.Buffer) buf.Reset() defer s.bufPool.Put(buf) - return nil + return err } - if responseCc.StaleIfError > 0 && s.Upstream(customWriter, rq, next, requestCc, cachedKey) != nil { - customWriter.Headers = response.Header - customWriter.statusCode = response.StatusCode - rfc.HitStaleCache(&response.Header) - io.Copy(customWriter.Buf, response.Body) - customWriter.Send() - - return nil + if responseCc.MustRevalidate || responseCc.NoCachePresent || validator.NeedRevalidation { + rq.Header["If-None-Match"] = append(rq.Header["If-None-Match"], validator.ResponseETag) + err := s.Revalidate(validator, next, customWriter, rq, requestCc, cachedKey) + if err != nil { + if responseCc.StaleIfError > -1 || requestCc.StaleIfError > 0 { + code := fmt.Sprintf("; fwd-status=%d", customWriter.statusCode) + customWriter.Headers = response.Header + customWriter.statusCode = response.StatusCode + rfc.HitStaleCache(&response.Header) + response.Header.Set("Cache-Status", response.Header.Get("Cache-Status")+code) + _, _ = io.Copy(customWriter.Buf, response.Body) + _, err := customWriter.Send() + + return err + } + rw.WriteHeader(http.StatusGatewayTimeout) + customWriter.Buf.Reset() + _, err := customWriter.Send() + + return err + } + + if customWriter.statusCode == http.StatusNotModified { + if !validator.Matched { + rfc.SetCacheStatusHeader(response) + customWriter.statusCode = response.StatusCode + customWriter.Headers = response.Header + _, _ = io.Copy(customWriter.Buf, response.Body) + _, _ = customWriter.Send() + + return err + } + } + + if customWriter.statusCode != http.StatusNotModified && validator.Matched { + customWriter.statusCode = http.StatusNotModified + customWriter.Buf.Reset() + _, _ = customWriter.Send() + + return err + } + + _, _ = customWriter.Send() + + return err } if rfc.ValidateMaxAgeCachedStaleResponse(requestCc, response, int(addTime.Seconds())) != nil { customWriter.Headers = response.Header customWriter.statusCode = response.StatusCode rfc.HitStaleCache(&response.Header) - io.Copy(customWriter.Buf, response.Body) - customWriter.Send() + _, _ = io.Copy(customWriter.Buf, response.Body) + _, err := customWriter.Send() - return nil + 
return err } } } } - if err := s.Upstream(customWriter, rq, next, requestCc, cachedKey); err != nil { - return err + errorCacheCh := make(chan error) + go func() { + errorCacheCh <- s.Upstream(customWriter, rq, next, requestCc, cachedKey) + }() + + select { + case <-rq.Context().Done(): + switch rq.Context().Err() { + case baseCtx.DeadlineExceeded: + customWriter.WriteHeader(http.StatusGatewayTimeout) + rw.Header().Set("Cache-Status", cacheName+"; fwd=bypass; detail=DEADLINE-EXCEEDED") + _, _ = customWriter.Rw.Write([]byte("Internal server error")) + return baseCtx.DeadlineExceeded + case baseCtx.Canceled: + return baseCtx.Canceled + default: + return nil + } + case v := <-errorCacheCh: + if v == nil { + _, _ = customWriter.Send() + } + return v } - - _, _ = customWriter.Send() - return nil } diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/abstractProvider.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/abstractProvider.go index 00f325f60..54578aa68 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/abstractProvider.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/abstractProvider.go @@ -7,6 +7,7 @@ import ( "time" "github.com/darkweak/souin/configurationtypes" + "github.com/darkweak/souin/pkg/rfc" ) const ( @@ -19,19 +20,21 @@ const ( type Storer interface { ListKeys() []string - Prefix(key string, req *http.Request) []byte + Prefix(key string, req *http.Request, validator *rfc.Revalidator) *http.Response Get(key string) []byte Set(key string, value []byte, url configurationtypes.URL, duration time.Duration) error Delete(key string) DeleteMany(key string) Init() error + Name() string Reset() error } type StorerInstanciator func(configurationtypes.AbstractConfigurationInterface) (Storer, error) -func NewStorage(configuration configurationtypes.AbstractConfigurationInterface) (Storer, error) { - return CacheConnectionFactory(configuration) +func NewStorages(configuration configurationtypes.AbstractConfigurationInterface) ([]Storer, error) { + s, err := CacheConnectionFactory(configuration) + return []Storer{s}, err } func varyVoter(baseKey string, req *http.Request, currentKey string) bool { diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/cacheProvider.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/cacheProvider.go index c590f355a..c66ab8948 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/cacheProvider.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/storage/cacheProvider.go @@ -1,6 +1,8 @@ package storage import ( + "bufio" + "bytes" "net/http" "regexp" "strings" @@ -8,6 +10,7 @@ import ( "github.com/akyoto/cache" t "github.com/darkweak/souin/configurationtypes" + "github.com/darkweak/souin/pkg/rfc" ) // Cache provider type @@ -22,6 +25,11 @@ func CacheConnectionFactory(c t.AbstractConfigurationInterface) (Storer, error) return &Cache{Cache: provider, stale: c.GetDefaultCache().GetStale()}, nil } +// Name returns the storer name +func (provider *Cache) Name() string { + return "CACHE" +} + // ListKeys method returns the list of existing keys func (provider *Cache) ListKeys() []string { var keys []string @@ -45,22 +53,23 @@ func (provider *Cache) Get(key string) []byte { } // Prefix method returns the populated response if exists, empty response then -func (provider *Cache) Prefix(key string, req *http.Request) []byte { - var result []byte +func (provider *Cache) Prefix(key string, req *http.Request, validator 
*rfc.Revalidator) *http.Response { + var result *http.Response provider.Cache.Range(func(k, v interface{}) bool { - if k == key { - result = v.([]byte) - return false - } - if !strings.HasPrefix(k.(string), key) { return true } - if varyVoter(key, req, k.(string)) { - result = v.([]byte) - return false + if k == key || varyVoter(key, req, k.(string)) { + if res, err := http.ReadResponse(bufio.NewReader(bytes.NewBuffer(v.([]byte))), req); err == nil { + rfc.ValidateETag(res, validator) + if validator.Matched { + result = res + return false + } + } + return true } return true diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/common.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/common.go index 3c3051621..d6869434c 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/common.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/common.go @@ -31,6 +31,15 @@ func (s *baseStorage) ParseHeaders(value string) []string { return regexp.MustCompile(s.parent.getHeaderSeparator()+" *").Split(value, -1) } +func isSafeHTTPMethod(method string) bool { + switch method { + case http.MethodGet, http.MethodHead, http.MethodOptions, http.MethodTrace: + return true + } + + return false +} + func uniqueTag(values []string) []string { tmp := make(map[string]bool) list := []string{} @@ -204,6 +213,15 @@ func (s *baseStorage) Purge(header http.Header) (cacheKeys []string, surrogateKe return uniqueTag(toInvalidate), surrogates } +// Invalidate the grouped responses from the Cache-Group-Invalidation HTTP response header +func (s *baseStorage) Invalidate(method string, headers http.Header) { + if !isSafeHTTPMethod(method) { + for _, group := range headers["Cache-Group-Invalidation"] { + s.purgeTag(group) + } + } +} + // List returns the stored keys associated to resources func (s *baseStorage) List() map[string]string { return s.Storage diff --git a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/types.go b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/types.go index f2b05f3a5..52f95cd68 100644 --- a/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/types.go +++ b/plugins/traefik/vendor/github.com/darkweak/souin/pkg/surrogate/providers/types.go @@ -13,6 +13,7 @@ type SurrogateInterface interface { getSurrogateControl(http.Header) string getSurrogateKey(http.Header) string Purge(http.Header) (cacheKeys []string, surrogateKeys []string) + Invalidate(method string, h http.Header) purgeTag(string) []string Store(*http.Response, string) error storeTag(string, string, *regexp.Regexp)
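
Below is a minimal client sketch for the POST invalidation endpoint added in api/souin.go above. The payload shape (type, selectors, groups, purge) and the accepted type values (uri, uri-prefix, origin, group) come from the invalidation struct in this patch; the listen address and the /souin-api/souin path are assumptions based on the default base paths and may differ per deployment.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// invalidationPayload mirrors the JSON body decoded by the new POST handler.
type invalidationPayload struct {
	Type      string   `json:"type"`      // "uri", "uri-prefix", "origin" or "group"
	Selectors []string `json:"selectors"` // host/path selectors for the uri* and origin types
	Groups    []string `json:"groups"`    // surrogate groups for the "group" type
	Purge     bool     `json:"purge"`
}

func main() {
	// Invalidate every cached key whose URI starts with example.com/api/.
	payload := invalidationPayload{
		Type:      "uri-prefix",
		Selectors: []string{"example.com/api/"},
	}

	body, _ := json.Marshal(payload)

	// Assumption: Souin listens locally and exposes its API under /souin-api/souin.
	resp, err := http.Post("http://127.0.0.1/souin-api/souin", "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("invalidation request failed:", err)
		return
	}
	defer resp.Body.Close()

	// The handler answers 200 OK once the matching keys have been deleted from
	// every configured storer, or 400 Bad Request on a malformed body.
	fmt.Println("status:", resp.Status)
}

Per the HandleRequest switch above, uri-prefix matches stored keys where the selector is followed by "-", "?", a deeper path segment, or the end of the key, while uri requires a tighter match on the full key.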