package cache

import (
	"context"
	"fmt"
	"net/url"
	"strconv"
	"time"

	"forge.cadoles.com/arcad/edge/pkg/storage"
	"forge.cadoles.com/arcad/edge/pkg/storage/driver"
	"github.com/allegro/bigcache/v3"
	"github.com/pkg/errors"
	"gitlab.com/wpetit/goweb/logger"
)

func init() {
	driver.RegisterBlobStoreFactory("cache", blobStoreFactory)
}

// blobStoreFactory builds a caching storage.BlobStore from a DSN of the form
//
//	cache://host/path?driver=<backend>&cacheTTL=...&blobCacheShards=...
//
// The required 'driver' parameter selects the underlying blob-store driver;
// the remaining cache-specific parameters are consumed here and stripped
// before the rest of the DSN is forwarded to the backend driver.
//
// Recognized optional parameters:
//   - cacheTTL                (time.ParseDuration format)
//   - blobCacheShards         (int)
//   - blobCacheMaxMemorySize  (int)
//   - blobBucketCacheSize     (int)
//   - blobInfoCacheSize       (int)
func blobStoreFactory(dsn *url.URL) (storage.BlobStore, error) {
	query := dsn.Query()

	rawDriver := query.Get("driver")
	if rawDriver == "" {
		return nil, errors.New("missing required url parameter 'driver'")
	}

	// Consumed parameters are deleted so they are not passed to the backend.
	query.Del("driver")

	blobStoreOptionFuncs := make([]OptionFunc, 0)

	if rawCacheTTL := query.Get("cacheTTL"); rawCacheTTL != "" {
		query.Del("cacheTTL")

		ttl, err := time.ParseDuration(rawCacheTTL)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'cacheTTL'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithCacheTTL(ttl))
	}

	// All integer-valued cache options share the same parse/strip/apply
	// pattern; drive them from a table instead of duplicating the stanza.
	intOptions := []struct {
		name   string
		option func(int) OptionFunc
	}{
		{"blobCacheShards", WithBlobCacheShards},
		{"blobCacheMaxMemorySize", WithBlobCacheMaxMemorySize},
		{"blobBucketCacheSize", WithBucketCacheSize},
		{"blobInfoCacheSize", WithBlobInfoCacheSize},
	}

	for _, opt := range intOptions {
		value, ok, err := parseIntParam(query, opt.name)
		if err != nil {
			return nil, errors.WithStack(err)
		}

		if ok {
			blobStoreOptionFuncs = append(blobStoreOptionFuncs, opt.option(value))
		}
	}

	// Rebuild the DSN for the backend driver with the cache parameters
	// removed. Named backendURL (not `url`) to avoid shadowing the net/url
	// package.
	// NOTE(review): user info and fragment from the original DSN are not
	// forwarded — confirm no backend driver relies on them.
	backendURL := &url.URL{
		Scheme:   rawDriver,
		Host:     dsn.Host,
		Path:     dsn.Path,
		RawQuery: query.Encode(),
	}

	backend, err := driver.NewBlobStore(backendURL.String())
	if err != nil {
		return nil, errors.WithStack(err)
	}

	store, err := NewBlobStore(backend, blobStoreOptionFuncs...)
	if err != nil {
		return nil, errors.WithStack(err)
	}

	return store, nil
}

// parseIntParam reads the named query parameter as a 32-bit integer.
// It returns (value, true, nil) when the parameter is present and valid,
// (0, false, nil) when it is absent, and a wrapped error when it is present
// but malformed. When present, the parameter is deleted from query so it is
// not forwarded to the backend DSN.
func parseIntParam(query url.Values, name string) (int, bool, error) {
	raw := query.Get(name)
	if raw == "" {
		return 0, false, nil
	}

	query.Del(name)

	value, err := strconv.ParseInt(raw, 10, 32)
	if err != nil {
		// Same message shape as the original per-parameter errors.
		return 0, false, errors.Wrapf(err, "could not parse url parameter '%s'", name)
	}

	return int(value), true, nil
}

// cacheLogger adapts the application logger to bigcache's Logger interface.
type cacheLogger struct{}

// Printf forwards bigcache log output to the debug level of the shared logger.
func (l *cacheLogger) Printf(format string, v ...interface{}) {
	logger.Debug(context.Background(), fmt.Sprintf(format, v...))
}

// Compile-time check that cacheLogger satisfies bigcache.Logger.
var _ bigcache.Logger = &cacheLogger{}