edge/pkg/storage/driver/cache/driver.go

package cache

import (
	"context"
	"fmt"
	"net/url"
	"strconv"
	"time"

	"forge.cadoles.com/arcad/edge/pkg/storage"
	"forge.cadoles.com/arcad/edge/pkg/storage/driver"

	"github.com/allegro/bigcache/v3"
	"github.com/pkg/errors"
	"gitlab.com/wpetit/goweb/logger"
)

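// init registers the "cache" blob store factory with the driver registry under
// the "cache" scheme.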
func init() {
	driver.RegisterBlobStoreFactory("cache", blobStoreFactory)
}

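// blobStoreFactory builds a cache-wrapped blob store from a DSN. The required
// 'driver' query parameter selects the backend blob store; cache-specific
// parameters (cacheTTL, blobCacheShards, blobCacheMaxMemorySize,
// blobBucketCacheSize, blobInfoCacheSize) are consumed here and the remaining
// parameters are forwarded to the backend.
//
// A hypothetical DSN, assuming a "sqlite" blob store driver is registered
// elsewhere in the project:
//
//	cache:///data/blobs.sqlite?driver=sqlite&cacheTTL=30s&blobCacheShards=32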
func blobStoreFactory(dsn *url.URL) (storage.BlobStore, error) {
	query := dsn.Query()

	rawDriver := query.Get("driver")
	if rawDriver == "" {
		return nil, errors.New("missing required url parameter 'driver'")
	}

	query.Del("driver")

	blobStoreOptionFuncs := make([]OptionFunc, 0)

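	// Each cache-specific parameter below is parsed into an option and removed
	// from the query so that only backend parameters are forwarded.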
	rawCacheTTL := query.Get("cacheTTL")
	if rawCacheTTL != "" {
		query.Del("cacheTTL")

		ttl, err := time.ParseDuration(rawCacheTTL)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'cacheTTL'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithCacheTTL(ttl))
	}

	rawCacheShards := query.Get("blobCacheShards")
	if rawCacheShards != "" {
		query.Del("blobCacheShards")

		cacheShards, err := strconv.ParseInt(rawCacheShards, 10, 32)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'blobCacheShards'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithBlobCacheShards(int(cacheShards)))
	}

	rawBlobCacheMaxMemorySize := query.Get("blobCacheMaxMemorySize")
	if rawBlobCacheMaxMemorySize != "" {
		query.Del("blobCacheMaxMemorySize")

		blobCacheMaxMemorySize, err := strconv.ParseInt(rawBlobCacheMaxMemorySize, 10, 32)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'blobCacheMaxMemorySize'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithBlobCacheMaxMemorySize(int(blobCacheMaxMemorySize)))
	}

	rawBlobBucketCacheSize := query.Get("blobBucketCacheSize")
	if rawBlobBucketCacheSize != "" {
		query.Del("blobBucketCacheSize")

		blobBucketCacheSize, err := strconv.ParseInt(rawBlobBucketCacheSize, 10, 32)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'blobBucketCacheSize'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithBucketCacheSize(int(blobBucketCacheSize)))
	}

	rawBlobInfoCacheSize := query.Get("blobInfoCacheSize")
	if rawBlobInfoCacheSize != "" {
		query.Del("blobInfoCacheSize")

		blobInfoCacheSize, err := strconv.ParseInt(rawBlobInfoCacheSize, 10, 32)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'blobInfoCacheSize'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithBlobInfoCacheSize(int(blobInfoCacheSize)))
	}

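	// Rebuild the DSN with the selected backend driver as scheme and the
	// cache-specific parameters stripped from the query string.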
	url := &url.URL{
		Scheme:   rawDriver,
		Host:     dsn.Host,
		Path:     dsn.Path,
		RawQuery: query.Encode(),
	}

	backend, err := driver.NewBlobStore(url.String())
	if err != nil {
		return nil, errors.WithStack(err)
	}

	store, err := NewBlobStore(backend, blobStoreOptionFuncs...)
	if err != nil {
		return nil, errors.WithStack(err)
	}

	return store, nil
}

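// cacheLogger adapts the application logger to the bigcache.Logger interface,
// routing bigcache's internal messages to the debug log level.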
type cacheLogger struct{}

func (l *cacheLogger) Printf(format string, v ...interface{}) {
	logger.Debug(context.Background(), fmt.Sprintf(format, v...))
}

var _ bigcache.Logger = &cacheLogger{}