package cache

import (
	"context"
	"fmt"
	"net/url"
	"strconv"
	"time"

	"forge.cadoles.com/arcad/edge/pkg/storage"
	"forge.cadoles.com/arcad/edge/pkg/storage/driver"
	"github.com/allegro/bigcache/v3"
	"github.com/pkg/errors"
	"gitlab.com/wpetit/goweb/logger"
)

// Register the "cache" blob store driver so it can be selected by DSN scheme.
func init() {
	driver.RegisterBlobStoreFactory("cache", blobStoreFactory)
}

// blobStoreFactory builds a caching blob store from a DSN. The required
// 'driver' query parameter selects the underlying backend; the cache-related
// parameters below are consumed here and stripped from the DSN before it is
// forwarded to that backend.
func blobStoreFactory(dsn *url.URL) (storage.BlobStore, error) {
	query := dsn.Query()

	rawDriver := query.Get("driver")
	if rawDriver == "" {
		return nil, errors.New("missing required url parameter 'driver'")
	}

	query.Del("driver")

	blobStoreOptionFuncs := make([]OptionFunc, 0)

	rawCacheTTL := query.Get("cacheTTL")
	if rawCacheTTL != "" {
		query.Del("cacheTTL")

		ttl, err := time.ParseDuration(rawCacheTTL)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'cacheTTL'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithCacheTTL(ttl))
	}

	rawCacheShards := query.Get("blobCacheShards")
	if rawCacheShards != "" {
		query.Del("blobCacheShards")

		cacheShards, err := strconv.ParseInt(rawCacheShards, 10, 32)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'blobCacheShards'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithBlobCacheShards(int(cacheShards)))
	}

	rawBlobCacheMaxMemorySize := query.Get("blobCacheMaxMemorySize")
	if rawBlobCacheMaxMemorySize != "" {
		query.Del("blobCacheMaxMemorySize")

		blobCacheMaxMemorySize, err := strconv.ParseInt(rawBlobCacheMaxMemorySize, 10, 32)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'blobCacheMaxMemorySize'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithBlobCacheMaxMemorySize(int(blobCacheMaxMemorySize)))
	}

	rawBlobBucketCacheSize := query.Get("blobBucketCacheSize")
	if rawBlobBucketCacheSize != "" {
		query.Del("blobBucketCacheSize")

		blobBucketCacheSize, err := strconv.ParseInt(rawBlobBucketCacheSize, 10, 32)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'blobBucketCacheSize'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithBucketCacheSize(int(blobBucketCacheSize)))
	}

	rawBlobInfoCacheSize := query.Get("blobInfoCacheSize")
	if rawBlobInfoCacheSize != "" {
		query.Del("blobInfoCacheSize")

		blobInfoCacheSize, err := strconv.ParseInt(rawBlobInfoCacheSize, 10, 32)
		if err != nil {
			return nil, errors.Wrap(err, "could not parse url parameter 'blobInfoCacheSize'")
		}

		blobStoreOptionFuncs = append(blobStoreOptionFuncs, WithBlobInfoCacheSize(int(blobInfoCacheSize)))
	}

	// Rebuild the DSN for the underlying backend: the 'driver' parameter
	// becomes the scheme and the remaining query parameters are forwarded as-is.
	url := &url.URL{
		Scheme:   rawDriver,
		Host:     dsn.Host,
		Path:     dsn.Path,
		RawQuery: query.Encode(),
	}

	backend, err := driver.NewBlobStore(url.String())
	if err != nil {
		return nil, errors.WithStack(err)
	}

	store, err := NewBlobStore(backend, blobStoreOptionFuncs...)
	if err != nil {
		return nil, errors.WithStack(err)
	}

	return store, nil
}
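
// A hypothetical example DSN (values and the 'sqlite' backend are chosen for
// illustration only and are not defined in this file):
//
//	cache://data/blobs.db?driver=sqlite&cacheTTL=30s&blobCacheShards=32
//
// would apply a 30s cache TTL and 32 cache shards here, then hand
// sqlite://data/blobs.db to driver.NewBlobStore for the actual storage.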

// cacheLogger adapts the application logger to the bigcache.Logger interface.
type cacheLogger struct{}

func (l *cacheLogger) Printf(format string, v ...interface{}) {
	logger.Debug(context.Background(), fmt.Sprintf(format, v...))
}

var _ bigcache.Logger = &cacheLogger{}
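
// A minimal sketch (assumed wiring, not taken from this package) of how
// cacheLogger could be passed to bigcache through its Config.Logger field:
//
//	config := bigcache.DefaultConfig(10 * time.Minute)
//	config.Logger = &cacheLogger{}
//	cache, err := bigcache.New(context.Background(), config)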