3 changes: 2 additions & 1 deletion README.md
@@ -33,7 +33,8 @@ path match:
- [ ] epoch based retry & loadbalancing
- [ ] modularize
- [ ] easyjson & msgp

- [ ] consider https://dgraph.io/blog/post/introducing-ristretto-high-perf-go-cache/
- [ ] replace logrus with zerolog

# ref

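The new TODO item "replace logrus with zerolog" is only a note here, but the logrus call sites touched later in this diff (for example log.WithError(err).Error(...) in BigCacheManager.GetItem) would translate roughly as below. This is a sketch of the eventual swap, not part of the PR:

```go
package main

import (
    "errors"

    "github.com/rs/zerolog/log"
)

func main() {
    err := errors.New("unexpected end of JSON input")
    // logrus today: log.WithError(err).Error("failed to unmarshal cached item")
    // zerolog equivalent:
    log.Error().Err(err).Msg("failed to unmarshal cached item")
}
```
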
156 changes: 0 additions & 156 deletions cache_manager.go

This file was deleted.

19 changes: 7 additions & 12 deletions cache.go → caching/bigcache.go
@@ -1,19 +1,14 @@
package main
package caching

import (
    "encoding/binary"
    "errors"
    "github.com/allegro/bigcache"
    log "github.com/sirupsen/logrus"
    "github.com/revolution1/jsonrpc-proxy/fnv64"
    "github.com/sirupsen/logrus"
    "time"
)

type ProxyCache interface {
    Set(key string, val []byte, ttl time.Duration) error
    Get(key string) []byte
    Clear() error
}

type BigCacheTTL struct {
    *bigcache.BigCache
}
@@ -26,9 +21,9 @@ func NewBigCacheTTL(maxTTL, cleanWindow time.Duration, maxSizeMb int) *BigCacheTTL {
        MaxEntriesInWindow: 1000 * 10 * 60,
        MaxEntrySize: 500,
        Verbose: true,
        Hasher: fnv64a{},
        Hasher: fnv64.Fnv64a{},
        HardMaxCacheSize: maxSizeMb,
        Logger: log.StandardLogger(),
        Logger: logrus.StandardLogger(),
    })
    if err != nil {
        panic(err)
@@ -65,8 +60,8 @@ func (c *BigCacheTTL) Get(key string) []byte {
    return val[8:]
}

func (c *BigCacheTTL) Clear() error {
    return c.BigCache.Reset()
func (c *BigCacheTTL) Clear() {
    _ = c.BigCache.Reset()
}

func (c *BigCacheTTL) Iterator() {}
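
Get above returns val[8:] and the file imports encoding/binary, which points at the trick BigCacheTTL uses: each stored value is prefixed with an 8-byte deadline so per-key TTLs can sit on top of bigcache's single, cache-wide lifetime. A minimal sketch of that layout follows; the helper names and the exact byte order are assumptions, since Set's body is collapsed in this diff:

```go
package caching

import (
    "encoding/binary"
    "time"
)

// encodeWithDeadline prepends an absolute expiry (UnixNano, 8 bytes) to the payload.
// Hypothetical helper mirroring what Set presumably does before handing the value to bigcache.
func encodeWithDeadline(val []byte, ttl time.Duration) []byte {
    buf := make([]byte, 8+len(val))
    binary.LittleEndian.PutUint64(buf, uint64(time.Now().Add(ttl).UnixNano()))
    copy(buf[8:], val)
    return buf
}

// decodeIfFresh is the read-side counterpart: drop the entry if its per-key
// deadline has passed, otherwise return the payload after the 8-byte prefix.
func decodeIfFresh(raw []byte) []byte {
    if len(raw) < 8 {
        return nil
    }
    if time.Now().UnixNano() > int64(binary.LittleEndian.Uint64(raw)) {
        return nil
    }
    return raw[8:]
}
```
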
25 changes: 20 additions & 5 deletions cache_test.go → caching/bigcache_test.go
@@ -1,23 +1,38 @@
package main
package caching

import (
    "github.com/revolution1/jsonrpc-proxy/utils"
    assertion "github.com/stretchr/testify/assert"
    "strconv"
    "testing"
    "time"
)

func TestCacheValSize(t *testing.T) {
    assert := assertion.New(t)
    c := NewBigCacheTTL(time.Second, time.Second, 64)
    for i := 1024 * 256; i < 1024*1024; i++ {
        err := c.Set("key", utils.Blob('x', i), time.Millisecond)
        assert.NoError(err)
        _ = c.Delete("key")
        if err != nil {
            t.Log("size: ", i)
            break
        }
    }
}

func TestBigCacheTTL(t *testing.T) {
    assert := assertion.New(t)
    c := NewBigCacheTTL(time.Second, time.Second)
    c := NewBigCacheTTL(time.Second, time.Second, 64)
    assert.Nil(c.Get("a"))
    assert.NoError(c.Set("1", []byte("val"), time.Millisecond))
    assert.Equal([]byte("val"), c.Get("1"))
    time.Sleep(2 * time.Millisecond)
    assert.Nil(c.Get("1"))
    assert.NoError(c.Clear())
    c.Clear()

    c = NewBigCacheTTL(0, 0)
    c = NewBigCacheTTL(0, 0, 64)
    assert.NoError(c.Set("1", []byte("val"), time.Millisecond))
    assert.Equal([]byte("val"), c.Get("1"))
    time.Sleep(2 * time.Millisecond)
@@ -29,7 +44,7 @@ func TestBigCacheTTL(t *testing.T) {
}

func BenchmarkBigCacheTTL(b *testing.B) {
    c := NewBigCacheTTL(time.Second, time.Second)
    c := NewBigCacheTTL(time.Second, time.Second, 64)
    for i := 0; i < b.N; i++ {
        _ = c.Set(strconv.Itoa(i), []byte("test val"), time.Millisecond)
        _ = c.Get(strconv.Itoa(i - 1))
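
TestCacheValSize above probes how large a single value can get before Set starts failing, using utils.Blob to build payloads of a given size. The utils package is not part of this diff; a plausible shape for that helper, assumed from the call utils.Blob('x', i), is just a filled byte slice:

```go
package utils

import "bytes"

// Blob returns a slice of length n filled with the byte c.
// Assumed signature, inferred from how the test calls utils.Blob('x', i).
func Blob(c byte, n int) []byte {
    return bytes.Repeat([]byte{c}, n)
}
```
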
72 changes: 72 additions & 0 deletions caching/bigmanager.go
@@ -0,0 +1,72 @@
package caching

import (
    jsoniter "github.com/json-iterator/go"
    "time"
)

type BigCacheManager struct {
    cache1m ProxyCache
    cache1h ProxyCache
    cacheSolid ProxyCache
}

func NewCacheManager() *BigCacheManager {
    return &BigCacheManager{
        cache1m: NewBigCacheTTL(time.Minute, 30*time.Second, 64),
        cache1h: NewBigCacheTTL(time.Hour, time.Minute, 128),
        cacheSolid: NewBigCacheTTL(0, 0, 256),
    }
}

func (c *BigCacheManager) Set(key string, val []byte, ttl time.Duration) error {
    if ttl <= 0 {
        return nil
    }
    return c.getCacheForTTL(ttl).Set(key, val, ttl)
}

func (c *BigCacheManager) getCacheForTTL(ttl time.Duration) ProxyCache {
    switch {
    case ttl < time.Minute:
        return c.cache1m
    case ttl < time.Hour:
        return c.cache1h
    default:
        return c.cacheSolid
    }
}

func (c *BigCacheManager) Get(key string, suggestTTL time.Duration) []byte {
    if val := c.getCacheForTTL(suggestTTL).Get(key); val != nil {
        return val
    } else if val := c.cacheSolid.Get(key); val != nil {
        return val
    } else if val := c.cache1h.Get(key); val != nil {
        return val
    } else if val := c.cache1m.Get(key); val != nil {
        return val
    }
    return nil
}

func (c *BigCacheManager) GetItem(key string, suggestTTL time.Duration) *CachedItem {
    val := c.Get(key, suggestTTL)
    if val == nil {
        return nil
    }
    item := AcquireCachedItem()
    err := jsoniter.Unmarshal(val, item)
    if err != nil {
        log.WithError(err).Error("failed to unmarshal cached item")
        ReleaseCachedItem(item)
        return nil
    }
    return item
}

func (c *BigCacheManager) Clear() {
    c.cache1m.Clear()
    c.cache1h.Clear()
    c.cacheSolid.Clear()
}
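
BigCacheManager shards entries across three BigCacheTTL instances by requested TTL (under a minute, under an hour, everything longer or unbounded), and Get probes the tier suggested by the caller before falling back through the rest, so a misleading TTL hint still finds the entry. A short usage sketch; the key and values here are illustrative:

```go
package caching

import (
    "fmt"
    "time"
)

func demoTieredCache() {
    m := NewCacheManager()

    // A 10s TTL routes to the sub-minute tier; a zero TTL is silently dropped by Set.
    _ = m.Set("eth_blockNumber", []byte(`"0x10d4f"`), 10*time.Second)
    _ = m.Set("never-stored", []byte("x"), 0)

    // Even with a wrong TTL hint, the fallback chain still locates the entry.
    fmt.Println(string(m.Get("eth_blockNumber", time.Hour))) // "0x10d4f"
    fmt.Println(m.Get("never-stored", time.Second) == nil)   // true
}
```
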
34 changes: 34 additions & 0 deletions caching/cache.go
@@ -0,0 +1,34 @@
package caching

import (
    jsoniter "github.com/json-iterator/go"
    "github.com/revolution1/jsonrpc-proxy/jsonrpc"
    "github.com/valyala/fasthttp"
    "time"
)

type ProxyCache interface {
    Set(key string, val []byte, ttl time.Duration) error
    Get(key string) []byte
    Clear()
}

type CacheManager interface {
    SetRpcCache(req *jsonrpc.RpcRequest, resp *jsonrpc.RpcResponse, ttl time.Duration) error
    SetHttpCache(req *jsonrpc.RpcRequest, resp *fasthttp.Response, ttl time.Duration) error
    GetItem(req *jsonrpc.RpcRequest) *CachedItem
    Clear()
}

type CachedHttpResp struct {
    Code int `json:"c,omitempty"`
    ContentEncoding []byte `json:"e,omitempty"`
    ContentType []byte `json:"t,omitempty"`
    Body []byte `json:"b,omitempty"`
}

type CachedItem struct {
    RpcError *jsonrpc.RpcError `json:"e,omitempty"`
    Result jsoniter.RawMessage `json:"r,omitempty"`
    HttpResponse *CachedHttpResp `json:"h,omitempty"`
}
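
A CachedItem carries either a JSON-RPC result/error or a captured upstream HTTP response, with single-letter json tags keeping serialized entries compact. A rough sketch of replaying a cached HTTP entry onto an outgoing fasthttp response follows; the field usage is inferred from the structs above, and the proxy's actual replay path is not part of this diff:

```go
package caching

import "github.com/valyala/fasthttp"

// writeCachedHTTP copies a cached upstream response onto an outgoing
// fasthttp.Response. Hypothetical helper, not taken from the PR.
func writeCachedHTTP(dst *fasthttp.Response, c *CachedHttpResp) {
    dst.SetStatusCode(c.Code)
    if len(c.ContentType) > 0 {
        dst.Header.SetContentTypeBytes(c.ContentType)
    }
    if len(c.ContentEncoding) > 0 {
        dst.Header.Set("Content-Encoding", string(c.ContentEncoding))
    }
    dst.SetBody(c.Body)
}
```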