Skip to content

Commit

Permalink
start of v3
Browse files Browse the repository at this point in the history
  • Loading branch information
Nick Randall committed Nov 29, 2017
1 parent f7a9a10 commit aa342df
Show file tree
Hide file tree
Showing 9 changed files with 145 additions and 44 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
vendor/
33 changes: 33 additions & 0 deletions Gopkg.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

34 changes: 34 additions & 0 deletions Gopkg.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@

# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"


[[constraint]]
branch = "master"
name = "github.com/hashicorp/golang-lru"

[[constraint]]
name = "github.com/opentracing/opentracing-go"
version = "1.0.2"

[[constraint]]
name = "github.com/patrickmn/go-cache"
version = "2.1.0"
25 changes: 24 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ if err != nil {
log.Printf("value: %#v", result)
```

## Upgrade from v1
## Upgrade from v1 to v2
The only difference between v1 and v2 is that we added use of [context](https://golang.org/pkg/context).

```diff
Expand All @@ -55,6 +55,29 @@ The only difference between v1 and v2 is that we added use of [context](https://
+ type BatchFunc func(context.Context, []string) []*Result
```

## Upgrade from v2 to v3
```diff
// dataloader.Interface has added context.Context to its methods
- loader.Prime(key string, value interface{}) Interface
+ loader.Prime(ctx context.Context, key string, value interface{}) Interface
- loader.Clear(key string) Interface
+ loader.Clear(ctx context.Context, key string) Interface
```

```diff
// the Cache interface has added context.Context to its methods
type Cache interface {
- Get(string) (Thunk, bool)
+ Get(context.Context, string) (Thunk, bool)
- Set(string, Thunk)
+ Set(context.Context, string, Thunk)
- Delete(string) bool
+ Delete(context.Context, string) bool
Clear()
}
```


### Don't need/want to use context?
You're welcome to install the v1 version of this library.

Expand Down
14 changes: 8 additions & 6 deletions cache.go
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
package dataloader

import "context"

// The Cache interface. If a custom cache is provided, it must implement this interface.
type Cache interface {
Get(string) (Thunk, bool)
Set(string, Thunk)
Delete(string) bool
Get(context.Context, string) (Thunk, bool)
Set(context.Context, string, Thunk)
Delete(context.Context, string) bool
Clear()
}

Expand All @@ -14,13 +16,13 @@ type Cache interface {
type NoCache struct{}

// Get is a NOOP
func (c *NoCache) Get(string) (Thunk, bool) { return nil, false }
func (c *NoCache) Get(context.Context, string) (Thunk, bool) { return nil, false }

// Set is a NOOP
func (c *NoCache) Set(string, Thunk) { return }
func (c *NoCache) Set(context.Context, string, Thunk) { return }

// Delete is a NOOP
func (c *NoCache) Delete(string) bool { return false }
func (c *NoCache) Delete(context.Context, string) bool { return false }

// Clear is a NOOP
func (c *NoCache) Clear() { return }
28 changes: 15 additions & 13 deletions dataloader.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,9 @@ import (
type Interface interface {
Load(context.Context, string) Thunk
LoadMany(context.Context, []string) ThunkMany
Clear(string) Interface
Clear(context.Context, string) Interface
ClearAll() Interface
Prime(key string, value interface{}) Interface
Prime(ctx context.Context, key string, value interface{}) Interface
}

// BatchFunc is a function, which when given a slice of keys (string), returns a slice of `results`.
Expand Down Expand Up @@ -193,6 +193,7 @@ func NewBatchedLoader(batchFn BatchFunc, opts ...Option) *Loader {
// Load loads/resolves the given key, returning a channel that will contain the value and error
func (l *Loader) Load(originalContext context.Context, key string) Thunk {
ctx, finish := l.tracer.TraceLoad(originalContext, key)

c := make(chan *Result, 1)
var result struct {
mu sync.RWMutex
Expand All @@ -201,7 +202,7 @@ func (l *Loader) Load(originalContext context.Context, key string) Thunk {

// lock to prevent duplicate keys coming in before item has been added to cache.
l.cacheLock.Lock()
if v, ok := l.cache.Get(key); ok {
if v, ok := l.cache.Get(ctx, key); ok {
defer finish(v)
defer l.cacheLock.Unlock()
return v
Expand All @@ -223,8 +224,9 @@ func (l *Loader) Load(originalContext context.Context, key string) Thunk {
defer result.mu.RUnlock()
return result.value.Data, result.value.Error
}
defer finish(thunk)

l.cache.Set(key, thunk)
l.cache.Set(ctx, key, thunk)
l.cacheLock.Unlock()

// this is sent to batch fn. It contains the key and the channel to return the
Expand All @@ -236,7 +238,7 @@ func (l *Loader) Load(originalContext context.Context, key string) Thunk {
if l.curBatcher == nil {
l.curBatcher = l.newBatcher(l.silent, l.tracer)
// start the current batcher batch function
go l.curBatcher.batch(ctx)
go l.curBatcher.batch(originalContext)
// start a sleeper for the current batcher
l.endSleeper = make(chan bool)
go l.sleeper(l.curBatcher, l.endSleeper)
Expand All @@ -261,13 +263,13 @@ func (l *Loader) Load(originalContext context.Context, key string) Thunk {
}
l.batchLock.Unlock()

defer finish(thunk)
return thunk
}

// LoadMany loads multiple keys, returning a thunk (type: ThunkMany) that will resolve the keys passed in.
func (l *Loader) LoadMany(originalContext context.Context, keys []string) ThunkMany {
ctx, finish := l.tracer.TraceLoadMany(originalContext, keys)

length := len(keys)
data := make([]interface{}, length)
errors := make([]error, length)
Expand All @@ -276,13 +278,13 @@ func (l *Loader) LoadMany(originalContext context.Context, keys []string) ThunkM

wg.Add(length)
for i := range keys {
go func(i int) {
go func(ctx context.Context, i int) {
defer wg.Done()
thunk := l.Load(ctx, keys[i])
result, err := thunk()
data[i] = result
errors[i] = err
}(i)
}(ctx, i)
}

go func() {
Expand Down Expand Up @@ -318,9 +320,9 @@ func (l *Loader) LoadMany(originalContext context.Context, keys []string) ThunkM
}

// Clear clears the value at `key` from the cache, if it exists. Returns self for method chaining
func (l *Loader) Clear(key string) Interface {
func (l *Loader) Clear(ctx context.Context, key string) Interface {
l.cacheLock.Lock()
l.cache.Delete(key)
l.cache.Delete(ctx, key)
l.cacheLock.Unlock()
return l
}
Expand All @@ -336,12 +338,12 @@ func (l *Loader) ClearAll() Interface {

// Prime adds the provided key and value to the cache. If the key already exists, no change is made.
// Returns self for method chaining
func (l *Loader) Prime(key string, value interface{}) Interface {
if _, ok := l.cache.Get(key); !ok {
func (l *Loader) Prime(ctx context.Context, key string, value interface{}) Interface {
if _, ok := l.cache.Get(ctx, key); !ok {
thunk := func() (interface{}, error) {
return value, nil
}
l.cache.Set(key, thunk)
l.cache.Set(ctx, key, thunk)
}
return l
}
Expand Down
24 changes: 12 additions & 12 deletions dataloader_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -250,8 +250,8 @@ func TestLoader(t *testing.T) {
t.Run("allows primed cache", func(t *testing.T) {
t.Parallel()
identityLoader, loadCalls := IDLoader(0)
identityLoader.Prime("A", "Cached")
ctx := context.Background()
identityLoader.Prime(ctx, "A", "Cached")
future1 := identityLoader.Load(ctx, "1")
future2 := identityLoader.Load(ctx, "A")

Expand Down Expand Up @@ -280,10 +280,10 @@ func TestLoader(t *testing.T) {
t.Parallel()
identityLoader, loadCalls := IDLoader(0)
ctx := context.Background()
identityLoader.Prime("A", "Cached")
identityLoader.Prime("B", "B")
identityLoader.Prime(ctx, "A", "Cached")
identityLoader.Prime(ctx, "B", "B")
future1 := identityLoader.Load(ctx, "1")
future2 := identityLoader.Clear("A").Load(ctx, "A")
future2 := identityLoader.Clear(ctx, "A").Load(ctx, "A")
future3 := identityLoader.Load(ctx, "B")

_, err := future1()
Expand Down Expand Up @@ -334,7 +334,7 @@ func TestLoader(t *testing.T) {
t.Errorf("did not batch queries. Expected %#v, got %#v", expected, calls)
}

if _, found := batchOnlyLoader.cache.Get("1"); found {
if _, found := batchOnlyLoader.cache.Get(ctx, "1"); found {
t.Errorf("did not clear cache after batch. Expected %#v, got %#v", false, found)
}
})
Expand All @@ -343,8 +343,8 @@ func TestLoader(t *testing.T) {
t.Parallel()
identityLoader, loadCalls := IDLoader(0)
ctx := context.Background()
identityLoader.Prime("A", "Cached")
identityLoader.Prime("B", "B")
identityLoader.Prime(ctx, "A", "Cached")
identityLoader.Prime(ctx, "B", "B")

identityLoader.ClearAll()

Expand Down Expand Up @@ -377,12 +377,12 @@ func TestLoader(t *testing.T) {
t.Parallel()
identityLoader, loadCalls := NoCacheLoader(0)
ctx := context.Background()
identityLoader.Prime("A", "Cached")
identityLoader.Prime("B", "B")
identityLoader.Prime(ctx, "A", "Cached")
identityLoader.Prime(ctx, "B", "B")

identityLoader.ClearAll()

future1 := identityLoader.Clear("1").Load(ctx, "1")
future1 := identityLoader.Clear(ctx, "1").Load(ctx, "1")
future2 := identityLoader.Load(ctx, "A")
future3 := identityLoader.Load(ctx, "B")

Expand Down Expand Up @@ -411,8 +411,8 @@ func TestLoader(t *testing.T) {
t.Parallel()
identityLoader, loadCalls := NoCacheLoader(0)
ctx := context.Background()
identityLoader.Prime("A", "Cached")
identityLoader.Prime("B", "B")
identityLoader.Prime(ctx, "A", "Cached")
identityLoader.Prime(ctx, "B", "B")

future1 := identityLoader.Load(ctx, "1")
future2 := identityLoader.Load(ctx, "A")
Expand Down
11 changes: 7 additions & 4 deletions inMemoryCache.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,10 @@

package dataloader

import "sync"
import (
"context"
"sync"
)

// InMemoryCache is an in memory implementation of Cache interface.
// this simple implementation is well suited for
Expand All @@ -23,15 +26,15 @@ func NewCache() *InMemoryCache {
}

// Set sets the `value` at `key` in the cache
func (c *InMemoryCache) Set(key string, value Thunk) {
func (c *InMemoryCache) Set(_ context.Context, key string, value Thunk) {
c.mu.Lock()
c.items[key] = value
c.mu.Unlock()
}

// Get gets the value at `key` if it exists, returns value (or nil) and bool
// indicating if value was found
func (c *InMemoryCache) Get(key string) (Thunk, bool) {
func (c *InMemoryCache) Get(_ context.Context, key string) (Thunk, bool) {
c.mu.RLock()
defer c.mu.RUnlock()

Expand All @@ -44,7 +47,7 @@ func (c *InMemoryCache) Get(key string) (Thunk, bool) {
}

// Delete deletes item at `key` from cache
func (c *InMemoryCache) Delete(key string) bool {
func (c *InMemoryCache) Delete(_ context.Context, key string) bool {
if _, found := c.Get(key); found {
c.mu.Lock()
defer c.mu.Unlock()
Expand Down
Loading

0 comments on commit aa342df

Please sign in to comment.