Skip to content

Commit

Permalink
NPE due to improperly scheduled write tasks (fixes #35)
Browse files Browse the repository at this point in the history
When an absent computation returns null, an add/removal task was being
improperly scheduled. As there was no Node, this null field was then
causing an NPE during the maintenance cycle. Usually that was on a
background executor, but could be visible if `cleanUp` is called.

This wasn't caught by tests since the executor's exceptions were swallowed, logged,
and logging was disabled to avoid spamming the report. Now the executor
is instrumented so that the validation can assert no failures occurred.
This detected the two bugs found with existing tests, so this provides
more coverage than a one-off fix.
  • Loading branch information
ben-manes committed Dec 2, 2015
1 parent 113a332 commit 0e7daa2
Show file tree
Hide file tree
Showing 18 changed files with 223 additions and 95 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -698,8 +698,8 @@ boolean hasExpired(Node<K, V> node, long now) {
}

/**
* Attempts to evict the entry based on the given removal cause. A removal due to expiration may
* be ignored if the entry was since updated and is no longer eligible for eviction.
* Attempts to evict the entry based on the given removal cause. A removal due to expiration or
* size may be ignored if the entry was since updated and is no longer eligible for eviction.
*
* @param node the entry to evict
* @param cause the reason to evict
Expand Down Expand Up @@ -1811,7 +1811,9 @@ V doComputeIfAbsent(K key, Object keyRef, Function<? super K, ? extends V> mappi
});

if (node == null) {
afterWrite(null, new RemovalTask(removed[0]), now);
if (removed[0] != null) {
afterWrite(null, new RemovalTask(removed[0]), now);
}
return null;
}
if (cause[0] != null) {
Expand Down Expand Up @@ -1978,6 +1980,8 @@ V remap(K key, Object keyRef, BiFunction<? super K, ? super V, ? extends V> rema

if (removed[0] != null) {
afterWrite(removed[0], new RemovalTask(removed[0]), now);
} else if (node == null) {
// absent and not computable
} else if ((oldValue[0] == null) && (cause[0] == null)) {
afterWrite(node, new AddTask(node, weight[1]), now);
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1098,7 +1098,7 @@ public void compute_nullKey(Map<Integer, Integer> map, CacheContext context) {
@CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
@Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
public void compute_nullMappingFunction(Map<Integer, Integer> map, CacheContext context) {
map.computeIfPresent(1, null);
map.compute(1, null);
}

@CheckNoWriter
Expand Down Expand Up @@ -1158,6 +1158,17 @@ public void compute_error(Map<Integer, Integer> map, CacheContext context) {
assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
}

@CheckNoWriter
@Test(dataProvider = "caches")
@CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
public void compute_absent_nullValue(Map<Integer, Integer> map, CacheContext context) {
assertThat(map.compute(context.absentKey(), (key, value) -> null), is(nullValue()));
assertThat(context, both(hasMissCount(0)).and(hasHitCount(0)));
assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
assertThat(map.get(context.absentKey()), is(nullValue()));
assertThat(map.size(), is(context.original().size()));
}

@CheckNoWriter
@Test(dataProvider = "caches")
@CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiFunction;
Expand Down Expand Up @@ -176,8 +175,7 @@ public void getFunc_absent_null_async(AsyncLoadingCache<Integer, Integer> cache,

ready.set(true);
Awaits.await().untilTrue(done);
MoreExecutors.shutdownAndAwaitTermination(
(ExecutorService) context.executor(), 1, TimeUnit.MINUTES);
MoreExecutors.shutdownAndAwaitTermination(context.executor(), 1, TimeUnit.MINUTES);

assertThat(context, both(hasMissCount(1)).and(hasHitCount(0)));
assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
Expand Down Expand Up @@ -216,8 +214,7 @@ public void getFunc_absent_failure_async(AsyncLoadingCache<Integer, Integer> cac

ready.set(true);
Awaits.await().untilTrue(done);
MoreExecutors.shutdownAndAwaitTermination(
(ExecutorService) context.executor(), 1, TimeUnit.MINUTES);
MoreExecutors.shutdownAndAwaitTermination(context.executor(), 1, TimeUnit.MINUTES);

assertThat(context, both(hasMissCount(1)).and(hasHitCount(0)));
assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
Expand All @@ -240,8 +237,7 @@ public void getFunc_absent_cancelled(AsyncLoadingCache<Integer, Integer> cache,
valueFuture.cancel(true);

Awaits.await().untilTrue(done);
MoreExecutors.shutdownAndAwaitTermination(
(ExecutorService) context.executor(), 1, TimeUnit.MINUTES);
MoreExecutors.shutdownAndAwaitTermination(context.executor(), 1, TimeUnit.MINUTES);

assertThat(context, both(hasMissCount(1)).and(hasHitCount(0)));
assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
Expand Down Expand Up @@ -445,8 +441,7 @@ public void get_absent_failure_async(AsyncLoadingCache<Integer, Integer> cache,
valueFuture.whenComplete((r, e) -> done.set(true));

Awaits.await().untilTrue(done);
MoreExecutors.shutdownAndAwaitTermination(
(ExecutorService) context.executor(), 1, TimeUnit.MINUTES);
MoreExecutors.shutdownAndAwaitTermination(context.executor(), 1, TimeUnit.MINUTES);

assertThat(context, both(hasMissCount(1)).and(hasHitCount(0)));
assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ public void putWeighted_noOverflow() {

@Test(dataProvider = "caches")
@CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine,
population = Population.FULL, maximumSize = MaximumSize.FULL,
population = Population.FULL, maximumSize = MaximumSize.FULL, executorMayFail = true,
executor = CacheExecutor.REJECTING, removalListener = Listener.CONSUMING)
public void evict_rejected(Cache<Integer, Integer> cache, CacheContext context) {
cache.put(context.absentKey(), context.absentValue());
Expand Down Expand Up @@ -283,7 +283,7 @@ public void exceedsMaximumBufferSize_onRead(Cache<Integer, Integer> cache, Cache
@Test(dataProvider = "caches")
@CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine,
population = Population.EMPTY, maximumSize = MaximumSize.FULL)
public void exceedsMaximumBufferSize_onWrite(Cache<Integer, Integer> cache) {
public void exceedsMaximumBufferSize_onWrite(Cache<Integer, Integer> cache, CacheContext context) {
BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache);
Node<Integer, Integer> dummy = localCache.nodeFactory.newNode(null, null, null, 1, 0);

Expand Down Expand Up @@ -383,7 +383,7 @@ public void drain_onWrite(Cache<Integer, Integer> cache, CacheContext context) {
@Test(dataProvider = "caches")
@CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine,
population = Population.EMPTY, maximumSize = MaximumSize.FULL)
public void drain_nonblocking(Cache<Integer, Integer> cache) {
public void drain_nonblocking(Cache<Integer, Integer> cache, CacheContext context) {
BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache);
AtomicBoolean done = new AtomicBoolean();
Runnable task = () -> {
Expand All @@ -403,7 +403,7 @@ public void drain_nonblocking(Cache<Integer, Integer> cache) {
@Test(dataProvider = "caches")
@CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine,
population = Population.EMPTY, maximumSize = MaximumSize.FULL)
public void drain_blocksClear(Cache<Integer, Integer> cache) {
public void drain_blocksClear(Cache<Integer, Integer> cache, CacheContext context) {
BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache);
checkDrainBlocks(localCache, localCache::clear);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,8 @@ public void get_throwsException(Cache<Integer, Integer> cache, CacheContext cont
@Test(dataProvider = "caches")
public void get_absent_null(LoadingCache<Integer, Integer> cache, CacheContext context) {
assertThat(cache.get(context.absentKey(), k -> null), is(nullValue()));
assertThat(context, both(hasMissCount(1)).and(hasHitCount(0)));
assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
}

@CacheSpec
Expand Down
Loading

0 comments on commit 0e7daa2

Please sign in to comment.