From cb936aecf507804a8752ab1beefa22e924c2a5d5 Mon Sep 17 00:00:00 2001 From: Ben Manes Date: Sun, 12 Jul 2015 16:38:42 -0700 Subject: [PATCH] Finalize CacheWriter support --- README.md | 8 +-- .../caffeine/cache/BoundedLocalCache.java | 21 ++++--- .../benmanes/caffeine/cache/CacheWriter.java | 13 +---- .../benmanes/caffeine/cache/Caffeine.java | 15 +++-- .../cache/LocalAsyncLoadingCache.java | 3 +- .../benmanes/caffeine/cache/LocalCache.java | 6 ++ .../caffeine/cache/LocalLoadingCache.java | 5 +- .../caffeine/cache/UnboundedLocalCache.java | 30 +++------- .../benmanes/caffeine/cache/AsMapTest.java | 57 ++++++++++--------- .../caffeine/cache/AsyncLoadingCacheTest.java | 47 +-------------- .../benmanes/caffeine/cache/CacheTest.java | 15 ++--- .../benmanes/caffeine/cache/CaffeineTest.java | 21 ++++--- .../benmanes/caffeine/cache/EvictionTest.java | 3 +- .../caffeine/cache/ExpirationTest.java | 41 ++++++------- .../caffeine/cache/LoadingCacheTest.java | 16 +++--- .../caffeine/cache/ReferenceTest.java | 39 ++++++------- .../caffeine/cache/RefreshAfterWriteTest.java | 11 ++-- .../caffeine/cache/testing/CacheContext.java | 4 ++ .../cache/testing/CacheWriterVerifier.java | 2 +- .../testing/CaffeineCacheFromContext.java | 6 +- 20 files changed, 166 insertions(+), 197 deletions(-) diff --git a/README.md b/README.md index 0bf5a4ca5f..179408b81e 100644 --- a/README.md +++ b/README.md @@ -24,8 +24,7 @@ LoadingCache graphs = Caffeine.newBuilder() #### Features at a Glance -Caffeine provide flexible construction to create a cache with any combination of the following -features: +Caffeine provide flexible construction to create a cache with a combination of the following features: * [automatic loading of entries][population] into the cache, optionally asynchronously * [least-recently-used eviction][size] when a maximum size is exceeded @@ -44,7 +43,7 @@ In addition, Caffeine offers the following extensions: ### Download -Download [the latest .jar][jar] from [Maven Central][maven] or depend via Gradle: +Download from [Maven Central][maven] or depend via Gradle: ```gradle compile 'com.github.ben-manes.caffeine:caffeine:1.2.0' @@ -55,7 +54,7 @@ compile 'com.github.ben-manes.caffeine:jcache:1.2.0' compile 'com.github.ben-manes.caffeine:tracing-async:1.2.0' ``` -Snapshots of the development version are available in +Snapshots of the development version are available in [Sonatype's snapshots repository](https://oss.sonatype.org/content/repositories/snapshots). 
[benchmarks]: https://github.com/ben-manes/caffeine/wiki/Benchmarks @@ -74,5 +73,4 @@ Snapshots of the development version are available in [simulator]: https://github.com/ben-manes/caffeine/wiki/Simulator [guava-adapter]: https://github.com/ben-manes/caffeine/wiki/Guava [jsr107]: https://github.com/ben-manes/caffeine/wiki/JCache -[jar]: https://search.maven.org/remote_content?g=com.github.ben-manes.caffeine&a=caffeine&v=LATEST [maven]: https://maven-badges.herokuapp.com/maven-central/com.github.ben-manes.caffeine/caffeine diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java index 248950b28a..5bf40a3f39 100644 --- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java +++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java @@ -903,12 +903,17 @@ public Map getAllPresent(Iterable keys) { @Override public V put(K key, V value) { - return put(key, value, false); + return put(key, value, true, false); + } + + @Override + public V put(K key, V value, boolean notifyWriter) { + return put(key, value, notifyWriter, false); } @Override public V putIfAbsent(K key, V value) { - return put(key, value, true); + return put(key, value, true, true); } /** @@ -917,10 +922,11 @@ public V putIfAbsent(K key, V value) { * * @param key key with which the specified value is to be associated * @param value value to be associated with the specified key + * @param notifyWriter if the writer should be notified for an inserted or updated entry * @param onlyIfAbsent a write is performed only if the key is not already associated with a value * @return the prior value in the data store or null if no mapping was found */ - V put(K key, V value, boolean onlyIfAbsent) { + V put(K key, V value, boolean notifyWriter, boolean onlyIfAbsent) { requireNonNull(key); requireNonNull(value); @@ -938,7 +944,9 @@ V put(K key, V value, boolean onlyIfAbsent) { } Node computed = node; prior = data.computeIfAbsent(node.getKeyReference(), k -> { - writer.write(key, value); + if (notifyWriter) { + writer.write(key, value); + } return computed; }); if (prior == node) { @@ -966,7 +974,7 @@ V put(K key, V value, boolean onlyIfAbsent) { mayUpdate = false; } - if (expired || (mayUpdate && (value != oldValue))) { + if (notifyWriter && (expired || (mayUpdate && (value != oldValue)))) { writer.write(key, value); } if (mayUpdate) { @@ -1369,7 +1377,6 @@ V remap(K key, Object keyRef, BiFunction rema if (newValue[0] == null) { return null; } - writer.write(key, newValue[0]); weight[1] = weigher.weigh(key, newValue[0]); tracer().recordWrite(id, key, weight[1]); return nodeFactory.newNode(keyRef, newValue[0], @@ -1398,14 +1405,12 @@ V remap(K key, Object keyRef, BiFunction rema if (newValue[0] == null) { if (cause[0] == null) { cause[0] = RemovalCause.EXPLICIT; - writer.delete(nodeKey[0], oldValue[0], cause[0]); } removed[0] = n; n.retire(); return null; } - writer.write(nodeKey[0], newValue[0]); weight[0] = n.getWeight(); weight[1] = weigher.weigh(key, newValue[0]); n.setValue(newValue[0], valueReferenceQueue()); diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/CacheWriter.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/CacheWriter.java index ae96063eb0..a95979dc32 100644 --- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/CacheWriter.java +++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/CacheWriter.java @@ 
-20,16 +20,9 @@ import javax.annotation.concurrent.ThreadSafe; /** - * Communicates the write or deletion of a value, based on a key, to an external resource. - * <p>
- * The operations may be performed in either a write-through or write-behind - * style, where the difference is whether the operation completes synchronously or asynchronously - * with the cache. - * <p>
- * When combined with a {@link CacheLoader}, the writer simplifies the implementation of a tiered - * cache. The loader queries the secondary cache and computes the value if not found. The layered - * cache may be modeled as a victim cache by writing entries when the {@link RemovalCause} indicates - * an eviction. + * Communicates the write or deletion of a value, based on a key, to an external resource. A writer + * is notified by the cache each time an entry is explicitly created or modified, or removed for any + * {@linkplain RemovalCause reason}. The writer is not notified when an entry is loaded or computed. * * @author ben.manes@gmail.com (Ben Manes) * @param the most general type of keys this writer can write; for example {@code Object} if any diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/Caffeine.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/Caffeine.java index ce52c1dd23..9c54dcbef1 100644 --- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/Caffeine.java +++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/Caffeine.java @@ -42,7 +42,7 @@ /** * A builder of {@link AsyncLoadingCache}, {@link LoadingCache}, and {@link Cache} instances - * having any combination of the following features: + * having a combination of the following features: *

    *
  • automatic loading of entries into the cache, optionally asynchronously *
  • least-recently-used eviction when a maximum size is exceeded @@ -216,9 +216,9 @@ int getInitialCapacity() { /** * Specifies the executor to use when running asynchronous tasks. The executor is delegated to - * when sending removal notifications and asynchronous computations requested through the - * {@link AsyncLoadingCache} and {@link LoadingCache#refresh}. By default, - * {@link ForkJoinPool#commonPool()} is used. + * when sending removal notifications, when asynchronous computations are performed by + * {@link AsyncLoadingCache} and {@link LoadingCache#refresh}, or when performing periodic + * maintenance. By default, {@link ForkJoinPool#commonPool()} is used. *
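As an illustration outside this diff, a test might lean on that executor hook by supplying a same-thread executor so asynchronous work finishes before the call returns. This is a minimal sketch only: it assumes the builder method documented here is `Caffeine.executor(Executor)`, and the loader, keys, and class name are invented.

```java
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;

public final class DirectExecutorExample {
  public static void main(String[] args) {
    // a minimal sketch; the loader and keys are made up for illustration
    LoadingCache<String, Integer> cache = Caffeine.newBuilder()
        .executor(Runnable::run)        // run asynchronous tasks on the calling thread
        .build(key -> key.length());

    cache.put("a", 0);
    cache.refresh("a");                 // with the same-thread executor, the reload completes here
    System.out.println(cache.getIfPresent("a")); // prints 1, the value computed by the loader
  }
}
```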
<p>
    * The primary intent of this method is to facilitate testing of caches which have been * configured with {@link #removalListener} or utilize asynchronous computations. A test may @@ -652,8 +652,8 @@ RemovalListener getRemovalListener(boolean /** * Specifies a writer instance that caches should notify each time an entry is explicitly created * or modified, or removed for any {@linkplain RemovalCause reason}. The writer is not notified - * when an entry is loaded. Each cache created by this builder will invoke this writer as part of - * the atomic operation that modifies the cache. + * when an entry is loaded or computed. Each cache created by this builder will invoke this writer + * as part of the atomic operation that modifies the cache. *
<p>
    * Warning: after invoking this method, do not continue to use this cache builder * reference; instead use the reference this method returns. At runtime, these point to the @@ -665,6 +665,8 @@ RemovalListener getRemovalListener(boolean *
<p>
    * Warning: any exception thrown by {@code writer} will be propagated to the {@code Cache} * user. + *
<p>
    + * This feature cannot be used in conjunction with {@link #buildAsync}. * * @param writer a writer instance that caches should notify each time an entry is explicitly * created or modified, or removed for any reason @@ -860,6 +862,7 @@ public LoadingCache build( public AsyncLoadingCache buildAsync( @Nonnull CacheLoader loader) { requireState(valueStrength == null); + requireState(writer == null); requireWeightWithWeigher(); requireNonNull(loader); diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java index 793e333a52..c2d8e7eff6 100644 --- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java +++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java @@ -198,7 +198,8 @@ private CompletableFuture> composeResult(Map> @Override public void put(K key, CompletableFuture valueFuture) { - if (valueFuture.isCompletedExceptionally()) { + if (valueFuture.isCompletedExceptionally() + || (valueFuture.isDone() && (valueFuture.join() == null))) { cache.statsCounter().recordLoadFailure(0L); cache.remove(key); return; diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java index c8a444c5d3..bfb64eb077 100644 --- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java +++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java @@ -79,6 +79,12 @@ default Tracer tracer() { @Nonnull Map getAllPresent(@Nonnull Iterable keys); + /** + * See {@link Cache#put(Object, Object)}. This method differs by allowing the operation to not + * notify the writer when an entry was inserted or updated. 
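Stepping outside the diff for a moment, the sketch below illustrates the kind of write-through wiring this API is meant for. It is illustrative only: the backing map, the key and value types, and the class name are invented and not part of this patch.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.CacheWriter;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.RemovalCause;

public final class WriteThroughExample {
  public static void main(String[] args) {
    // hypothetical external resource kept in sync by the writer
    Map<String, Integer> backingStore = new ConcurrentHashMap<>();

    CacheWriter<String, Integer> writer = new CacheWriter<String, Integer>() {
      @Override public void write(String key, Integer value) {
        // invoked for explicit creates and updates, as part of the cache's atomic operation
        backingStore.put(key, value);
      }
      @Override public void delete(String key, Integer value, RemovalCause cause) {
        // invoked for explicit removals and evictions, but not for loads or computations
        backingStore.remove(key);
      }
    };

    Cache<String, Integer> cache = Caffeine.newBuilder()
        .writer(writer)
        .build();

    cache.put("a", 1);     // notifies writer.write("a", 1)
    cache.invalidate("a"); // notifies writer.delete("a", 1, RemovalCause.EXPLICIT)

    // Per this patch, combining a writer with buildAsync is rejected:
    // Caffeine.newBuilder().writer(writer).buildAsync(aLoader) throws IllegalStateException.
  }
}
```

Internally, the new `put(key, value, notifyWriter)` overload introduced above is what lets `bulkLoad` insert loaded values with `notifyWriter` set to false, keeping loads invisible to the writer. Separately, the `LocalAsyncLoadingCache` change treats a future completed with null as an absent value, so such a put removes the mapping instead of storing it, which is what the re-enabled `put_replace_nullValue` test exercises.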
+ */ + V put(K key, V value, boolean notifyWriter); + @Override default V compute(K key, BiFunction remappingFunction) { return compute(key, remappingFunction, false, false); diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java index 075cc2291a..c58bc73794 100644 --- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java +++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java @@ -24,6 +24,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.function.BiFunction; import java.util.logging.Level; import java.util.logging.Logger; @@ -123,7 +124,9 @@ default void bulkLoad(List keysToLoad, Map result) { try { @SuppressWarnings("unchecked") Map loaded = (Map) cacheLoader().loadAll(keysToLoad); - cache().putAll(loaded); + for (Entry entry : loaded.entrySet()) { + cache().put(entry.getKey(), entry.getValue(), false); + } for (K key : keysToLoad) { V value = loaded.get(key); if (value != null) { diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java index 6366339cb7..7565d5e731 100644 --- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java +++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java @@ -242,15 +242,7 @@ public V computeIfPresent(K key, V nv = data.computeIfPresent(key, (K k, V oldValue) -> { V newValue = statsAware(remappingFunction, false, false).apply(k, oldValue); - RemovalCause cause; - if (newValue == null) { - cause = RemovalCause.EXPLICIT; - writer.delete(key, oldValue, cause); - } else { - cause = RemovalCause.REPLACED; - writer.write(key, newValue); - } - + RemovalCause cause = (newValue == null) ? RemovalCause.EXPLICIT : RemovalCause.REPLACED; if (hasRemovalListener() && (newValue != oldValue)) { notification[0] = new RemovalNotification<>(key, oldValue, cause); } @@ -298,18 +290,7 @@ V remap(K key, BiFunction remappingFunction) return null; } - RemovalCause cause; - if (newValue == null) { - cause = RemovalCause.EXPLICIT; - writer.delete(key, oldValue, cause); - } else { - // Do not communicate to CacheWriter on a load - cause = RemovalCause.REPLACED; - if (oldValue != null) { - writer.write(key, newValue); - } - } - + RemovalCause cause = (newValue == null) ? 
RemovalCause.EXPLICIT : RemovalCause.REPLACED; if (hasRemovalListener() && (oldValue != null) && (newValue != oldValue)) { notification[0] = new RemovalNotification<>(key, oldValue, cause); } @@ -362,13 +343,18 @@ public V get(Object key) { @Override public V put(K key, V value) { + return put(key, value, true); + } + + @Override + public V put(K key, V value, boolean notifyWriter) { requireNonNull(value); // ensures that the removal notification is processed after the removal has completed @SuppressWarnings({"unchecked", "rawtypes"}) V oldValue[] = (V[]) new Object[1]; data.compute(key, (k, v) -> { - if (value != v) { + if (notifyWriter && (value != v)) { writer.write(key, value); } oldValue[0] = v; diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java index 902fc1f524..4f17226990 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java @@ -56,6 +56,7 @@ import com.github.benmanes.caffeine.cache.testing.CacheContext; import com.github.benmanes.caffeine.cache.testing.CacheProvider; import com.github.benmanes.caffeine.cache.testing.CacheSpec; +import com.github.benmanes.caffeine.cache.testing.CacheSpec.Compute; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population; @@ -117,7 +118,7 @@ public void clear(Map map, CacheContext context) { @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void clear_writerFails(Map map, CacheContext context) { try { map.clear(); @@ -287,7 +288,7 @@ public void put_nullKeyAndValue(Map map, CacheContext context) @CheckNoStats @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void put_insert_writerFails(Map map, CacheContext context) { try { map.put(context.absentKey(), context.absentValue()); @@ -300,7 +301,7 @@ public void put_insert_writerFails(Map map, CacheContext conte @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void put_replace_writerFails(Map map, CacheContext context) { try { map.put(context.middleKey(), context.absentValue()); @@ -369,7 +370,7 @@ public void putAll_null(Map map, CacheContext context) { @CheckNoStats @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, - writer = Writer.EXCEPTIONAL, removalListener = 
Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void putAll_insert_writerFails(Map map, CacheContext context) { try { map.putAll(context.absent()); @@ -382,7 +383,7 @@ public void putAll_insert_writerFails(Map map, CacheContext co @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void putAll_replace_writerFails(Map map, CacheContext context) { try { map.putAll(ImmutableMap.of(context.middleKey(), context.absentValue())); @@ -479,7 +480,7 @@ public void putIfAbsent_nullKeyAndValue(Map map, CacheContext @CheckNoStats @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void putIfAbsent_writerFails(Map map, CacheContext context) { try { map.putIfAbsent(context.absentKey(), context.absentValue()); @@ -527,7 +528,7 @@ public void remove_nullKey(Map map, CacheContext context) { @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void remove_writerFails(Map map, CacheContext context) { try { map.remove(context.middleKey()); @@ -588,7 +589,7 @@ public void removeConditionally_nullKeyAndValue(Map map, Cache @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void removeConditionally_writerFails(Map map, CacheContext context) { try { map.remove(context.middleKey(), context.original().get(context.middleKey())); @@ -667,7 +668,7 @@ public void replace_absent(Map map, CacheContext context) { @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void replace_writerFails(Map map, CacheContext context) { try { map.replace(context.middleKey(), context.absentValue()); @@ -770,7 +771,7 @@ public void replaceConditionally_absent(Map map, CacheContext @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = 
Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void replaceConditionally_writerFails(Map map, CacheContext context) { try { Integer key = context.middleKey(); @@ -848,7 +849,7 @@ public void replaceAll_nullValue(Map map, CacheContext context @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void replaceAll_writerFails(Map map, CacheContext context) { try { map.replaceAll((key, value) -> context.absentValue()); @@ -986,7 +987,7 @@ public void computeIfPresent_nullMappingFunction(Map map, Cach map.computeIfPresent(1, null); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) public void computeIfPresent_nullValue(Map map, CacheContext context) { @@ -1057,7 +1058,7 @@ public void computeIfPresent_error(Map map, CacheContext conte assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1))); } - @CheckNoStats // FIXME: @CheckNoWriter + @CheckNoWriter @CheckNoStats @Test(dataProvider = "caches") @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING }) public void computeIfPresent_absent(Map map, CacheContext context) { @@ -1066,7 +1067,7 @@ public void computeIfPresent_absent(Map map, CacheContext cont assertThat(map.size(), is(context.original().size())); } - //FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) public void computeIfPresent_present(Map map, CacheContext context) { @@ -1100,7 +1101,7 @@ public void compute_nullMappingFunction(Map map, CacheContext map.computeIfPresent(1, null); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) public void compute_remove(Map map, CacheContext context) { @@ -1157,7 +1158,7 @@ public void compute_error(Map map, CacheContext context) { assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1))); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING }) public void compute_absent(Map map, CacheContext context) { @@ -1169,7 +1170,7 @@ public void compute_absent(Map map, CacheContext context) { assertThat(map.size(), is(1 + context.original().size())); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) public void compute_sameValue(Map map, CacheContext context) { @@ -1189,7 +1190,7 @@ public void compute_sameValue(Map map, CacheContext context) { assertThat(map, hasRemovalNotifications(context, count, RemovalCause.REPLACED)); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) public void compute_differentValue(Map map, CacheContext context) { @@ -1229,7 +1230,7 @@ public void merge_nullMappingFunction(Map map, 
CacheContext co map.merge(1, 1, null); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) public void merge_remove(Map map, CacheContext context) { @@ -1296,7 +1297,7 @@ public void merge_error(Map map, CacheContext context) { assertThat(map, is(equalTo(context.original()))); } - @CheckNoStats // FIXME: @CheckNoWriter + @CheckNoWriter @CheckNoStats @Test(dataProvider = "caches") @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING }) public void merge_absent(Map map, CacheContext context) { @@ -1307,7 +1308,7 @@ public void merge_absent(Map map, CacheContext context) { assertThat(map.size(), is(1 + context.original().size())); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) public void merge_sameValue(Map map, CacheContext context) { @@ -1326,7 +1327,7 @@ public void merge_sameValue(Map map, CacheContext context) { assertThat(map, hasRemovalNotifications(context, count, RemovalCause.REPLACED)); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) public void merge_differentValue(Map map, CacheContext context) { @@ -1466,7 +1467,7 @@ public void keySet_addNotSupported(Map map, CacheContext conte @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void keySet_writerFails(Map map, CacheContext context) { try { map.keySet().clear(); @@ -1542,7 +1543,7 @@ public void keyIterator_noMoreElements(Map map, CacheContext c @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void keyIterator_writerFails(Map map, CacheContext context) { try { Iterator i = map.keySet().iterator(); @@ -1606,7 +1607,7 @@ public void values_addNotSupported(Map map, CacheContext conte @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void values_writerFails(Map map, CacheContext context) { try { map.values().clear(); @@ -1684,7 +1685,7 @@ public void valueIterator_noMoreElements(Map map, CacheContext @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = 
Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void valueIterator_writerFails(Map map, CacheContext context) { try { Iterator i = map.values().iterator(); @@ -1751,7 +1752,7 @@ public void entrySet_addIsNotSupported(Map map, CacheContext c @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void entrySet_writerFails(Map map, CacheContext context) { try { map.entrySet().clear(); @@ -1830,7 +1831,7 @@ public void entryIterator_noMoreElements(Map map, CacheContext @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void entryIterator_writerFails(Map map, CacheContext context) { try { Iterator> i = map.entrySet().iterator(); diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncLoadingCacheTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncLoadingCacheTest.java index a0040ad767..2d59227a77 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncLoadingCacheTest.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncLoadingCacheTest.java @@ -16,7 +16,6 @@ package com.github.benmanes.caffeine.cache; import static com.github.benmanes.caffeine.cache.IsCacheReserializable.reserializable; -import static com.github.benmanes.caffeine.cache.testing.CacheWriterVerifier.verifyWriter; import static com.github.benmanes.caffeine.cache.testing.HasRemovalNotifications.hasRemovalNotifications; import static com.github.benmanes.caffeine.cache.testing.HasStats.hasHitCount; import static com.github.benmanes.caffeine.cache.testing.HasStats.hasLoadFailureCount; @@ -56,12 +55,10 @@ import com.github.benmanes.caffeine.cache.testing.CacheSpec.Loader; import com.github.benmanes.caffeine.cache.testing.CacheSpec.MaximumSize; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population; -import com.github.benmanes.caffeine.cache.testing.CacheSpec.ReferenceType; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Writer; import com.github.benmanes.caffeine.cache.testing.CacheValidationListener; import com.github.benmanes.caffeine.cache.testing.CheckNoStats; import com.github.benmanes.caffeine.cache.testing.CheckNoWriter; -import com.github.benmanes.caffeine.cache.testing.RejectingCacheWriter.WriteException; import com.github.benmanes.caffeine.testing.Awaits; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.MoreExecutors; @@ -640,34 +637,6 @@ public void put_nullKeyAndValue(AsyncLoadingCache cache, Cache cache.put(null, null); } - @CheckNoStats // FIXME - @Test(enabled = false, dataProvider = "caches", expectedExceptions = WriteException.class) - @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) - public void put_insert_writerFails(AsyncLoadingCache map, CacheContext context) { - 
CompletableFuture value = CompletableFuture.completedFuture(context.absentValue()); - try { - map.put(context.absentKey(), value); - } finally { - assertThat(map, equalTo(context.original())); - } - } - - @CheckNoStats // FIXME - @Test(enabled = false, dataProvider = "caches", expectedExceptions = WriteException.class) - @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, - population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) - public void put_replace_writerFails(AsyncLoadingCache map, - CacheContext context) { - CompletableFuture value = CompletableFuture.completedFuture(context.absentValue()); - try { - map.put(context.middleKey(), value); - } finally { - assertThat(map, equalTo(context.original())); - } - } - @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING }) @@ -711,10 +680,6 @@ public void put_insert(AsyncLoadingCache cache, CacheContext c assertThat(context, both(hasMissCount(0)).and(hasHitCount(0))); assertThat(context, both(hasLoadSuccessCount(1)).and(hasLoadFailureCount(0))); assertThat(cache.synchronous().getIfPresent(context.absentKey()), is(context.absentValue())); - - verifyWriter(context, (verifier, writer) -> { - verifier.wrote(context.absentKey(), context.absentValue()); - }); } @CheckNoWriter @@ -747,10 +712,10 @@ public void put_replace_failure_async(AsyncLoadingCache cache, assertThat(cache.synchronous().getIfPresent(key), is(context.absentValue())); } - // FIXME: do not publish replace event - @Test(enabled = false, dataProvider = "caches") + @Test(dataProvider = "caches") @CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) - public void put_replace_nullValue(AsyncLoadingCache cache, CacheContext context) { + public void put_replace_nullValue( + AsyncLoadingCache cache, CacheContext context) { CompletableFuture value = CompletableFuture.completedFuture(null); for (Integer key : context.firstMiddleLastKeys()) { cache.put(key, value); @@ -759,9 +724,6 @@ public void put_replace_nullValue(AsyncLoadingCache cache, Cac int count = context.firstMiddleLastKeys().size(); assertThat(cache.synchronous().estimatedSize(), is(context.initialSize() - count)); assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.EXPLICIT)); - verifyWriter(context, (verifier, writer) -> { - verifier.deleted(context.absentKey(), context.absentValue(), RemovalCause.EXPLICIT); - }); } @Test(dataProvider = "caches") @@ -776,9 +738,6 @@ public void put_replace(AsyncLoadingCache cache, CacheContext int count = context.firstMiddleLastKeys().size(); assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.REPLACED)); - verifyWriter(context, (verifier, writer) -> { - verifier.wrote(context.absentKey(), context.absentValue()); - }); } /* ---------------- serialize -------------- */ diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/CacheTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/CacheTest.java index a1f8a33ac2..a3f1913a6c 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/CacheTest.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/CacheTest.java @@ -45,6 +45,7 @@ import com.github.benmanes.caffeine.cache.testing.CacheContext; import com.github.benmanes.caffeine.cache.testing.CacheProvider; import com.github.benmanes.caffeine.cache.testing.CacheSpec; +import 
com.github.benmanes.caffeine.cache.testing.CacheSpec.Compute; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population; @@ -313,7 +314,7 @@ public void put_nullKeyAndValue(Cache cache, CacheContext cont @CheckNoStats @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void put_insert_writerFails(Cache cache, CacheContext context) { try { cache.put(context.absentKey(), context.absentValue()); @@ -326,7 +327,7 @@ public void put_insert_writerFails(Cache cache, CacheContext c @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void put_replace_writerFails(Cache cache, CacheContext context) { try { cache.put(context.middleKey(), context.absentValue()); @@ -406,7 +407,7 @@ public void putAll_null(Cache cache, CacheContext context) { @CheckNoStats @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void putAll_insert_writerFails(Cache cache, CacheContext context) { try { cache.putAll(context.absent()); @@ -419,7 +420,7 @@ public void putAll_insert_writerFails(Cache cache, CacheContex @Test(dataProvider = "caches", expectedExceptions = WriteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void putAll_replace_writerFails(Cache cache, CacheContext context) { try { cache.putAll(ImmutableMap.of(context.middleKey(), context.absentValue())); @@ -463,7 +464,7 @@ public void invalidate_nullKey(Cache cache, CacheContext conte @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void invalidate_writerFails(Cache cache, CacheContext context) { try { cache.invalidate(context.middleKey()); @@ -531,7 +532,7 @@ public void invalidateAll_null(Cache cache, CacheContext conte @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, 
removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void invalidateAll_partial_writerFails(Cache cache, CacheContext context) { try { cache.invalidateAll(context.firstMiddleLastKeys()); @@ -544,7 +545,7 @@ public void invalidateAll_partial_writerFails(Cache cache, Cac @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void invalidateAll_full_writerFails(Cache cache, CacheContext context) { try { cache.invalidateAll(); diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/CaffeineTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/CaffeineTest.java index 99e3d587f5..28896b47f9 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/CaffeineTest.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/CaffeineTest.java @@ -41,7 +41,7 @@ * @author ben.manes@gmail.com (Ben Manes) */ public final class CaffeineTest { - @Mock CacheLoader single; + @Mock CacheLoader loader; @Mock CacheWriter writer; @BeforeClass @@ -52,8 +52,8 @@ public void beforeClass() { @Test public void unconfigured() { assertThat(Caffeine.newBuilder().build(), is(not(nullValue()))); - assertThat(Caffeine.newBuilder().build(single), is(not(nullValue()))); - assertThat(Caffeine.newBuilder().buildAsync(single), is(not(nullValue()))); + assertThat(Caffeine.newBuilder().build(loader), is(not(nullValue()))); + assertThat(Caffeine.newBuilder().buildAsync(loader), is(not(nullValue()))); assertThat(Caffeine.newBuilder().toString(), is(Caffeine.newBuilder().toString())); } @@ -64,8 +64,8 @@ public void configured() { .expireAfterAccess(1, TimeUnit.SECONDS).expireAfterWrite(1, TimeUnit.SECONDS) .removalListener(x -> {}).recordStats(); assertThat(configured.build(), is(not(nullValue()))); - assertThat(configured.build(single), is(not(nullValue()))); - assertThat(Caffeine.newBuilder().buildAsync(single), is(not(nullValue()))); + assertThat(configured.build(loader), is(not(nullValue()))); + assertThat(Caffeine.newBuilder().buildAsync(loader), is(not(nullValue()))); assertThat(configured.refreshAfterWrite(1, TimeUnit.SECONDS).toString(), is(not(Caffeine.newBuilder().toString()))); @@ -89,12 +89,17 @@ public void async_nullLoader() { @Test(expectedExceptions = IllegalStateException.class) public void async_weakValues() { - Caffeine.newBuilder().weakValues().buildAsync(single); + Caffeine.newBuilder().weakValues().buildAsync(loader); } @Test(expectedExceptions = IllegalStateException.class) public void async_softValues() { - Caffeine.newBuilder().softValues().buildAsync(single); + Caffeine.newBuilder().softValues().buildAsync(loader); + } + + @Test(expectedExceptions = IllegalStateException.class) + public void async_writer() { + Caffeine.newBuilder().writer(writer).buildAsync(loader); } /* ---------------- initialCapacity -------------- */ @@ -331,7 +336,7 @@ public void weakKeys_twice() { @Test(expectedExceptions = IllegalStateException.class) public void weakKeys_writer() { - Caffeine.newBuilder().weakKeys().writer(writer); + Caffeine.newBuilder().writer(writer).weakKeys(); } @Test diff --git 
a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/EvictionTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/EvictionTest.java index d608b83571..104afb8353 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/EvictionTest.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/EvictionTest.java @@ -46,6 +46,7 @@ import com.github.benmanes.caffeine.cache.testing.CacheProvider; import com.github.benmanes.caffeine.cache.testing.CacheSpec; import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheWeigher; +import com.github.benmanes.caffeine.cache.testing.CacheSpec.Compute; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener; import com.github.benmanes.caffeine.cache.testing.CacheSpec.MaximumSize; @@ -269,7 +270,7 @@ public void evict_zero_async(AsyncLoadingCache> cache, @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, population = Population.FULL, maximumSize = MaximumSize.FULL, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void evict_writerFails(Cache cache, CacheContext context) { try { cache.policy().eviction().ifPresent(policy -> policy.setMaximum(0)); diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ExpirationTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ExpirationTest.java index ccfe48505a..277a27e622 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ExpirationTest.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ExpirationTest.java @@ -38,6 +38,7 @@ import com.github.benmanes.caffeine.cache.testing.CacheProvider; import com.github.benmanes.caffeine.cache.testing.CacheSpec; import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheWeigher; +import com.github.benmanes.caffeine.cache.testing.CacheSpec.Compute; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Expire; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener; @@ -82,7 +83,7 @@ public void expire_zero(Cache cache, CacheContext context) { population = Population.FULL, writer = Writer.EXCEPTIONAL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - removalListener = Listener.REJECTING) + compute = Compute.SYNC, removalListener = Listener.REJECTING) public void getIfPresent_writerFails(Cache cache, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -99,7 +100,7 @@ public void getIfPresent_writerFails(Cache cache, CacheContext population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void get_writerFails(Cache cache, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -152,7 +153,7 @@ public void put_replace(Cache cache, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, 
Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void put_writerFails(Cache cache, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -207,7 +208,7 @@ public void putAll_replace(Cache cache, CacheContext context) population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void putAll_writerFails(Cache cache, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -237,7 +238,7 @@ public void invalidate(Cache cache, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void invalidate_writerFails(Cache cache, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -267,7 +268,7 @@ public void invalidateAll(Cache cache, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void invalidateAll_writerFails(Cache cache, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -297,7 +298,7 @@ public void invalidateAll_full(Cache cache, CacheContext conte population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void invalidateAll_full_writerFails(Cache cache, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -337,7 +338,7 @@ public void cleanUp(Cache cache, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void cleanUp_writerFails(Cache cache, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -356,7 +357,7 @@ public void cleanUp_writerFails(Cache cache, CacheContext cont population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void get_writerFails(LoadingCache cache, CacheContext context) { try { 
context.ticker().advance(1, TimeUnit.HOURS); @@ -373,7 +374,7 @@ public void get_writerFails(LoadingCache cache, CacheContext c population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void getAll_writerFails(LoadingCache cache, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -406,7 +407,7 @@ public void refresh(LoadingCache cache, CacheContext context) population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void refresh_writerFails(LoadingCache cache, CacheContext context) { context.ticker().advance(1, TimeUnit.HOURS); cache.refresh(context.firstKey()); @@ -565,7 +566,7 @@ public void clear(Map map, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void clear_writerFails(Map map, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -582,7 +583,7 @@ public void clear_writerFails(Map map, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void putIfAbsent_writerFails(Map map, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -635,7 +636,7 @@ public void put_replace(Map map, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void put_writerFails(Map map, CacheContext context) { try { context.ticker().advance(1, TimeUnit.HOURS); @@ -732,7 +733,7 @@ public void remove(Map map, CacheContext context) { @Test(dataProvider = "caches", expectedExceptions = DeleteException.class) @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG, - population = Population.FULL, requiresExpiration = true, + population = Population.FULL, compute = Compute.SYNC, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) @@ -766,7 +767,7 @@ public void removeConditionally(Map map, CacheContext context) population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - 
writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void removeConditionally_writerFails(Map map, CacheContext context) { try { Integer key = context.firstKey(); @@ -799,7 +800,7 @@ public void computeIfAbsent(Map map, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void computeIfAbsent_writerFails(Map map, CacheContext context) { try { Integer key = context.firstKey(); @@ -833,7 +834,7 @@ public void computeIfPresent(Map map, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void computeIfPresent_writerFails(Map map, CacheContext context) { try { Integer key = context.firstKey(); @@ -869,7 +870,7 @@ public void compute(Map map, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void compute_writerFails(Map map, CacheContext context) { try { Integer key = context.firstKey(); @@ -904,7 +905,7 @@ public void merge(Map map, CacheContext context) { population = Population.FULL, requiresExpiration = true, expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE}, expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, - writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) + compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING) public void merge_writerFails(Map map, CacheContext context) { try { Integer key = context.firstKey(); diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java index 3b42644f74..28881673c5 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java @@ -187,7 +187,7 @@ public void getAll_absent_failure_iterable( } } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(loader = { Loader.NEGATIVE, Loader.BULK_NEGATIVE }, removalListener = { Listener.DEFAULT, Listener.REJECTING }) @@ -201,7 +201,7 @@ public void getAll_absent(LoadingCache cache, CacheContext con assertThat(context, both(hasLoadSuccessCount(loads)).and(hasLoadFailureCount(0))); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(loader = { Loader.NEGATIVE, Loader.BULK_NEGATIVE }, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, @@ -239,7 +239,7 @@ public void refresh_null(LoadingCache cache, CacheContext cont cache.refresh(null); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider 
= "caches") @CacheSpec(implementation = Implementation.Caffeine, executor = CacheExecutor.DIRECT, loader = Loader.NULL, @@ -251,7 +251,7 @@ public void refresh_remove(LoadingCache cache, CacheContext co assertThat(cache, hasRemovalNotifications(context, 1, RemovalCause.EXPLICIT)); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(executor = CacheExecutor.DIRECT, loader = Loader.EXCEPTIONAL, removalListener = { Listener.DEFAULT, Listener.REJECTING }, @@ -264,7 +264,7 @@ public void refresh_failure(LoadingCache cache, CacheContext c assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(2))); } - // FIXME: @CheckNoWriter + @CheckNoWriter @CacheSpec(loader = Loader.NULL) @Test(dataProvider = "caches") public void refresh_absent_null(LoadingCache cache, CacheContext context) { @@ -272,7 +272,7 @@ public void refresh_absent_null(LoadingCache cache, CacheConte assertThat(cache.estimatedSize(), is(context.initialSize())); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(executor = CacheExecutor.DIRECT, removalListener = { Listener.DEFAULT, Listener.REJECTING }) @@ -286,7 +286,7 @@ public void refresh_absent(LoadingCache cache, CacheContext co assertThat(cache.get(context.absentKey()), is(-context.absentKey())); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(executor = CacheExecutor.DIRECT, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) @@ -306,7 +306,7 @@ public void refresh_present_sameValue( assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.REPLACED)); } - // FIXME: @CheckNoWriter + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(executor = CacheExecutor.DIRECT, loader = Loader.IDENTITY, population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }) diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ReferenceTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ReferenceTest.java index ac7151d691..67c2238ad5 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ReferenceTest.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ReferenceTest.java @@ -36,6 +36,7 @@ import com.github.benmanes.caffeine.cache.testing.CacheProvider; import com.github.benmanes.caffeine.cache.testing.CacheSpec; import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheWeigher; +import com.github.benmanes.caffeine.cache.testing.CacheSpec.Compute; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Expire; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener; @@ -117,7 +118,7 @@ public void get(Cache cache, CacheContext context) { implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED, expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED, weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED, - removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL) + compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL) public void get_writerFails(Cache cache, CacheContext context) { Integer key = context.firstKey(); try { @@ -163,7 +164,7 @@ public void put(Cache cache, CacheContext context) { implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED, expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED, 
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void put_writerFails(Cache cache, CacheContext context) {
     Integer key = context.firstKey();
     try {
@@ -199,7 +200,7 @@ public void putAll(Cache cache, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void putAll_writerFails(Cache cache, CacheContext context) {
     Integer key = context.firstKey();
     try {
@@ -234,7 +235,7 @@ public void invalidate(Cache cache, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void invalidate_writerFails(Cache cache, CacheContext context) {
     Integer key = context.firstKey();
     try {
@@ -269,7 +270,7 @@ public void invalidateAll(Cache cache, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void invalidateAll_writerFails(Cache cache, CacheContext context) {
     Set keys = context.firstMiddleLastKeys();
     try {
@@ -303,7 +304,7 @@ public void invalidateAll_full(Cache cache, CacheContext conte
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void invalidateAll_full_writerFails(Cache cache, CacheContext context) {
     Set keys = context.firstMiddleLastKeys();
     try {
@@ -348,7 +349,7 @@ public void cleanUp(Cache cache, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void cleanUp_writerFails(Cache cache, CacheContext context) {
     Set keys = context.original().keySet();
     try {
@@ -385,7 +386,7 @@ public void get(LoadingCache cache, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void get_writerFails(LoadingCache cache, CacheContext context) {
     Integer key = context.firstKey();
     try {
@@ -422,7 +423,7 @@ public void getAll(LoadingCache cache, CacheContext context) {
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
       removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL,
-      loader = {Loader.IDENTITY, Loader.BULK_IDENTITY})
+      compute = Compute.SYNC, loader = {Loader.IDENTITY, Loader.BULK_IDENTITY})
   public void getAll_writerFails(LoadingCache cache, CacheContext context) {
     Set keys = context.firstMiddleLastKeys();
     try {
@@ -460,7 +461,7 @@ public void refresh(LoadingCache cache, CacheContext context)
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void refresh_writerFails(LoadingCache cache, CacheContext context) {
     Integer key = context.firstKey();
     context.clear();
@@ -506,7 +507,7 @@ public void get(AsyncLoadingCache cache, CacheContext context)
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void get_writerFails(AsyncLoadingCache cache, CacheContext context) {
     Integer key = context.firstKey();
     try {
@@ -542,7 +543,7 @@ public void getAll(AsyncLoadingCache cache, CacheContext conte
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void getAll_writerFails(AsyncLoadingCache cache, CacheContext context) {
     Set keys = context.firstMiddleLastKeys();
     try {
@@ -640,7 +641,7 @@ public void clear(Map map, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void clear_writerFails(Map map, CacheContext context) {
     Set keys = context.firstMiddleLastKeys();
     try {
@@ -676,7 +677,7 @@ public void putIfAbsent(Map map, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void putIfAbsent_writerFails(Map map, CacheContext context) {
     Integer key = context.firstKey();
     try {
@@ -712,7 +713,7 @@ public void put(Map map, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void put_writerFails(Map map, CacheContext context) {
     Integer key = context.firstKey();
     try {
@@ -775,7 +776,7 @@ public void remove(Map map, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void remove_writerFails(Map map, CacheContext context) {
     Integer key = context.firstKey();
     try {
@@ -828,7 +829,7 @@ public void computeIfAbsent(Map map, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void computeIfAbsent_writerFails(Map map, CacheContext context) {
     Integer key = context.firstKey();
     try {
@@ -884,7 +885,7 @@ public void compute(Map map, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void compute_writerFails(Map map, CacheContext context) {
     Integer key = context.firstKey();
     try {
@@ -921,7 +922,7 @@ public void merge(Map map, CacheContext context) {
       implementation = Implementation.Caffeine, expireAfterAccess = Expire.DISABLED,
       expireAfterWrite = Expire.DISABLED, maximumSize = MaximumSize.DISABLED,
       weigher = CacheWeigher.DEFAULT, population = Population.FULL, stats = Stats.ENABLED,
-      removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
+      compute = Compute.SYNC, removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
   public void merge_writerFails(Map map, CacheContext context) {
     Integer key = context.firstKey();
     try {
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java
index cf070e07b0..6a658a6655 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java
@@ -42,6 +42,7 @@
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Loader;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
 import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
+import com.github.benmanes.caffeine.cache.testing.CheckNoWriter;
 import com.github.benmanes.caffeine.cache.testing.RefreshAfterWrite;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -56,7 +57,7 @@
 @Test(dataProviderClass = CacheProvider.class)
 public final class RefreshAfterWriteTest {

-  // FIXME: @CheckNoWriter
+  @CheckNoWriter
   @Test(dataProvider = "caches")
   @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE, loader = Loader.NEGATIVE,
       population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
@@ -70,7 +71,7 @@ public void getIfPresent(LoadingCache cache, CacheContext cont
     assertThat(cache, hasRemovalNotifications(context, 1, RemovalCause.REPLACED));
   }

-  // FIXME: @CheckNoWriter
+  @CheckNoWriter
   @Test(dataProvider = "caches")
   @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE,
       population = { Population.PARTIAL, Population.FULL })
@@ -85,7 +86,7 @@ public void getAllPresent(LoadingCache cache, CacheContext con
     assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.REPLACED));
   }

-  // FIXME: @CheckNoWriter
+  @CheckNoWriter
   @Test(dataProvider = "caches")
   @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE,
       population = { Population.PARTIAL, Population.FULL })
@@ -100,7 +101,7 @@ public void get_mappingFun(LoadingCache cache, CacheContext co
     assertThat(cache, hasRemovalNotifications(context, 1, RemovalCause.REPLACED));
   }

-  // FIXME: @CheckNoWriter
+  @CheckNoWriter
   @Test(dataProvider = "caches")
   @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE,
       population = { Population.PARTIAL, Population.FULL })
@@ -114,7 +115,7 @@ public void get(LoadingCache cache, CacheContext context) {
     assertThat(cache, hasRemovalNotifications(context, 1, RemovalCause.REPLACED));
   }

-  // FIXME: @CheckNoWriter
+  @CheckNoWriter
   @Test(dataProvider = "caches")
   @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE, loader = Loader.IDENTITY,
       population = { Population.PARTIAL, Population.FULL })
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java
index ed931d4c06..ee743f9d5e 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java
@@ -261,6 +261,10 @@ public boolean isWeakValues() {
     return valueStrength == ReferenceType.WEAK;
   }

+  public boolean isSoftValues() {
+    return valueStrength == ReferenceType.SOFT;
+  }
+
   public boolean isLoading() {
     return (loader != null);
   }
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheWriterVerifier.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheWriterVerifier.java
index ef21d2a8b5..b8a5f2c96e 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheWriterVerifier.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheWriterVerifier.java
@@ -97,7 +97,7 @@ public static void verifyWriter(CacheContext context,
       BiConsumer> consumer) {
     boolean mayVerify = (context.implementation() == Implementation.Caffeine)
         && context.isStrongKeys()
-        && !context.isAsync(); // FIXME: Support async mode
+        && !context.isAsync();
     if (mayVerify) {
       consumer.accept(new CacheWriterVerifier(context), context.cacheWriter());
     }
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java
index c1b5579694..48c6bc44e7 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java
@@ -69,9 +69,9 @@ public static Cache newCaffeineCache(CacheContext context) {
     } else if (context.keyStrength == ReferenceType.SOFT) {
       throw new IllegalStateException();
     }
-    if (context.valueStrength == ReferenceType.WEAK) {
+    if (context.isWeakValues()) {
       builder.weakValues();
-    } else if (context.valueStrength == ReferenceType.SOFT) {
+    } else if (context.isSoftValues()) {
       builder.softValues();
     }
     if (context.cacheExecutor != CacheExecutor.DEFAULT) {
@@ -80,7 +80,7 @@ public static Cache newCaffeineCache(CacheContext context) {
     if (context.removalListenerType != Listener.DEFAULT) {
       builder.removalListener(context.removalListener);
     }
-    if (context.keyStrength != ReferenceType.WEAK) {
+    if (context.isStrongKeys() && !context.isAsync()) {
       builder.writer(context.cacheWriter());
     }
     if (context.isAsync()) {
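For readers following the tests above, the pattern they exercise is a write-through cache. The sketch below is illustrative only and is not part of this patch: the WriteThroughExample class and the in-memory store map are assumptions standing in for an external resource, while the write/delete callbacks and the builder's writer(...) registration are the pieces this change finalizes. Configuring Writer.EXCEPTIONAL in the *_writerFails tests simply makes these callbacks throw.

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    import com.github.benmanes.caffeine.cache.Cache;
    import com.github.benmanes.caffeine.cache.CacheWriter;
    import com.github.benmanes.caffeine.cache.Caffeine;
    import com.github.benmanes.caffeine.cache.RemovalCause;

    final class WriteThroughExample {
      public static void main(String[] args) {
        // Stand-in for an external resource (database, file, remote service)
        ConcurrentMap<Integer, Integer> store = new ConcurrentHashMap<>();

        Cache<Integer, Integer> cache = Caffeine.newBuilder()
            .writer(new CacheWriter<Integer, Integer>() {
              @Override public void write(Integer key, Integer value) {
                store.put(key, value); // called for creates and updates
              }
              @Override public void delete(Integer key, Integer value, RemovalCause cause) {
                store.remove(key); // called for invalidations and evictions
              }
            })
            .build();

        cache.put(1, 2);     // the writer's write callback runs as part of the insertion
        cache.invalidate(1); // the writer's delete callback runs with RemovalCause.EXPLICIT
      }
    }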