diff --git a/.circleci/config.yml b/.circleci/config.yml index d0c436c261..16df131dce 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -161,6 +161,7 @@ workflows: - caffeine:strongKeysAndSoftValuesSyncCaffeineTest - caffeine:weakKeysAndWeakValuesSyncCaffeineTest - caffeine:weakKeysAndSoftValuesSyncCaffeineTest + - caffeine:standaloneTest - caffeine:lincheckTest - caffeine:isolatedTest - caffeine:junitTest diff --git a/.github/workflows/actionlint.yml b/.github/workflows/actionlint.yml index f546111133..be70ea2295 100644 --- a/.github/workflows/actionlint.yml +++ b/.github/workflows/actionlint.yml @@ -16,7 +16,7 @@ jobs: github.com:443 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: actionlint - uses: reviewdog/action-actionlint@08ef4afa963243489a457cca426f705ce4e0d1a5 # v1.60.0 + uses: reviewdog/action-actionlint@534eb894142bcf31616e5436cbe4214641c58101 # v1.61.0 env: SHELLCHECK_OPTS: -e SC2001 -e SC2035 -e SC2046 -e SC2061 -e SC2086 -e SC2156 with: diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3214b425df..6f5d5260a6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -106,6 +106,7 @@ jobs: - caffeine:strongKeysAndSoftValuesSyncCaffeineTest - caffeine:weakKeysAndWeakValuesSyncCaffeineTest - caffeine:weakKeysAndSoftValuesSyncCaffeineTest + - caffeine:standaloneTest - caffeine:lincheckTest - caffeine:isolatedTest - caffeine:junitTest diff --git a/.github/workflows/spelling.yml b/.github/workflows/spelling.yml index 679c216263..48f64b293b 100644 --- a/.github/workflows/spelling.yml +++ b/.github/workflows/spelling.yml @@ -34,4 +34,4 @@ jobs: objects.githubusercontent.com:443 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Typos - uses: crate-ci/typos@9d890159570d5018df91fedfa40b4730cd4a81b1 # v1.28.4 + uses: crate-ci/typos@c8fd3764afbf5eaf6e53d2e6571c835db2c8fa5f # v1.29.0 diff --git a/caffeine/build.gradle.kts b/caffeine/build.gradle.kts index 6bbd2bfa26..1eea41ffc5 100644 --- a/caffeine/build.gradle.kts +++ b/caffeine/build.gradle.kts @@ -161,19 +161,18 @@ tasks.named("compileJmhJava").configure { } } -tasks.test.configure { +val standaloneTest = tasks.register("standaloneTest") { + group = "Verification" + description = "Tests that are not part of an explicit suite" exclude("com/github/benmanes/caffeine/cache/**") - dependsOn(junitTest) - useTestNG { threadCount = max(6, Runtime.getRuntime().availableProcessors() - 1) - jvmArgs("-XX:+UseG1GC", "-XX:+ParallelRefProcEnabled") excludeGroups("slow", "isolated", "lincheck") parallel = "methods" } } -tasks.register("isolatedTest") { +val isolatedTest = tasks.register("isolatedTest") { group = "Verification" description = "Tests that must be run in isolation" useTestNG { @@ -182,7 +181,7 @@ tasks.register("isolatedTest") { } } -tasks.register("lincheckTest") { +val lincheckTest = tasks.register("lincheckTest") { group = "Verification" description = "Tests that assert linearizability" enabled = !isEarlyAccess() @@ -194,30 +193,39 @@ tasks.register("lincheckTest") { } } -tasks.register("fuzzTest") { +val fuzzTest = tasks.register("fuzzTest") { group = "Verification" description = "Fuzz tests" + include("com/github/benmanes/caffeine/fuzz/**") - forkEvery = 1 - failFast = true - useJUnitPlatform() - testLogging.events("started") environment("JAZZER_FUZZ", "1") - include("com/github/benmanes/caffeine/fuzz/**") + testLogging.events("started") + useJUnitPlatform() + failFast = true + forkEvery = 1 } val junitTest 
= tasks.register("junitTest") { group = "Verification" description = "JUnit tests" + exclude("com/github/benmanes/caffeine/fuzz/**") val jar by tasks.existing(Jar::class) dependsOn(jar) - useJUnit() - failFast = true - maxHeapSize = "2g" - exclude("com/github/benmanes/caffeine/fuzz/**") systemProperty("caffeine.osgi.jar", relativePath(jar.get().archiveFile.get().asFile.path)) + maxHeapSize = "2g" + failFast = true + useJUnit() +} + +tasks.test.configure { + exclude("com/github/benmanes/caffeine/**") + dependsOn(standaloneTest) + dependsOn(isolatedTest) + dependsOn(lincheckTest) + dependsOn(junitTest) + dependsOn(fuzzTest) } tasks.jar { diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java index 97ff1a6a14..fc39d62afe 100644 --- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java +++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java @@ -3338,6 +3338,16 @@ public Iterator iterator() { public Spliterator spliterator() { return new KeySpliterator<>(cache); } + + @Override + public Object[] toArray() { + return cache.collectKeys() ? super.toArray() : cache.data.keySet().toArray(); + } + + @Override + public T[] toArray(T[] array) { + return cache.collectKeys() ? super.toArray(array) : cache.data.keySet().toArray(array); + } } /** An adapter to safely externalize the key iterator. */ diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java index da02907beb..dbde55f09e 100644 --- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java +++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java @@ -676,6 +676,16 @@ public Iterator iterator() { public Spliterator spliterator() { return cache.data.keySet().spliterator(); } + + @Override + public Object[] toArray() { + return cache.data.keySet().toArray(); + } + + @Override + public T[] toArray(T[] array) { + return cache.data.keySet().toArray(array); + } } /** An adapter to safely externalize the key iterator. */ @@ -804,6 +814,16 @@ public Iterator iterator() { public Spliterator spliterator() { return cache.data.values().spliterator(); } + + @Override + public Object[] toArray() { + return cache.data.values().toArray(); + } + + @Override + public T[] toArray(T[] array) { + return cache.data.values().toArray(array); + } } /** An adapter to safely externalize the value iterator. */ @@ -939,6 +959,16 @@ public Iterator> iterator() { public Spliterator> spliterator() { return new EntrySpliterator<>(cache); } + + @Override + public Object[] toArray() { + return cache.data.entrySet().toArray(); + } + + @Override + public T[] toArray(T[] array) { + return cache.data.entrySet().toArray(array); + } } /** An adapter to safely externalize the entry iterator. 
*/ diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/jsr166/package-info.java b/caffeine/src/test/java/com/github/benmanes/caffeine/jsr166/package-info.java index 2bdc3c41d3..52f39bb5be 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/jsr166/package-info.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/jsr166/package-info.java @@ -1,6 +1,6 @@ @NullMarked @CheckReturnValue -package com.github.benmanes.caffeine.jsr166; +package com.github.benmanes.caffeine.openjdk.concurrent.tck; import org.jspecify.annotations.NullMarked; diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/BiggernYours.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/BiggernYours.java new file mode 100644 index 0000000000..1556f3e7c2 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/BiggernYours.java @@ -0,0 +1,328 @@ +/* + * Copyright (c) 2006, 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ +package com.github.benmanes.caffeine.openjdk.collection; + +import static com.google.common.truth.Truth.assertThat; +import static java.util.Locale.US; + +import java.time.Duration; +import java.util.Arrays; +import java.util.Collection; +import java.util.Map; +import java.util.NavigableMap; +import java.util.NavigableSet; +import java.util.Objects; +import java.util.Random; +import java.util.Set; +import java.util.TreeSet; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ConcurrentSkipListMap; +import java.util.concurrent.ConcurrentSkipListSet; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.LinkedTransferQueue; +import java.util.concurrent.PriorityBlockingQueue; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 6415641 6377302 + * @summary Concurrent collections are permitted to lie about their size + * @author Martin Buchholz + */ +@SuppressWarnings({"AlmostJavadoc", "MultiVariableDeclaration", "NonAtomicVolatileUpdate", + "NonFinalStaticField", "rawtypes", "serial", "SystemOut", "unchecked", "unused", + "UnnecessaryFinal", "UnusedNestedClass", "UnusedVariable"}) +public class BiggernYours { + static final Random rnd = new Random(18675309); + + @Test + public void bounded() { + testCaffeine(Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build()); + } + + @Test + public void unbounded() { + testCaffeine(Caffeine.newBuilder().build()); + } + + private static void testCaffeine(Cache cache) { + testMaps( + cache.asMap(), + new ConcurrentHashMap() { + @Override + public int size() {return randomize(super.size());}}); + assertThat(failed).isEqualTo(0); + assertThat(passed).isGreaterThan(0); + } + + static void compareCollections(Collection c1, Collection c2) { + Object[] c1Array = c1.toArray(); + Object[] c2Array = c2.toArray(); + + check(c1Array.length == c2Array.length); + for (Object aC1 : c1Array) { + boolean found = false; + for (Object aC2 : c2Array) { + if (Objects.equals(aC1, aC2)) { + found = true; + break; + } + } + + if (!found) { + fail(aC1 + " not found in " + Arrays.toString(c2Array)); + } + } + } + + static void compareMaps(Map m1, Map m2) { + compareCollections(m1.keySet(), + m2.keySet()); + compareCollections(m1.values(), + m2.values()); + compareCollections(m1.entrySet(), + m2.entrySet()); + } + + static void compareNavigableMaps(NavigableMap m1, NavigableMap m2) { + compareMaps(m1, m2); + compareMaps(m1.descendingMap(), + m2.descendingMap()); + compareMaps(m1.tailMap(Integer.MIN_VALUE), + m2.tailMap(Integer.MIN_VALUE)); + compareMaps(m1.headMap(Integer.MAX_VALUE), + m2.headMap(Integer.MAX_VALUE)); + } + + static void compareNavigableSets(NavigableSet s1, NavigableSet s2) { + compareCollections(s1, s2); + compareCollections(s1.descendingSet(), + s2.descendingSet()); + compareCollections(s1.tailSet(Integer.MIN_VALUE), + s2.tailSet(Integer.MIN_VALUE)); + } + + abstract static class MapFrobber { abstract void frob(Map m); } + abstract static class SetFrobber { abstract void frob(Set s); } + abstract static class ColFrobber { abstract 
void frob(Collection c); } + + static ColFrobber adder(final int i) { + return new ColFrobber() {@Override + void frob(Collection c) { c.add(i); }}; + } + + static final ColFrobber[] adders = + { adder(1), adder(3), adder(2) }; + + static MapFrobber putter(final int k, final int v) { + return new MapFrobber() {@Override + void frob(Map m) { m.put(k,v); }}; + } + + static final MapFrobber[] putters = + { putter(1, -2), putter(3, -6), putter(2, -4) }; + + static void unexpected(Throwable t, Object suspect) { + System.out.println(suspect.getClass()); + unexpected(t); + } + + static void testCollections(Collection c1, Collection c2) { + try { + compareCollections(c1, c2); + for (ColFrobber adder : adders) { + for (Collection c : new Collection[]{c1, c2}) { + adder.frob(c); + } + compareCollections(c1, c2); + } + } catch (Throwable t) { unexpected(t, c1); } + } + + static void testNavigableSets(NavigableSet s1, NavigableSet s2) { + try { + compareNavigableSets(s1, s2); + for (ColFrobber adder : adders) { + for (Set s : new Set[]{s1, s2}) { + adder.frob(s); + } + compareNavigableSets(s1, s2); + } + } catch (Throwable t) { unexpected(t, s1); } + } + + static void testMaps(Map m1, Map m2) { + try { + compareMaps(m1, m2); + for (MapFrobber putter : putters) { + for (Map m : new Map[]{m1, m2}) { + putter.frob(m); + } + compareMaps(m1, m2); + } + } catch (Throwable t) { unexpected(t, m1); } + } + + static void testNavigableMaps(NavigableMap m1, NavigableMap m2) { + try { + compareNavigableMaps(m1, m2); + for (MapFrobber putter : putters) { + for (Map m : new Map[]{m1, m2}) { + putter.frob(m); + } + compareNavigableMaps(m1, m2); + } + } catch (Throwable t) { unexpected(t, m1); } + } + + static int randomize(int size) { return rnd.nextInt(size + 2); } + + @SuppressWarnings("serial") + private static void realMain(String[] args) { + testNavigableMaps( + new ConcurrentSkipListMap(), + new ConcurrentSkipListMap() { + @Override + public int size() {return randomize(super.size());}}); + + testNavigableSets( + new ConcurrentSkipListSet(), + new ConcurrentSkipListSet() { + @Override + public int size() {return randomize(super.size());}}); + + testCollections( + new CopyOnWriteArraySet(), + new CopyOnWriteArraySet() { + @Override + public int size() {return randomize(super.size());}}); + + testCollections( + new CopyOnWriteArrayList(), + new CopyOnWriteArrayList() { + @Override + public int size() {return randomize(super.size());}}); + + testCollections( + new TreeSet(), + new TreeSet() { + @Override + public int size() {return randomize(super.size());}}); + + testMaps( + new ConcurrentHashMap(), + new ConcurrentHashMap() { + @Override + public int size() {return randomize(super.size());}}); + + testCollections( + new ConcurrentLinkedDeque(), + new ConcurrentLinkedDeque() { + @Override + public int size() {return randomize(super.size());}}); + + testCollections( + new ConcurrentLinkedQueue(), + new ConcurrentLinkedQueue() { + @Override + public int size() {return randomize(super.size());}}); + + testCollections( + new LinkedTransferQueue(), + new LinkedTransferQueue() { + @Override + public int size() {return randomize(super.size());}}); + + testCollections( + new LinkedBlockingQueue(), + new LinkedBlockingQueue() { + @Override + public int size() {return randomize(super.size());}}); + + testCollections( + new LinkedBlockingDeque(), + new LinkedBlockingDeque() { + @Override + public int size() {return randomize(super.size());}}); + + testCollections( + new ArrayBlockingQueue(5), + new ArrayBlockingQueue(5) { + 
@Override + public int size() {return randomize(super.size());}}); + + testCollections( + new PriorityBlockingQueue(5), + new PriorityBlockingQueue(5) { + @Override + public int size() {return randomize(super.size());}}); + } + + //--------------------- Infrastructure --------------------------- + static volatile int passed = 0, failed = 0; + static void pass() {passed++;} + static void fail() {failed++; Thread.dumpStack();} + static void fail(String msg) {System.out.println(msg); fail();} + static void unexpected(Throwable t) {failed++; t.printStackTrace();} + static void check(boolean cond) {if (cond) { + pass(); + } else { + fail(); + }} + static void equal(Object x, Object y) { + if (x == null ? y == null : x.equals(y)) { + pass(); + } else { + fail(x + " not equal to " + y); + }} + static void arrayEqual(Object[] x, Object[] y) { + if (x == null ? y == null : Arrays.equals(x, y)) { + pass(); + } else { + fail(Arrays.toString(x) + " not equal to " + Arrays.toString(y)); + }} + public static void main(String[] args) { + try {realMain(args);} catch (Throwable t) {unexpected(t);} + System.out.printf(US, "%nPassed = %d, failed = %d%n%n", passed, failed); + if (failed > 0) { + throw new AssertionError("Some tests failed"); + }} + private abstract static class CheckedThread extends Thread { + abstract void realRun() throws Throwable; + @Override + public void run() { + try {realRun();} catch (Throwable t) {unexpected(t);}}} +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/IteratorAtEnd.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/IteratorAtEnd.java new file mode 100644 index 0000000000..8993d17ffb --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/IteratorAtEnd.java @@ -0,0 +1,213 @@ +/* + * Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ +package com.github.benmanes.caffeine.openjdk.collection; + +import static com.google.common.truth.Truth.assertThat; + +import java.time.Duration; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Hashtable; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.ListIterator; +import java.util.Locale; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.PriorityQueue; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.Vector; +import java.util.WeakHashMap; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ConcurrentSkipListMap; +import java.util.concurrent.ConcurrentSkipListSet; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.LinkedTransferQueue; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 6529795 + * @summary next() does not change iterator state if throws NoSuchElementException + * @author Martin Buchholz + */ +@SuppressWarnings({"AlmostJavadoc", "ClassIsInstance", "IdentifierName", "JdkObsolete", + "MultiVariableDeclaration", "NonAtomicVolatileUpdate", "NonFinalStaticField", "rawtypes", + "SystemOut", "unchecked", "UnnecessaryFinal"}) +public class IteratorAtEnd { + private static final int SIZE = 6; + + @Test + public void bounded() { + testCaffeine(Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build()); + } + + @Test + public void unbounded() { + testCaffeine(Caffeine.newBuilder().build()); + } + + private static void testCaffeine(Cache cache) { + testMap(cache.asMap()); + assertThat(failed).isEqualTo(0); + assertThat(passed).isGreaterThan(0); + } + + static void realMain(String[] args) throws Throwable { + testCollection(new ArrayList()); + testCollection(new Vector()); + testCollection(new LinkedList()); + testCollection(new ArrayDeque()); + testCollection(new TreeSet()); + testCollection(new CopyOnWriteArrayList()); + testCollection(new CopyOnWriteArraySet()); + testCollection(new ConcurrentSkipListSet()); + + testCollection(new PriorityQueue()); + testCollection(new LinkedBlockingQueue()); + testCollection(new ArrayBlockingQueue(100)); + testCollection(new ConcurrentLinkedDeque()); + testCollection(new ConcurrentLinkedQueue()); + testCollection(new LinkedTransferQueue()); + + testMap(new HashMap()); + testMap(new Hashtable()); + testMap(new LinkedHashMap()); + testMap(new WeakHashMap()); + testMap(new IdentityHashMap()); + testMap(new ConcurrentHashMap()); + testMap(new ConcurrentSkipListMap()); + testMap(new TreeMap()); + } + + static void testCollection(Collection c) { + try { + for (int i = 0; i < SIZE; i++) { + c.add(i); + } + test(c); + } catch (Throwable t) { unexpected(t); } + } + + static void testMap(Map m) { + try { + for (int i = 0; i < 3*SIZE; i++) { + m.put(i, i); + } + test(m.values()); + test(m.keySet()); + test(m.entrySet()); + } catch (Throwable t) { unexpected(t); } + } + + static void test(Collection c) { + try { + final 
Iterator it = c.iterator(); + THROWS(NoSuchElementException.class, + () -> { while (true) { + it.next(); + } }); + try { it.remove(); } + catch (UnsupportedOperationException exc) { return; } + pass(); + } catch (Throwable t) { unexpected(t); } + + if (c instanceof List) { + final List list = (List) c; + try { + final ListIterator it = list.listIterator(0); + it.next(); + final Object x = it.previous(); + THROWS(NoSuchElementException.class, () -> it.previous()); + try { it.remove(); } + catch (UnsupportedOperationException exc) { return; } + pass(); + check(! list.get(0).equals(x)); + } catch (Throwable t) { unexpected(t); } + + try { + final ListIterator it = list.listIterator(list.size()); + it.previous(); + final Object x = it.next(); + THROWS(NoSuchElementException.class, () -> it.next()); + try { it.remove(); } + catch (UnsupportedOperationException exc) { return; } + pass(); + check(! list.get(list.size()-1).equals(x)); + } catch (Throwable t) { unexpected(t); } + } + } + + //--------------------- Infrastructure --------------------------- + static volatile int passed = 0, failed = 0; + static void pass() {passed++;} + static void fail() {failed++; Thread.dumpStack();} + static void fail(String msg) {System.out.println(msg); fail();} + static void unexpected(Throwable t) {failed++; t.printStackTrace();} + static void check(boolean cond) {if (cond) { + pass(); + } else { + fail(); + }} + static void equal(Object x, Object y) { + if (x == null ? y == null : x.equals(y)) { + pass(); + } else { + fail(x + " not equal to " + y); + }} + public static void main(String[] args) throws Throwable { + try {realMain(args);} catch (Throwable t) {unexpected(t);} + System.out.printf(Locale.US, "%nPassed = %d, failed = %d%n%n", passed, failed); + if (failed > 0) { + throw new AssertionError("Some tests failed"); + }} + interface Fun {void f() throws Throwable;} + static void THROWS(Class k, Fun... fs) { + for (Fun f : fs) { + try { f.f(); fail("Expected " + k.getName() + " not thrown"); } + catch (Throwable t) { + if (k.isAssignableFrom(t.getClass())) { + pass(); + } else { + unexpected(t); + }} + }} +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/MOAT.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/MOAT.java new file mode 100644 index 0000000000..0587e8a00c --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/MOAT.java @@ -0,0 +1,2055 @@ +/* + * Copyright (c) 2005, 2024, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ +package com.github.benmanes.caffeine.openjdk.collection; + +import static com.google.common.truth.Truth.assertThat; +import static java.util.Collections.EMPTY_LIST; +import static java.util.Collections.EMPTY_MAP; +import static java.util.Collections.EMPTY_SET; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static java.util.Collections.singleton; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static java.util.Collections.unmodifiableList; +import static java.util.Collections.unmodifiableMap; +import static java.util.Collections.unmodifiableSet; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.NotSerializableException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.lang.reflect.Method; +import java.time.Duration; +import java.util.AbstractCollection; +import java.util.AbstractList; +import java.util.AbstractSet; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Deque; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Hashtable; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.ListIterator; +import java.util.Locale; +import java.util.Map; +import java.util.NavigableMap; +import java.util.NavigableSet; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.PriorityQueue; +import java.util.Queue; +import java.util.Random; +import java.util.RandomAccess; +import java.util.Set; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.Vector; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ConcurrentSkipListMap; +import java.util.concurrent.ConcurrentSkipListSet; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.LinkedTransferQueue; +import java.util.concurrent.PriorityBlockingQueue; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 6207984 6272521 6192552 6269713 6197726 6260652 5073546 4137464 + * 4155650 4216399 4294891 6282555 6318622 6355327 6383475 6420753 + * 6431845 4802633 6570566 6570575 6570631 6570924 6691185 6691215 + * 4802647 7123424 8024709 8193128 + * @summary Run many tests on many Collection and Map implementations + * @author Martin Buchholz + * @modules java.base/java.util:open + * @run main MOAT + * @key randomness + */ + +/* Mother Of All (Collection) Tests + * + * Testing of collection classes is often spotty, because many tests + * need to be performed on many implementations, but the onus on + * writing the tests falls on 
the engineer introducing the new + * implementation. + * + * The idea of this mega-test is that: + * + * An engineer adding a new collection implementation could simply add + * their new implementation to a list of implementations in this + * test's main method. Any general purpose Collection or + * Map class is appropriate. + * + * An engineer fixing a regression could add their regression test here and + * simultaneously test all other implementations. + */ +@SuppressWarnings({"ClassIsInstance", "FieldCanBeFinal", "InvalidParam", "JdkObsolete", + "MultiVariableDeclaration", "NonFinalStaticField", "rawtypes", "ReferenceEquality", + "AlmostJavadoc", "BadInstanceof", "BoxedPrimitiveEquality", "CollectionIsEmpty", + "CollectionToArray", "CollectorMutability", "EmptyCatch", "IdentifierName", + "IdentityConversion", "IdentityHashMapBoxing", "IterableIsEmpty", "JUnitClassModifiers", + "JUnitMethodDeclaration", "LexicographicalAnnotationAttributeListing", + "ModifyingCollectionWithItself", "NonAtomicVolatileUpdate", "NonStaticImport", "NullAway", + "ParameterMissingNullable", "resource", "ReturnValueIgnored", "SelfEquals", "SystemOut", + "unchecked", "UndefinedEquals", "UnnecessaryFinal", "unused", "UnusedMethod"}) +public class MOAT { + // Collections under test must not be initialized to contain this value, + // and maps under test must not contain this value as a key. + // It's used as a sentinel for absent-element testing. + static final int ABSENT_VALUE = 778347983; + + static final Integer[] integerArray; + static { + Integer[] ia = new Integer[20]; + // fill with 1..20 inclusive + for (int i = 0; i < ia.length; i++) { + ia[i] = i + 1; + } + integerArray = ia; + } + + @Test + public void bounded() { + testCaffeine(Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build()); + } + + @Test + public void unbounded() { + testCaffeine(Caffeine.newBuilder().build()); + } + + private static void testCaffeine(Cache cache) { + testMap(cache.asMap()); + assertThat(failed).isEqualTo(0); + assertThat(passed).isGreaterThan(0); + } + + public static void realMain(String[] args) { + + testCollection(new NewAbstractCollection()); + testCollection(new NewAbstractSet()); + testCollection(new LinkedHashSet()); + testCollection(new HashSet()); + testCollection(new Vector()); + testCollection(new Vector().subList(0,0)); + testCollection(new ArrayDeque()); + testCollection(new ArrayList()); + testCollection(new ArrayList().subList(0,0)); + testCollection(new LinkedList()); + testCollection(new LinkedList().subList(0,0)); + testCollection(new TreeSet()); + testCollection(Collections.checkedList(new ArrayList(), Integer.class)); + testCollection(Collections.synchronizedList(new ArrayList())); + testCollection(Collections.checkedSet(new HashSet(), Integer.class)); + testCollection(Collections.checkedSortedSet(new TreeSet(), Integer.class)); + testCollection(Collections.checkedNavigableSet(new TreeSet(), Integer.class)); + testCollection(Collections.synchronizedSet(new HashSet())); + testCollection(Collections.synchronizedSortedSet(new TreeSet())); + testCollection(Collections.synchronizedNavigableSet(new TreeSet())); + + testCollection(new CopyOnWriteArrayList()); + testCollection(new CopyOnWriteArrayList().subList(0,0)); + testCollection(new CopyOnWriteArraySet()); + testCollection(new PriorityQueue()); + testCollection(new PriorityBlockingQueue()); + testCollection(new ArrayBlockingQueue(20)); + testCollection(new LinkedBlockingQueue(20)); + 
testCollection(new LinkedBlockingDeque(20)); + testCollection(new ConcurrentLinkedDeque()); + testCollection(new ConcurrentLinkedQueue()); + testCollection(new LinkedTransferQueue()); + testCollection(new ConcurrentSkipListSet()); + testCollection(Arrays.asList(Integer.valueOf(42))); + testCollection(Arrays.asList(1,2,3)); + testCollection(Collections.nCopies(25,1)); + testImmutableList(Collections.nCopies(25,1)); + + testMap(new HashMap()); + testMap(new LinkedHashMap()); + + // TODO: Add reliable support for WeakHashMap. + // This test is subject to very rare failures because the GC + // may remove unreferenced-keys from the map at any time. + // testMap(new WeakHashMap()); + + testMap(new IdentityHashMap()); + testMap(new TreeMap()); + testMap(new Hashtable()); + testMap(new ConcurrentHashMap(10, 0.5f)); + testMap(new ConcurrentSkipListMap()); + testMap(Collections.checkedMap(new HashMap(), Integer.class, Integer.class)); + testMap(Collections.checkedSortedMap(new TreeMap(), Integer.class, Integer.class)); + testMap(Collections.checkedNavigableMap(new TreeMap(), Integer.class, Integer.class)); + testMap(Collections.synchronizedMap(new HashMap())); + testMap(Collections.synchronizedSortedMap(new TreeMap())); + testMap(Collections.synchronizedNavigableMap(new TreeMap())); + + // Unmodifiable wrappers + testImmutableSet(unmodifiableSet(new HashSet<>(Arrays.asList(1,2,3))), 99); + testImmutableList(unmodifiableList(Arrays.asList(1,2,3))); + testImmutableMap(unmodifiableMap(Collections.singletonMap(1,2))); +// testImmutableSeqColl(unmodifiableSequencedCollection(Arrays.asList(1,2,3)), 99); +// testImmutableSeqColl(unmodifiableSequencedSet(new LinkedHashSet<>(Arrays.asList(1,2,3))), 99); +// var lhm = new LinkedHashMap(); lhm.put(1,2); lhm.put(3, 4); +// testImmutableSeqMap(unmodifiableSequencedMap(lhm)); + testCollMutatorsAlwaysThrow(unmodifiableSet(new HashSet<>(Arrays.asList(1,2,3)))); + testCollMutatorsAlwaysThrow(unmodifiableSet(Collections.emptySet())); + testEmptyCollMutatorsAlwaysThrow(unmodifiableSet(Collections.emptySet())); + testListMutatorsAlwaysThrow(unmodifiableList(Arrays.asList(1,2,3))); + testListMutatorsAlwaysThrow(unmodifiableList(Collections.emptyList())); + testEmptyListMutatorsAlwaysThrow(unmodifiableList(Collections.emptyList())); + testMapMutatorsAlwaysThrow(unmodifiableMap(Collections.singletonMap(1,2))); + testMapMutatorsAlwaysThrow(unmodifiableMap(Collections.emptyMap())); + testEmptyMapMutatorsAlwaysThrow(unmodifiableMap(Collections.emptyMap())); + + // Empty collections + final List emptyArray = Arrays.asList(new Integer[]{}); + testCollection(emptyArray); + testEmptyList(emptyArray); + THROWS(IndexOutOfBoundsException.class, () -> emptyArray.set(0,1)); + THROWS(UnsupportedOperationException.class, () -> emptyArray.add(0,1)); + + List noOne = Collections.nCopies(0,1); + testCollection(noOne); + testEmptyList(noOne); + testImmutableList(noOne); + + Set emptySet = emptySet(); + testCollection(emptySet); + testEmptySet(emptySet); + testEmptySet(EMPTY_SET); + testEmptySet(Collections.emptySet()); + testEmptySet(Collections.emptySortedSet()); + testEmptySet(Collections.emptyNavigableSet()); + testImmutableSet(emptySet, 99); + + List emptyList = emptyList(); + testCollection(emptyList); + testEmptyList(emptyList); + testEmptyList(EMPTY_LIST); + testEmptyList(Collections.emptyList()); + testImmutableList(emptyList); + + Map emptyMap = emptyMap(); + testMap(emptyMap); + testEmptyMap(emptyMap); + testEmptyMap(EMPTY_MAP); + testEmptyMap(Collections.emptyMap()); + 
testEmptyMap(Collections.emptySortedMap()); + testEmptyMap(Collections.emptyNavigableMap()); + testImmutableMap(emptyMap); + testImmutableMap(Collections.emptyMap()); + testImmutableMap(Collections.emptySortedMap()); + testImmutableMap(Collections.emptyNavigableMap()); + + // Singleton collections + Set singletonSet = singleton(1); + equal(singletonSet.size(), 1); + testCollection(singletonSet); + testImmutableSet(singletonSet, 99); + + List singletonList = singletonList(1); + equal(singletonList.size(), 1); + testCollection(singletonList); + testImmutableList(singletonList); + testImmutableList(singletonList.subList(0,1)); + testImmutableList(singletonList.subList(0,1).subList(0,1)); + testEmptyList(singletonList.subList(0,0)); + testEmptyList(singletonList.subList(0,0).subList(0,0)); + + Map singletonMap = singletonMap(1,2); + equal(singletonMap.size(), 1); + testMap(singletonMap); + testImmutableMap(singletonMap); + + // Immutable List + testEmptyList(List.of()); + testEmptyList(List.of().subList(0,0)); + testListMutatorsAlwaysThrow(List.of()); + testListMutatorsAlwaysThrow(List.of().subList(0,0)); + testEmptyListMutatorsAlwaysThrow(List.of()); + testEmptyListMutatorsAlwaysThrow(List.of().subList(0,0)); + for (List list : Arrays.asList( + List.of(), + List.of(1), + List.of(1, 2), + List.of(1, 2, 3), + List.of(1, 2, 3, 4), + List.of(1, 2, 3, 4, 5), + List.of(1, 2, 3, 4, 5, 6), + List.of(1, 2, 3, 4, 5, 6, 7), + List.of(1, 2, 3, 4, 5, 6, 7, 8), + List.of(1, 2, 3, 4, 5, 6, 7, 8, 9), + List.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), + List.of(integerArray) +// Stream.empty().toList(), +// Stream.of(1).toList(), +// Stream.of(1, 2).toList(), +// Stream.of(1, 2, 3).toList(), +// Stream.of(1, 2, 3, 4).toList(), +// Stream.of((Integer)null).toList(), +// Stream.of(1, null).toList(), +// Stream.of(1, null, 3).toList(), +// Stream.of(1, null, 3, 4).toList() + )) { + testCollection(list); + testImmutableList(list); + testListMutatorsAlwaysThrow(list); + testImmutableListMutatorsAlwaysThrow(list); + if (list.size() >= 1) { + // test subLists + List headList = list.subList(0, list.size() - 1); + List tailList = list.subList(1, list.size()); + testCollection(headList); + testCollection(tailList); + testImmutableList(headList); + testImmutableList(tailList); + testListMutatorsAlwaysThrow(headList); + testListMutatorsAlwaysThrow(tailList); + } + } + + List listCopy = List.copyOf(Arrays.asList(1, 2, 3)); + testCollection(listCopy); + testImmutableList(listCopy); + testListMutatorsAlwaysThrow(listCopy); + + List listCollected = Stream.of(1, 2, 3).collect(Collectors.toUnmodifiableList()); + equal(listCollected, List.of(1, 2, 3)); + testCollection(listCollected); + testImmutableList(listCollected); + testListMutatorsAlwaysThrow(listCollected); + + // List indexOf / lastIndexOf + + // 0 element + System.out.println("testListIndexOf size 0"); + testListIndexOf(-1, -1); + + System.out.println("testListIndexOf size 1"); + testListIndexOf(-1, -1, 0); + testListIndexOf(0, 0, 1); + + System.out.println("testListIndexOf size 2"); + testListIndexOf(-1, -1, 0, 0); + testListIndexOf(0, 0, 1, 0); + testListIndexOf(0, 1, 1, 1); + testListIndexOf(1, 1, 0, 1); + + + System.out.println("testListIndexOf size 3"); + testListIndexOf(-1, -1, 0, 0, 0); + testListIndexOf(0, 0, 1, 0, 0); + testListIndexOf(0, 1, 1, 1, 0); + testListIndexOf(1, 2, 0, 1, 1); + testListIndexOf(2, 2, 0, 0, 1); + + System.out.println("testListIndexOf size N"); + testListIndexOf(-1, -1, 0, 0, 0, 0, 0, 0, 0); + testListIndexOf(2, 6, 0, 0, 1, 0, 1, 0, 1); + 
testListIndexOf(4, 4, 0, 0, 0, 0, 1, 0, 0); + testListIndexOf(0, 6, 1, 1, 1, 1, 1, 1, 1); + testListIndexOf(0, 7, 1, 1, 1, 1, 1, 1, 1, 1); + testListIndexOf(0, 8, 1, 1, 1, 1, 1, 1, 1, 1, 1); + testListIndexOf(0, 9, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1); + testListIndexOf(0, 10, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1); + testListIndexOf(0, 11, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1); + testListIndexOf(0, 12, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1); + testListIndexOf(12, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1); + testListIndexOf(-1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); + + // Immutable Set + testEmptySet(Set.of()); + testCollMutatorsAlwaysThrow(Set.of()); + testEmptyCollMutatorsAlwaysThrow(Set.of()); + for (Set set : Arrays.asList( + Set.of(), + Set.of(1), + Set.of(1, 2), + Set.of(1, 2, 3), + Set.of(1, 2, 3, 4), + Set.of(1, 2, 3, 4, 5), + Set.of(1, 2, 3, 4, 5, 6), + Set.of(1, 2, 3, 4, 5, 6, 7), + Set.of(1, 2, 3, 4, 5, 6, 7, 8), + Set.of(1, 2, 3, 4, 5, 6, 7, 8, 9), + Set.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), + Set.of(integerArray))) { + testCollection(set); + testImmutableSet(set, 99); + testCollMutatorsAlwaysThrow(set); + } + + Set setCopy = Set.copyOf(Arrays.asList(1, 2, 3)); + testCollection(setCopy); + testImmutableSet(setCopy, 99); + testCollMutatorsAlwaysThrow(setCopy); + + Set setCollected = Stream.of(1, 1, 2, 3, 2, 3) + .collect(Collectors.toUnmodifiableSet()); + equal(setCollected, Set.of(1, 2, 3)); + testCollection(setCollected); + testImmutableSet(setCollected, 99); + testCollMutatorsAlwaysThrow(setCollected); + + // Immutable Map + + @SuppressWarnings("unchecked") + Map.Entry[] ea = (Map.Entry[])new Map.Entry[20]; + for (int i = 0; i < ea.length; i++) { + ea[i] = Map.entry(i+1, i+101); + } + + testEmptyMap(Map.of()); + testMapMutatorsAlwaysThrow(Map.of()); + testEmptyMapMutatorsAlwaysThrow(Map.of()); + for (Map map : Arrays.asList( + Map.of(), + Map.of(1, 101), + Map.of(1, 101, 2, 202), + Map.of(1, 101, 2, 202, 3, 303), + Map.of(1, 101, 2, 202, 3, 303, 4, 404), + Map.of(1, 101, 2, 202, 3, 303, 4, 404, 5, 505), + Map.of(1, 101, 2, 202, 3, 303, 4, 404, 5, 505, 6, 606), + Map.of(1, 101, 2, 202, 3, 303, 4, 404, 5, 505, 6, 606, 7, 707), + Map.of(1, 101, 2, 202, 3, 303, 4, 404, 5, 505, 6, 606, 7, 707, 8, 808), + Map.of(1, 101, 2, 202, 3, 303, 4, 404, 5, 505, 6, 606, 7, 707, 8, 808, 9, 909), + Map.of(1, 101, 2, 202, 3, 303, 4, 404, 5, 505, 6, 606, 7, 707, 8, 808, 9, 909, 10, 1010), + Map.ofEntries(ea))) { + testMap(map); + testImmutableMap(map); + testMapMutatorsAlwaysThrow(map); + } + + Map mapCopy = Map.copyOf(new HashMap<>(Map.of(1, 101, 2, 202, 3, 303))); + testMap(mapCopy); + testImmutableMap(mapCopy); + testMapMutatorsAlwaysThrow(mapCopy); + + Map mapCollected1 = + Stream.of(1, 2, 3) + .collect(Collectors.toUnmodifiableMap(i -> i, i -> 101 * i)); + equal(mapCollected1, Map.of(1, 101, 2, 202, 3, 303)); + testMap(mapCollected1); + testImmutableMap(mapCollected1); + testMapMutatorsAlwaysThrow(mapCollected1); + + try { + Stream.of(1, 1, 2, 3, 2, 3) + .collect(Collectors.toUnmodifiableMap(i -> i, i -> 101 * i)); + fail("duplicates should have thrown an exception"); + } catch (IllegalStateException ise) { + pass(); + } + + Map mapCollected2 = + Stream.of(1, 1, 2, 3, 2, 3) + .collect(Collectors.toUnmodifiableMap(i -> i, i -> 101 * i, Integer::sum)); + equal(mapCollected2, Map.of(1, 202, 2, 404, 3, 606)); + testMap(mapCollected2); + testImmutableMap(mapCollected2); + testMapMutatorsAlwaysThrow(mapCollected2); + } + + private static void checkContainsSelf(Collection c) { + check(c.containsAll(c)); + 
check(c.containsAll(Arrays.asList(c.toArray()))); + check(c.containsAll(Arrays.asList(c.toArray(new Integer[0])))); + } + + private static void checkContainsEmpty(Collection c) { + check(c.containsAll(new ArrayList())); + } + + private static void checkUnique(Set s) { + for (Integer i : s) { + int count = 0; + for (Integer j : s) { + if (Objects.equals(i,j)) { + ++count; + } + } + check(count == 1); + } + } + + private static void testEmptyCollection(Collection c) { + check(c.isEmpty()); + equal(c.size(), 0); + equal(c.toString(),"[]"); + equal(c.toArray().length, 0); + equal(c.toArray(new Object[0]).length, 0); + equal(c.toArray(Object[]::new).length, 0); + check(c.toArray(new Object[]{42})[0] == null); + + Object[] a = new Object[1]; a[0] = Boolean.TRUE; + equal(c.toArray(a), a); + equal(a[0], null); + testEmptyIterator(c.iterator()); + } + + static void testEmptyIterator(final Iterator it) { + if (rnd.nextBoolean()) { + check(! it.hasNext()); + } + + THROWS(NoSuchElementException.class, () -> it.next()); + + try { it.remove(); } + catch (IllegalStateException ignored) { pass(); } + catch (UnsupportedOperationException ignored) { pass(); } + catch (Throwable t) { unexpected(t); } + + if (rnd.nextBoolean()) { + check(! it.hasNext()); + } + } + + private static void testEmptyList(List c) { + testEmptyCollection(c); + equal(c.hashCode(), 1); + equal2(c, Collections.emptyList()); + } + + private static void testEmptySet(Set c) { + testEmptyCollection(c); + equal(c.hashCode(), 0); + equal2(c, Collections.emptySet()); + if (c instanceof NavigableSet) { + testEmptyIterator(((NavigableSet)c).descendingIterator()); + } + } + + private static void testImmutableCollection(final Collection c, T t) { + THROWS(UnsupportedOperationException.class, + () -> c.add(t), + () -> c.addAll(singleton(t))); + if (! c.isEmpty()) { + final T first = c.iterator().next(); + THROWS(UnsupportedOperationException.class, + () -> c.clear(), + () -> c.remove(first), + () -> c.removeAll(singleton(first)), + () -> c.retainAll(emptyList())); + } + } + +// private static void testImmutableSeqColl(final SequencedCollection c, T t) { +// SequencedCollection r = c.reversed(); +// testImmutableCollection(c, t); +// testImmutableCollection(r, t); +// THROWS(UnsupportedOperationException.class, +// () -> c.addFirst(t), +// () -> c.addLast(t), +// () -> r.addFirst(t), +// () -> r.addLast(t)); +// if (! c.isEmpty()) { +// THROWS(UnsupportedOperationException.class, +// () -> c.removeFirst(), +// () -> c.removeLast(), +// () -> r.removeFirst(), +// () -> r.removeLast()); +// } +// } + + private static void testImmutableSet(final Set c, T t) { + testImmutableCollection(c, t); + } + +// private static void testImmutableSeqSet(final SequencedSet c, T t) { +// testImmutableSeqColl(c, t); +// } + + private static void testImmutableList(final List c) { + testList(c); + testImmutableCollection(c, 42); +// testImmutableSeqColl(c, 42); + THROWS(UnsupportedOperationException.class, + () -> c.set(0,42), + () -> c.add(0,42), + () -> c.addAll(0,singleton(86))); + if (! c.isEmpty()) { + THROWS(UnsupportedOperationException.class, + () -> { Iterator it = c.iterator(); + it.next(); + it.remove(); }, + () -> { ListIterator it = c.listIterator(); + it.next(); + it.remove(); }); + } + } + + /** + * Test that calling a mutator always throws UOE, even if the mutator + * wouldn't actually do anything, given its arguments. 
+ * + * @param c the collection instance to test + */ + private static void testCollMutatorsAlwaysThrow(Collection c) { + THROWS(UnsupportedOperationException.class, + () -> c.addAll(Collections.emptyList()), + () -> c.remove(ABSENT_VALUE), + () -> c.removeAll(Collections.emptyList()), + () -> c.removeIf(x -> false), + () -> c.retainAll(c)); + } + + /** + * Test that calling a mutator always throws UOE, even if the mutator + * wouldn't actually do anything on an empty collection. + * + * @param c the collection instance to test, must be empty + */ + private static void testEmptyCollMutatorsAlwaysThrow(Collection c) { + if (! c.isEmpty()) { + fail("collection is not empty"); + } + THROWS(UnsupportedOperationException.class, + () -> c.clear()); + } + + /** + * As above, for a list. + * + * @param c the list instance to test + */ + private static void testListMutatorsAlwaysThrow(List c) { + testCollMutatorsAlwaysThrow(c); + THROWS(UnsupportedOperationException.class, + () -> c.addAll(0, Collections.emptyList())); + } + + private static void testImmutableListMutatorsAlwaysThrow(List c) { +// THROWS(UnsupportedOperationException.class, +// c::removeFirst, +// c::removeLast); + } + + /** + * As above, for an empty list. + * + * @param c the list instance to test, must be empty + */ + private static void testEmptyListMutatorsAlwaysThrow(List c) { + if (! c.isEmpty()) { + fail("list is not empty"); + } + testEmptyCollMutatorsAlwaysThrow(c); + THROWS(UnsupportedOperationException.class, + () -> c.replaceAll(x -> x), + () -> c.sort(null)); + } + + /** + * As above, for a map. + * + * @param m the map instance to test + */ + private static void testMapMutatorsAlwaysThrow(Map m) { + THROWS(UnsupportedOperationException.class, + () -> m.compute(ABSENT_VALUE, (k, v) -> null), + () -> m.computeIfAbsent(ABSENT_VALUE, k -> null), + () -> m.computeIfPresent(ABSENT_VALUE, (k, v) -> null), + () -> m.merge(ABSENT_VALUE, 0, (k, v) -> null), + () -> m.putAll(Collections.emptyMap()), + () -> m.remove(ABSENT_VALUE), + () -> m.remove(ABSENT_VALUE, 0), + () -> m.replace(ABSENT_VALUE, 0), + () -> m.replace(ABSENT_VALUE, 0, 1)); + } + + /** + * As above, for an empty map. + * + * @param map the map instance to test, must be empty + */ + private static void testEmptyMapMutatorsAlwaysThrow(Map m) { + if (! m.isEmpty()) { + fail("map is not empty"); + } + THROWS(UnsupportedOperationException.class, + () -> m.clear(), + () -> m.replaceAll((k, v) -> v)); + } + + private static void clear(Collection c) { + try { c.clear(); } + catch (Throwable t) { unexpected(t); } + testEmptyCollection(c); + } + + private static void testEmptyMap(final Map m) { + check(m.isEmpty()); + equal(m.size(), 0); + equal(m.toString(),"{}"); + testEmptySet(m.keySet()); + testEmptySet(m.entrySet()); + testEmptyCollection(m.values()); + + try { check(! m.containsValue(null)); } + catch (NullPointerException ignored) { /* OK */ } + try { check(! m.containsKey(null)); } + catch (NullPointerException ignored) { /* OK */ } + check(! m.containsValue(1)); + check(! m.containsKey(1)); + } + + private static void testImmutableMapEntry(final Map.Entry me) { + Integer key = me.getKey(); + Integer val = me.getValue(); + THROWS(UnsupportedOperationException.class, + () -> me.setValue(3)); + equal(key, me.getKey()); + equal(val, me.getValue()); + } + + private static void testImmutableMap(final Map m) { + THROWS(UnsupportedOperationException.class, + () -> m.put(1,1), + () -> m.putAll(singletonMap(1,1))); + if (! 
m.isEmpty()) { + final Integer first = m.keySet().iterator().next(); + THROWS(UnsupportedOperationException.class, + () -> m.remove(first), + () -> m.clear()); + testImmutableMapEntry(m.entrySet().iterator().next()); + } + testImmutableSet(m.keySet(), 99); + testImmutableCollection(m.values(), 99); + testImmutableSet(m.entrySet(), Map.entry(42, 43)); + } + +// private static void testImmutableSeqMap(final SequencedMap m) { +// SequencedMap r = m.reversed(); +// testImmutableMap(m); +// testImmutableMap(r); +// THROWS(UnsupportedOperationException.class, +// () -> m.putFirst(0, 0), +// () -> m.putLast(0, 0), +// () -> r.putFirst(0, 0), +// () -> r.putLast(0, 0)); +// if (! m.isEmpty()) { +// THROWS(UnsupportedOperationException.class, +// () -> m.pollFirstEntry(), +// () -> m.pollLastEntry(), +// () -> r.pollFirstEntry(), +// () -> r.pollLastEntry()); +// testImmutableMapEntry(m.sequencedEntrySet().getFirst()); +// testImmutableMapEntry(r.sequencedEntrySet().getFirst()); +// testImmutableMapEntry(m.sequencedEntrySet().reversed().getFirst()); +// testImmutableMapEntry(r.sequencedEntrySet().reversed().getFirst()); +// } +// testImmutableSeqSet(m.sequencedKeySet(), 99); +// testImmutableSeqColl(m.sequencedValues(), 99); +// testImmutableSeqSet(m.sequencedEntrySet(), Map.entry(42, 43)); +// testImmutableSeqSet(r.sequencedKeySet(), 99); +// testImmutableSeqColl(r.sequencedValues(), 99); +// testImmutableSeqSet(r.sequencedEntrySet(), Map.entry(42, 43)); +// } + + private static void clear(Map m) { + try { m.clear(); } + catch (Throwable t) { unexpected(t); } + testEmptyMap(m); + } + + private static void oneElement(Collection c) { + clear(c); + try { + check(c.add(-42)); + equal(c.toString(), "[-42]"); + if (c instanceof Set) { + check(! c.add(-42)); + } + } catch (Throwable t) { unexpected(t); } + check(! c.isEmpty()); check(c.size() == 1); + } + + private static boolean supportsAdd(Collection c) { + try { check(c.add(ABSENT_VALUE)); } + catch (UnsupportedOperationException t) { return false; } + catch (Throwable t) { unexpected(t); } + + try { + check(c.contains(ABSENT_VALUE)); + check(c.remove(ABSENT_VALUE)); + } catch (Throwable t) { unexpected(t); } + return true; + } + + private static boolean supportsRemove(Collection c) { + try { check(! c.remove(ABSENT_VALUE)); } + catch (UnsupportedOperationException t) { return false; } + catch (Throwable t) { unexpected(t); } + return true; + } + + // 6260652: (coll) Arrays.asList(x).toArray().getClass() + // should be Object[].class + // Fixed in jdk9, but not jdk8 ... + static final boolean needToWorkAround6260652 = + Arrays.asList("").toArray().getClass() != Object[].class; + + private static void checkFunctionalInvariants(Collection c) { + try { + checkContainsSelf(c); + checkContainsEmpty(c); + check(c.size() != 0 ^ c.isEmpty()); + check(! 
c.contains(ABSENT_VALUE)); + + { + int size = 0; + for (Integer i : c) { + size++; + } + check(c.size() == size); + } + + if (c instanceof Set) { + checkUnique((Set)c); + } + + check(c.toArray().length == c.size()); + check(c.toArray().getClass() == Object[].class + || + (needToWorkAround6260652 && + c.getClass().getName().equals("java.util.Arrays$ArrayList"))); + for (int size : new int[]{0,1,c.size(), c.size()+1}) { + Integer[] a = c.toArray(new Integer[size]); + check((size > c.size()) || a.length == c.size()); + int i = 0; for (Integer j : c) { + check(a[i++] == j); + } + check((size <= c.size()) || (a[c.size()] == null)); + check(a.getClass() == Integer[].class); + } + + { + Integer[] a = c.toArray(Integer[]::new); + equal(c.size(), a.length); + check(a.getClass() == Integer[].class); + check(Arrays.equals(c.toArray(new Integer[0]), a)); + } + + check(c.equals(c)); + if (c instanceof Serializable) { + //System.out.printf("Serializing %s%n", c.getClass().getName()); + try { + Object clone = serialClone(c); + equal(c instanceof Serializable, + clone instanceof Serializable); + equal(c instanceof RandomAccess, + clone instanceof RandomAccess); + if ((c instanceof List) || (c instanceof Set)) { + equal(c, clone); + } else { + equal(new HashSet(c), + new HashSet(serialClone(c))); + } + } catch (Error xxx) { + if (! (xxx.getCause() instanceof NotSerializableException)) { + throw xxx; + } + } + } + } + catch (Throwable t) { unexpected(t); } + } + + //---------------------------------------------------------------- + // If add(null) succeeds, contains(null) & remove(null) should succeed + //---------------------------------------------------------------- + private static void testNullElement(Collection c) { + + try { + check(c.add(null)); + if (c.size() == 1) { + equal(c.toString(), "[null]"); + } + try { + checkFunctionalInvariants(c); + check(c.contains(null)); + check(c.remove(null)); + } + catch (Throwable t) { unexpected(t); } + } + catch (NullPointerException e) { /* OK */ } + catch (Throwable t) { unexpected(t); } + } + + //---------------------------------------------------------------- + // If add("x") succeeds, contains("x") & remove("x") should succeed + //---------------------------------------------------------------- + @SuppressWarnings("unchecked") + private static void testStringElement(Collection c) { + Collection x = c; // Make type-unsafe + try { + check(x.add("x")); + try { + check(x.contains("x")); + check(x.remove("x")); + } catch (Throwable t) { unexpected(t); } + } + catch (ClassCastException e) { /* OK */ } + catch (Throwable t) { unexpected(t); } + } + + private static void testConcurrentCollection(Collection c) { + try { + c.add(1); + Iterator it = c.iterator(); + check(it.hasNext()); + clear(c); + check(it.next() instanceof Integer); // No CME + check(c.isEmpty()); + } + catch (Throwable t) { unexpected(t); } + } + + private static void testQueue(Queue q) { + q.clear(); + for (int i = 0; i < 5; i++) { + testQueueAddRemove(q, null); + testQueueAddRemove(q, 537); + q.add(i); + } + equal(q.size(), 5); + checkFunctionalInvariants(q); + q.poll(); + equal(q.size(), 4); + checkFunctionalInvariants(q); + if ((q instanceof LinkedBlockingQueue) || + (q instanceof LinkedBlockingDeque) || + (q instanceof ConcurrentLinkedDeque) || + (q instanceof ConcurrentLinkedQueue)) { + testQueueIteratorRemove(q); + } + } + + private static void testQueueAddRemove(final Queue q, + final Integer e) { + final List originalContents = new ArrayList<>(q); + final boolean isEmpty = q.isEmpty(); + 
final boolean isList = (q instanceof List); + final List asList = isList ? (List) q : null; + check(!q.contains(e)); + try { + q.add(e); + } catch (NullPointerException npe) { + check(e == null); + return; // Null elements not supported + } + check(q.contains(e)); + check(q.remove(e)); + check(!q.contains(e)); + equal(new ArrayList(q), originalContents); + + if (q instanceof Deque) { + final Deque deq = (Deque) q; + final List singleton = Collections.singletonList(e); + + // insert, query, remove element at head + if (isEmpty) { + THROWS(NoSuchElementException.class, + () -> deq.getFirst(), + () -> deq.element(), + () -> deq.iterator().next()); + check(deq.peekFirst() == null); + check(deq.peek() == null); + } else { + check(deq.getFirst() != e); + check(deq.element() != e); + check(deq.iterator().next() != e); + check(deq.peekFirst() != e); + check(deq.peek() != e); + } + check(!deq.contains(e)); + check(!deq.removeFirstOccurrence(e)); + check(!deq.removeLastOccurrence(e)); + if (isList) { + check(asList.indexOf(e) == -1); + check(asList.lastIndexOf(e) == -1); + } + switch (rnd.nextInt(isList ? 4 : 3)) { + case 0: deq.addFirst(e); break; + case 1: check(deq.offerFirst(e)); break; + case 2: deq.push(e); break; + case 3: asList.add(0, e); break; + default: throw new AssertionError(); + } + check(deq.peekFirst() == e); + check(deq.getFirst() == e); + check(deq.element() == e); + check(deq.peek() == e); + check(deq.iterator().next() == e); + check(deq.contains(e)); + if (isList) { + check(asList.get(0) == e); + check(asList.indexOf(e) == 0); + check(asList.lastIndexOf(e) == 0); + check(asList.subList(0, 1).equals(singleton)); + } + switch (rnd.nextInt(isList ? 11 : 9)) { + case 0: check(deq.pollFirst() == e); break; + case 1: check(deq.removeFirst() == e); break; + case 2: check(deq.remove() == e); break; + case 3: check(deq.pop() == e); break; + case 4: check(deq.removeFirstOccurrence(e)); break; + case 5: check(deq.removeLastOccurrence(e)); break; + case 6: check(deq.remove(e)); break; + case 7: check(deq.removeAll(singleton)); break; + case 8: Iterator it = deq.iterator(); it.next(); it.remove(); break; + case 9: asList.remove(0); break; + case 10: asList.subList(0, 1).clear(); break; + default: throw new AssertionError(); + } + if (isEmpty) { + THROWS(NoSuchElementException.class, + () -> deq.getFirst(), + () -> deq.element(), + () -> deq.iterator().next()); + check(deq.peekFirst() == null); + check(deq.peek() == null); + } else { + check(deq.getFirst() != e); + check(deq.element() != e); + check(deq.iterator().next() != e); + check(deq.peekFirst() != e); + check(deq.peek() != e); + } + check(!deq.contains(e)); + check(!deq.removeFirstOccurrence(e)); + check(!deq.removeLastOccurrence(e)); + if (isList) { + check(isEmpty || asList.get(0) != e); + check(asList.indexOf(e) == -1); + check(asList.lastIndexOf(e) == -1); + } + equal(new ArrayList(deq), originalContents); + + // insert, query, remove element at tail + if (isEmpty) { + check(deq.peekLast() == null); + THROWS(NoSuchElementException.class, () -> deq.getLast()); + } else { + check(deq.peekLast() != e); + check(deq.getLast() != e); + } + switch (rnd.nextInt(isList ? 
6 : 4)) { + case 0: deq.addLast(e); break; + case 1: check(deq.offerLast(e)); break; + case 2: check(deq.add(e)); break; + case 3: deq.addAll(singleton); break; + case 4: asList.addAll(deq.size(), singleton); break; + case 5: asList.add(deq.size(), e); break; + default: throw new AssertionError(); + } + check(deq.peekLast() == e); + check(deq.getLast() == e); + check(deq.contains(e)); + if (isList) { + ListIterator it = asList.listIterator(asList.size()); + check(it.previous() == e); + check(asList.get(asList.size() - 1) == e); + check(asList.indexOf(e) == asList.size() - 1); + check(asList.lastIndexOf(e) == asList.size() - 1); + int size = asList.size(); + check(asList.subList(size - 1, size).equals(singleton)); + } + switch (rnd.nextInt(isList ? 8 : 6)) { + case 0: check(deq.pollLast() == e); break; + case 1: check(deq.removeLast() == e); break; + case 2: check(deq.removeFirstOccurrence(e)); break; + case 3: check(deq.removeLastOccurrence(e)); break; + case 4: check(deq.remove(e)); break; + case 5: check(deq.removeAll(singleton)); break; + case 6: asList.remove(asList.size() - 1); break; + case 7: + ListIterator it = asList.listIterator(asList.size()); + it.previous(); + it.remove(); + break; + default: throw new AssertionError(); + } + if (isEmpty) { + check(deq.peekLast() == null); + THROWS(NoSuchElementException.class, () -> deq.getLast()); + } else { + check(deq.peekLast() != e); + check(deq.getLast() != e); + } + check(!deq.contains(e)); + equal(new ArrayList(deq), originalContents); + + // Test operations on empty deque + switch (rnd.nextInt(isList ? 4 : 2)) { + case 0: deq.clear(); break; + case 1: + Iterator it = deq.iterator(); + while (it.hasNext()) { + it.next(); + it.remove(); + } + break; + case 2: asList.subList(0, asList.size()).clear(); break; + case 3: + ListIterator lit = asList.listIterator(asList.size()); + while (lit.hasPrevious()) { + lit.previous(); + lit.remove(); + } + break; + default: throw new AssertionError(); + } + testEmptyCollection(deq); + check(!deq.iterator().hasNext()); + if (isList) { + check(!asList.listIterator().hasPrevious()); + THROWS(NoSuchElementException.class, + () -> asList.listIterator().previous()); + } + THROWS(NoSuchElementException.class, + () -> deq.iterator().next(), + () -> deq.element(), + () -> deq.getFirst(), + () -> deq.getLast(), + () -> deq.pop(), + () -> deq.remove(), + () -> deq.removeFirst(), + () -> deq.removeLast()); + + check(deq.poll() == null); + check(deq.pollFirst() == null); + check(deq.pollLast() == null); + check(deq.peek() == null); + check(deq.peekFirst() == null); + check(deq.peekLast() == null); + check(!deq.removeFirstOccurrence(e)); + check(!deq.removeLastOccurrence(e)); + + check(deq.addAll(originalContents) == !isEmpty); + equal(new ArrayList(deq), originalContents); + check(!deq.addAll(Collections.emptyList())); + equal(new ArrayList(deq), originalContents); + } + } + + private static void testQueueIteratorRemove(Queue q) { + System.err.printf(Locale.US, "testQueueIteratorRemove %s%n", + q.getClass().getSimpleName()); + q.clear(); + for (int i = 0; i < 5; i++) { + q.add(i); + } + Iterator it = q.iterator(); + check(it.hasNext()); + for (int i = 3; i >= 0; i--) { + q.remove(i); + } + equal(it.next(), 0); + equal(it.next(), 4); + + q.clear(); + for (int i = 0; i < 5; i++) { + q.add(i); + } + it = q.iterator(); + equal(it.next(), 0); + check(it.hasNext()); + for (int i = 1; i < 4; i++) { + q.remove(i); + } + equal(it.next(), 1); + equal(it.next(), 4); + } + + // for any array of integer values, check that the 
result of lastIndexOf(1) + // and indexOf(1) match assumptions for all types of List we can + // construct + private static void testListIndexOf(final int index, + final int lastIndex, + final Integer ... values) { + if (values.length == 0) { + checkListIndexOf(emptyList(), index, lastIndex); + } else if (values.length == 1) { + checkListIndexOf(singletonList(values[0]), index, lastIndex); + checkListIndexOf(Collections.nCopies(25, values[0]), index, lastIndex == 0 ? 24 : -1); + } + List l = List.of(values); + checkListIndexOf(l, index, lastIndex); + checkListIndexOf(Arrays.asList(values), index, lastIndex); + checkListIndexOf(new ArrayList(l), index, lastIndex); + checkListIndexOf(new LinkedList(l), index, lastIndex); + checkListIndexOf(new Vector(l), index, lastIndex); + checkListIndexOf(new CopyOnWriteArrayList(l), index, lastIndex); + } + + private static void checkListIndexOf(final List list, + final int index, + final int lastIndex) { + String msg = list.getClass().toString(); + equal(list.indexOf(1), index, msg); + equal(list.lastIndexOf(1), lastIndex, msg); + equal(list.subList(0, list.size()).indexOf(1), index, msg); + equal(list.subList(0, list.size()).lastIndexOf(1), lastIndex, msg); + } + + private static void testList(final List l) { + //---------------------------------------------------------------- + // 4802633: (coll) AbstractList.addAll(-1,emptyCollection) + // doesn't throw IndexOutOfBoundsException + //---------------------------------------------------------------- + try { + l.addAll(-1, Collections.emptyList()); + fail("Expected IndexOutOfBoundsException not thrown"); + } + catch (UnsupportedOperationException ignored) {/* OK */} + catch (IndexOutOfBoundsException ignored) {/* OK */} + catch (Throwable t) { unexpected(t); } + +// equal(l instanceof Serializable, +// l.subList(0,0) instanceof Serializable); + if (l.subList(0,0) instanceof Serializable) { + check(l instanceof Serializable); + } + + equal(l instanceof RandomAccess, + l.subList(0,0) instanceof RandomAccess); + + l.iterator(); + l.listIterator(); + l.listIterator(0); + l.listIterator(l.size()); + THROWS(IndexOutOfBoundsException.class, + () -> l.listIterator(-1), + () -> l.listIterator(l.size() + 1)); + + if (l instanceof AbstractList) { + try { + int size = l.size(); + AbstractList abList = (AbstractList) l; + Method m = AbstractList.class.getDeclaredMethod("removeRange", new Class[] { int.class, int.class }); + m.setAccessible(true); + m.invoke(abList, new Object[] { 0, 0 }); + m.invoke(abList, new Object[] { size, size }); + equal(size, l.size()); + } + catch (UnsupportedOperationException ignored) {/* OK */} + catch (Throwable t) { unexpected(t); } + } + + int hashCode = 1; + for (Integer i : l) { + hashCode = 31 * hashCode + (i == null ? 0 : i.hashCode()); + } + check(l.hashCode() == hashCode); + + var t = new ArrayList<>(l); + check(t.equals(l)); + check(l.equals(t)); + } + + private static void testCollection(Collection c) { + try { testCollection1(c); } + catch (Throwable t) { unexpected(t); } + } + + private static void testCollection1(Collection c) { + + System.out.println("\n==> " + c.getClass().getName()); + + checkFunctionalInvariants(c); + + if (! 
supportsAdd(c)) + { + return; + //System.out.println("add() supported"); + } + + if (c instanceof NavigableSet) { + System.out.println("NavigableSet tests..."); + + NavigableSet ns = (NavigableSet)c; + testNavigableSet(ns); + testNavigableSet(ns.headSet(6, false)); + testNavigableSet(ns.headSet(5, true)); + testNavigableSet(ns.tailSet(0, false)); + testNavigableSet(ns.tailSet(1, true)); + testNavigableSet(ns.subSet(0, false, 5, true)); + testNavigableSet(ns.subSet(1, true, 6, false)); + } + + if (c instanceof Queue) { + testQueue((Queue)c); + } + + if (c instanceof List) { + testList((List)c); + } + + if (c instanceof Set) { + int hashCode = 0; + for (Integer i : c) { + hashCode = hashCode + (i == null ? 0 : i.hashCode()); + } + check(c.hashCode() == hashCode); + } + + check(supportsRemove(c)); + + try { + oneElement(c); + checkFunctionalInvariants(c); + } + catch (Throwable t) { unexpected(t); } + + clear(c); testNullElement(c); + oneElement(c); testNullElement(c); + + clear(c); testStringElement(c); + oneElement(c); testStringElement(c); + + if (c.getClass().getName().matches(".*concurrent.*")) { + testConcurrentCollection(c); + } + + //---------------------------------------------------------------- + // The "all" operations should throw NPE when passed null + //---------------------------------------------------------------- + { + clear(c); + try { + c.removeAll(null); + fail("Expected NullPointerException"); + } + catch (NullPointerException e) { pass(); } + catch (Throwable t) { unexpected(t); } + + oneElement(c); + try { + c.removeAll(null); + fail("Expected NullPointerException"); + } + catch (NullPointerException e) { pass(); } + catch (Throwable t) { unexpected(t); } + + clear(c); + try { + c.retainAll(null); + fail("Expected NullPointerException"); + } + catch (NullPointerException e) { pass(); } + catch (Throwable t) { unexpected(t); } + + oneElement(c); + try { + c.retainAll(null); + fail("Expected NullPointerException"); + } + catch (NullPointerException e) { pass(); } + catch (Throwable t) { unexpected(t); } + + oneElement(c); + try { + c.addAll(null); + fail("Expected NullPointerException"); + } + catch (NullPointerException e) { pass(); } + catch (Throwable t) { unexpected(t); } + + oneElement(c); + try { + c.containsAll(null); + fail("Expected NullPointerException"); + } + catch (NullPointerException e) { pass(); } + catch (Throwable t) { unexpected(t); } + } + } + + //---------------------------------------------------------------- + // Map + //---------------------------------------------------------------- + private static void checkFunctionalInvariants(Map m) { + check(m.keySet().size() == m.entrySet().size()); + check(m.keySet().size() == m.size()); + checkFunctionalInvariants(m.keySet()); + checkFunctionalInvariants(m.values()); + check(m.size() != 0 ^ m.isEmpty()); + check(! m.containsKey(ABSENT_VALUE)); + + if (m instanceof Serializable) { + //System.out.printf("Serializing %s%n", m.getClass().getName()); + try { + Object clone = serialClone(m); + equal(m instanceof Serializable, + clone instanceof Serializable); + equal(m, clone); + } catch (Error xxx) { + if (! (xxx.getCause() instanceof NotSerializableException)) { + throw xxx; + } + } + } + } + + private static void testMap(Map m) { + System.out.println("\n==> " + m.getClass().getName()); + + int hashCode = 0; + for (var e : m.entrySet()) { + int entryHash = (e.getKey() == null ? 0 : e.getKey().hashCode()) ^ + (e.getValue() == null ? 
0 : e.getValue().hashCode()); + check(e.hashCode() == entryHash); + hashCode += entryHash; + } + check(m.hashCode() == hashCode); + + if (m instanceof ConcurrentMap) { + testConcurrentMap((ConcurrentMap) m); + } + + if (m instanceof NavigableMap) { + System.out.println("NavigableMap tests..."); + + NavigableMap nm = + (NavigableMap) m; + testNavigableMapRemovers(nm); + testNavigableMap(nm); + testNavigableMap(nm.headMap(6, false)); + testNavigableMap(nm.headMap(5, true)); + testNavigableMap(nm.tailMap(0, false)); + testNavigableMap(nm.tailMap(1, true)); + testNavigableMap(nm.subMap(1, true, 6, false)); + testNavigableMap(nm.subMap(0, false, 5, true)); + } + + checkFunctionalInvariants(m); + + if (supportsClear(m)) { + try { clear(m); } + catch (Throwable t) { unexpected(t); } + } + + if (supportsPut(m)) { + try { + check(m.put(3333, 77777) == null); + check(m.put(9134, 74982) == null); + check(m.get(9134) == 74982); + check(m.put(9134, 1382) == 74982); + check(m.get(9134) == 1382); + check(m.size() == 2); + checkFunctionalInvariants(m); + checkNPEConsistency(m); + } + catch (Throwable t) { unexpected(t); } + } + } + + private static boolean supportsPut(Map m) { + // We're asking for .equals(...) semantics + if (m instanceof IdentityHashMap) { + return false; + } + + try { check(m.put(ABSENT_VALUE,12735) == null); } + catch (UnsupportedOperationException t) { return false; } + catch (Throwable t) { unexpected(t); } + + try { + check(m.containsKey(ABSENT_VALUE)); + check(m.remove(ABSENT_VALUE) != null); + } catch (Throwable t) { unexpected(t); } + return true; + } + + private static boolean supportsClear(Map m) { + try { m.clear(); } + catch (UnsupportedOperationException t) { return false; } + catch (Throwable t) { unexpected(t); } + return true; + } + + //---------------------------------------------------------------- + // ConcurrentMap + //---------------------------------------------------------------- + private static void testConcurrentMap(ConcurrentMap m) { + System.out.println("ConcurrentMap tests..."); + + try { + clear(m); + + check(m.putIfAbsent(18357,7346) == null); + check(m.containsKey(18357)); + check(m.putIfAbsent(18357,8263) == 7346); + try { m.putIfAbsent(18357,null); fail("NPE"); } + catch (NullPointerException t) { } + check(m.containsKey(18357)); + + check(! m.replace(18357,8888,7777)); + check(m.containsKey(18357)); + try { m.replace(18357,null,7777); fail("NPE"); } + catch (NullPointerException t) { } + check(m.containsKey(18357)); + check(m.get(18357) == 7346); + check(m.replace(18357,7346,5555)); + check(m.replace(18357,5555,7346)); + check(m.get(18357) == 7346); + + check(m.replace(92347,7834) == null); + try { m.replace(18357,null); fail("NPE"); } + catch (NullPointerException t) { } + check(m.replace(18357,7346) == 7346); + check(m.replace(18357,5555) == 7346); + check(m.get(18357) == 5555); + check(m.replace(18357,7346) == 5555); + check(m.get(18357) == 7346); + + check(! m.remove(18357,9999)); + check(m.get(18357) == 7346); + check(m.containsKey(18357)); + check(! m.remove(18357,null)); // 6272521 + check(m.get(18357) == 7346); + check(m.remove(18357,7346)); + check(m.get(18357) == null); + check(! m.containsKey(18357)); + check(m.isEmpty()); + + m.putIfAbsent(1,2); + check(m.size() == 1); + check(! m.remove(1,null)); + check(! m.remove(1,null)); + check(! 
m.remove(1,1)); + check(m.remove(1,2)); + check(m.isEmpty()); + + testEmptyMap(m); + } + catch (Throwable t) { unexpected(t); } + } + + private static void throwsConsistently(Class k, + Iterable fs) { + List> threw = new ArrayList<>(); + for (Fun f : fs) { + try { f.f(); threw.add(null); } + catch (Throwable t) { + check(k.isAssignableFrom(t.getClass())); + threw.add(t.getClass()); + } + } + if (new HashSet(threw).size() != 1) { + fail(threw.toString()); + } + } + + private static void checkNPEConsistency(final Map m) { + m.clear(); + final ConcurrentMap cm = (m instanceof ConcurrentMap) + ? (ConcurrentMap) m + : null; + List fs = new ArrayList<>(); + fs.add(() -> check(! m.containsKey(null))); + fs.add(() -> equal(m.remove(null), null)); + fs.add(() -> equal(m.get(null), null)); + if (cm != null) { + fs.add(() -> check(! cm.remove(null,null))); + } + throwsConsistently(NullPointerException.class, fs); + + fs.clear(); + final Map sm = singletonMap(null,1); + fs.add(() -> { equal(m.put(null,1), null); m.clear();}); + fs.add(() -> { m.putAll(sm); m.clear();}); + if (cm != null) { + fs.add(() -> check(! cm.remove(null,null))); + fs.add(() -> equal(cm.putIfAbsent(null,1), 1)); + fs.add(() -> equal(cm.replace(null,1), null)); + fs.add(() -> equal(cm.replace(null,1, 1), 1)); + } + throwsConsistently(NullPointerException.class, fs); + } + + //---------------------------------------------------------------- + // NavigableMap + //---------------------------------------------------------------- + private static void + checkNavigableMapKeys(NavigableMap m, + Integer i, + Integer lower, + Integer floor, + Integer ceiling, + Integer higher) { + equal(m.lowerKey(i), lower); + equal(m.floorKey(i), floor); + equal(m.ceilingKey(i), ceiling); + equal(m.higherKey(i), higher); + } + + private static void + checkNavigableSetKeys(NavigableSet m, + Integer i, + Integer lower, + Integer floor, + Integer ceiling, + Integer higher) { + equal(m.lower(i), lower); + equal(m.floor(i), floor); + equal(m.ceiling(i), ceiling); + equal(m.higher(i), higher); + } + + static final Random rnd = new Random(); + static void equalNext(final Iterator it, Object expected) { + if (rnd.nextBoolean()) { + check(it.hasNext()); + } + equal(it.next(), expected); + } + + static void equalMaps(Map m1, Map m2) { + equal(m1, m2); + equal(m2, m1); + equal(m1.size(), m2.size()); + equal(m1.isEmpty(), m2.isEmpty()); + equal(m1.toString(), m2.toString()); + check(Arrays.equals(m1.entrySet().toArray(), m2.entrySet().toArray())); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + static void testNavigableMapRemovers(NavigableMap m) + { + final Map emptyMap = new HashMap(); + + final Map singletonMap = new HashMap(); + singletonMap.put(1, 2); + + abstract class NavigableMapView { + abstract NavigableMap view(NavigableMap m); + } + + NavigableMapView[] views = { + new NavigableMapView() { @Override + NavigableMap view(NavigableMap m) { + return m; }}, + new NavigableMapView() { @Override + NavigableMap view(NavigableMap m) { + return m.headMap(99, true); }}, + new NavigableMapView() { @Override + NavigableMap view(NavigableMap m) { + return m.tailMap(-99, false); }}, + new NavigableMapView() { @Override + NavigableMap view(NavigableMap m) { + return m.subMap(-99, true, 99, false); }}, + }; + + abstract class Remover { + abstract void remove(NavigableMap m, Object k, Object v); + } + + Remover[] removers = { + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + equal(m.remove(k), v); }}, + + new Remover() { @Override + 
void remove(NavigableMap m, Object k, Object v) { + equal(m.descendingMap().remove(k), v); }}, + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + equal(m.descendingMap().headMap(-86, false).remove(k), v); }}, + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + equal(m.descendingMap().tailMap(86, true).remove(k), v); }}, + + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + equal(m.headMap(86, true).remove(k), v); }}, + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + equal(m.tailMap(-86, true).remove(k), v); }}, + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + equal(m.subMap(-86, false, 86, true).remove(k), v); }}, + + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + check(m.keySet().remove(k)); }}, + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + check(m.navigableKeySet().remove(k)); }}, + + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + check(m.navigableKeySet().headSet(86, true).remove(k)); }}, + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + check(m.navigableKeySet().tailSet(-86, false).remove(k)); }}, + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + check(m.navigableKeySet().subSet(-86, true, 86, false) + .remove(k)); }}, + + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + check(m.descendingKeySet().headSet(-86, false).remove(k)); }}, + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + check(m.descendingKeySet().tailSet(86, true).remove(k)); }}, + new Remover() { @Override + void remove(NavigableMap m, Object k, Object v) { + check(m.descendingKeySet().subSet(86, true, -86, false) + .remove(k)); }}, + }; + + for (NavigableMapView view : views) { + for (Remover remover : removers) { + try { + m.clear(); + equalMaps(m, emptyMap); + equal(m.put(1, 2), null); + equalMaps(m, singletonMap); + NavigableMap v = view.view(m); + remover.remove(v, 1, 2); + equalMaps(m, emptyMap); + } catch (Throwable t) { unexpected(t); } + } + } + } + + private static void testNavigableMap(NavigableMap m) + { + clear(m); + checkNavigableMapKeys(m, 1, null, null, null, null); + + equal(m.put(1, 2), null); + equal(m.put(3, 4), null); + equal(m.put(5, 9), null); + + equal(m.put(1, 2), 2); + equal(m.put(3, 4), 4); + equal(m.put(5, 6), 9); + + checkNavigableMapKeys(m, 0, null, null, 1, 1); + checkNavigableMapKeys(m, 1, null, 1, 1, 3); + checkNavigableMapKeys(m, 2, 1, 1, 3, 3); + checkNavigableMapKeys(m, 3, 1, 3, 3, 5); + checkNavigableMapKeys(m, 5, 3, 5, 5, null); + checkNavigableMapKeys(m, 6, 5, 5, null, null); + + for (final Iterator it : + (Iterator[]) + new Iterator[] { + m.descendingKeySet().iterator(), + m.navigableKeySet().descendingIterator()}) { + equalNext(it, 5); + equalNext(it, 3); + equalNext(it, 1); + check(! it.hasNext()); + THROWS(NoSuchElementException.class, () -> it.next()); + } + + { + final Iterator> it + = m.descendingMap().entrySet().iterator(); + check(it.hasNext()); equal(it.next().getKey(), 5); + check(it.hasNext()); equal(it.next().getKey(), 3); + check(it.hasNext()); equal(it.next().getKey(), 1); + check(! 
it.hasNext()); + THROWS(NoSuchElementException.class, () -> it.next()); + } + + prepMapForDescItrTests(m); + checkDescItrRmFirst(m.keySet(), m.navigableKeySet().descendingIterator()); + prepMapForDescItrTests(m); + checkDescItrRmMid(m.keySet(), m.navigableKeySet().descendingIterator()); + prepMapForDescItrTests(m); + checkDescItrRmLast(m.keySet(), m.navigableKeySet().descendingIterator()); + + prepMapForDescItrTests(m); + checkDescItrRmFirst(m.keySet(), m.descendingMap().keySet().iterator()); + prepMapForDescItrTests(m); + checkDescItrRmMid(m.keySet(), m.descendingMap().keySet().iterator()); + prepMapForDescItrTests(m); + checkDescItrRmLast(m.keySet(), m.descendingMap().keySet().iterator()); + + prepMapForDescItrTests(m); + checkDescItrRmFirst(m.keySet(), m.descendingKeySet().iterator()); + prepMapForDescItrTests(m); + checkDescItrRmMid(m.keySet(), m.descendingKeySet().iterator()); + prepMapForDescItrTests(m); + checkDescItrRmLast(m.keySet(), m.descendingKeySet().iterator()); + + prepMapForDescItrTests(m); + checkDescItrRmFirst(m.values(), m.descendingMap().values().iterator()); + prepMapForDescItrTests(m); + checkDescItrRmMid(m.values(), m.descendingMap().values().iterator()); + prepMapForDescItrTests(m); + checkDescItrRmLast(m.values(), m.descendingMap().values().iterator()); + + prepMapForDescItrTests(m); + checkDescItrRmFirst((Collection)m.entrySet(), + m.descendingMap().entrySet().iterator()); + prepMapForDescItrTests(m); + checkDescItrRmMid((Collection)m.entrySet(), + m.descendingMap().entrySet().iterator()); + prepMapForDescItrTests(m); + checkDescItrRmLast((Collection)m.entrySet(), + m.descendingMap().entrySet().iterator()); + } + + private static void testNavigableSet(NavigableSet s) { + clear(s); + checkNavigableSetKeys(s, 1, null, null, null, null); + + check(s.add(1)); + check(s.add(3)); + check(s.add(5)); + + check(! s.add(1)); + check(! s.add(3)); + check(! s.add(5)); + + checkNavigableSetKeys(s, 0, null, null, 1, 1); + checkNavigableSetKeys(s, 1, null, 1, 1, 3); + checkNavigableSetKeys(s, 2, 1, 1, 3, 3); + checkNavigableSetKeys(s, 3, 1, 3, 3, 5); + checkNavigableSetKeys(s, 5, 3, 5, 5, null); + checkNavigableSetKeys(s, 6, 5, 5, null, null); + + for (final Iterator it : + (Iterator[]) + new Iterator[] { + s.descendingIterator(), + s.descendingSet().iterator()}) { + equalNext(it, 5); + equalNext(it, 3); + equalNext(it, 1); + check(! 
it.hasNext()); + THROWS(NoSuchElementException.class, () -> it.next()); + } + + prepSetForDescItrTests(s); + checkDescItrRmFirst(s, s.descendingIterator()); + prepSetForDescItrTests(s); + checkDescItrRmMid(s, s.descendingIterator()); + prepSetForDescItrTests(s); + checkDescItrRmLast(s, s.descendingIterator()); + + prepSetForDescItrTests(s); + checkDescItrRmFirst(s, s.descendingSet().iterator()); + prepSetForDescItrTests(s); + checkDescItrRmMid(s, s.descendingSet().iterator()); + prepSetForDescItrTests(s); + checkDescItrRmLast(s, s.descendingSet().iterator()); + } + + private static void prepSetForDescItrTests(Set s) { + clear(s); + check(s.add(1)); + check(s.add(3)); + check(s.add(5)); + } + + private static void prepMapForDescItrTests(Map m) { + clear(m); + equal(m.put(1, 2), null); + equal(m.put(3, 4), null); + equal(m.put(5, 9), null); + } + + //-------------------------------------------------------------------- + // Check behavior of descending iterator when first element is removed + //-------------------------------------------------------------------- + private static <T> void checkDescItrRmFirst(Collection<T> ascColl, + Iterator<T> descItr) { + T[] expected = (T[]) ascColl.toArray(); + int idx = expected.length - 1; + + equalNext(descItr, expected[idx--]); + descItr.remove(); + while (idx >= 0 && descItr.hasNext()) { + equalNext(descItr, expected[idx--]); + } + equal(descItr.hasNext(), false); + equal(idx, -1); + } + + //----------------------------------------------------------------------- + // Check behavior of descending iterator when a middle element is removed + //----------------------------------------------------------------------- + private static <T> void checkDescItrRmMid(Collection<T> ascColl, + Iterator<T> descItr) { + T[] expected = (T[]) ascColl.toArray(); + int idx = expected.length - 1; + + while (idx >= expected.length / 2) { + equalNext(descItr, expected[idx--]); + } + descItr.remove(); + while (idx >= 0 && descItr.hasNext()) { + equalNext(descItr, expected[idx--]); + } + equal(descItr.hasNext(), false); + equal(idx, -1); + } + + //----------------------------------------------------------------------- + // Check behavior of descending iterator when the last element is removed + //----------------------------------------------------------------------- + private static <T> void checkDescItrRmLast(Collection<T> ascColl, + Iterator<T> descItr) { + T[] expected = (T[]) ascColl.toArray(); + int idx = expected.length - 1; + + while (idx >= 0 && descItr.hasNext()) { + equalNext(descItr, expected[idx--]); + } + equal(idx, -1); + equal(descItr.hasNext(), false); + descItr.remove(); + equal(ascColl.contains(expected[0]), false); + } + + //--------------------- Infrastructure --------------------------- + static volatile int passed = 0, failed = 0; + static void pass() { passed++; } + static void fail() { failed++; Thread.dumpStack(); } + static void fail(String msg) { System.out.println(msg); fail(); } + static void unexpected(Throwable t) { failed++; t.printStackTrace(); } + static void check(boolean cond) { if (cond) { + pass(); + } else { + fail(); + } } + static void equal(Object x, Object y) { + if (x == null ? y == null : x.equals(y)) { + pass(); + } else {System.out.println(x + " not equal to " + y); fail();}} + static void equal(Object x, Object y, String msg) { + if (x == null ?
y == null : x.equals(y)) { + pass(); + } else {System.out.println(x + " not equal to " + y + " : " + msg); fail();}} + static void equal2(Object x, Object y) {equal(x, y); equal(y, x);} + public static void main(String[] args) throws Throwable { + try { realMain(args); } catch (Throwable t) { unexpected(t); } + + System.out.printf(Locale.US, "%nPassed = %d, failed = %d%n%n", passed, failed); + if (failed > 0) { + throw new Exception("Some tests failed"); + } + } + interface Fun {void f() throws Throwable;} + private static void THROWS(Class<? extends Throwable> k, Fun... fs) { + for (Fun f : fs) { + try { f.f(); fail("Expected " + k.getName() + " not thrown"); } + catch (Throwable t) { + if (k.isAssignableFrom(t.getClass())) { + pass(); + } else { + unexpected(t); + }} + }} + static byte[] serializedForm(Object obj) { + try { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + new ObjectOutputStream(baos).writeObject(obj); + return baos.toByteArray(); + } catch (IOException e) { throw new Error(e); }} + static Object readObject(byte[] bytes) + throws IOException, ClassNotFoundException { + InputStream is = new ByteArrayInputStream(bytes); + return new ObjectInputStream(is).readObject();} + @SuppressWarnings("unchecked") + static <T> T serialClone(T obj) { + try { return (T) readObject(serializedForm(obj)); } + catch (Exception e) { throw new Error(e); }} + private static class NewAbstractCollection<E> extends AbstractCollection<E> { + ArrayList<E> list = new ArrayList<>(); + @Override + public boolean remove(Object obj) { + return list.remove(obj); + } + @Override + public boolean add(E e) { + return list.add(e); + } + @Override + public Iterator<E> iterator() { + return list.iterator(); + } + @Override + public int size() { + return list.size(); + } + } + private static class NewAbstractSet<E> extends AbstractSet<E> { + HashSet<E> set = new HashSet<>(); + @Override + public boolean remove(Object obj) { + return set.remove(obj); + } + @Override + public boolean add(E e) { + return set.add(e); + } + @Override + public Iterator<E> iterator() { + return set.iterator(); + } + @Override + public int size() { + return set.size(); + } + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/package-info.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/package-info.java new file mode 100644 index 0000000000..cea1e8dfc4 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/collection/package-info.java @@ -0,0 +1,7 @@ +@NullMarked +@CheckReturnValue +package com.github.benmanes.caffeine.openjdk.collection; + +import org.jspecify.annotations.NullMarked; + +import com.google.errorprone.annotations.CheckReturnValue; diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/ConcurrentAssociateTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/ConcurrentAssociateTest.java new file mode 100644 index 0000000000..b78ba17e01 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/ConcurrentAssociateTest.java @@ -0,0 +1,218 @@ +/* + * Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation.
+ * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.github.benmanes.caffeine.openjdk.concurrent.concurrenthashmap; + +import java.lang.management.ManagementFactory; +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; +import java.time.Duration; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.function.BiConsumer; +import java.util.function.Supplier; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Caffeine; + +/** + * @test + * @bug 8028564 + * @run testng/timeout=1200 ConcurrentAssociateTest + * @summary Test that association operations, such as put and compute, + * place entries in the map + * @modules java.management + */ +@Test +@SuppressWarnings({"ClassNamedLikeTypeParameter", "EmptyCatch", "InterruptedExceptionSwallowed", + "InvalidBlockTag", "rawtypes", "StreamOfArray", "SystemOut", "UnnecessaryFinal", + "YodaCondition"}) +public class ConcurrentAssociateTest { + + /** Maximum time (in seconds) to wait for a test method to complete. */ + private static final int TIMEOUT = Integer.getInteger("timeout", 200); + + /** The number of entries for each thread to place in a map. */ + private static final int N = Integer.getInteger("n", 128); + + /** The number of iterations of the test. */ + private static final int I = Integer.getInteger("i", 64); + + /** Objects to be placed in the concurrent map. 
*/ + static class X { + // Limit the hash code to trigger collisions + final int hc = ThreadLocalRandom.current().nextInt(1, 9); + + @Override + public int hashCode() { return hc; } + } + + public ConcurrentMap bounded() { + return Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build().asMap(); + } + + public ConcurrentMap unbounded() { + return Caffeine.newBuilder().build().asMap(); + } + + @Test + public void testPut() throws Throwable { + test("CHM.put", bounded(), (m, o) -> m.put(o, o)); + test("CHM.put", unbounded(), (m, o) -> m.put(o, o)); + } + + @Test + public void testCompute() throws Throwable { + test("CHM.compute", bounded(), (m, o) -> m.compute(o, (k, v) -> o)); + test("CHM.compute", unbounded(), (m, o) -> m.compute(o, (k, v) -> o)); + } + + @Test + public void testComputeIfAbsent() throws Throwable { + test("CHM.computeIfAbsent", bounded(), (m, o) -> m.computeIfAbsent(o, (k) -> o)); + test("CHM.computeIfAbsent", unbounded(), (m, o) -> m.computeIfAbsent(o, (k) -> o)); + } + + @Test + public void testMerge() throws Throwable { + test("CHM.merge", bounded(), (m, o) -> m.merge(o, o, (v1, v2) -> v1)); + test("CHM.merge", unbounded(), (m, o) -> m.merge(o, o, (v1, v2) -> v1)); + } + + @Test + public void testPutAll() throws Throwable { + test("CHM.putAll", bounded(), (m, o) -> { + Map hm = new HashMap<>(); + hm.put(o, o); + m.putAll(hm); + }); + test("CHM.putAll", unbounded(), (m, o) -> { + Map hm = new HashMap<>(); + hm.put(o, o); + m.putAll(hm); + }); + } + + private static void test(String desc, ConcurrentMap m, + BiConsumer, Object> associator) throws Throwable { + for (int i = 0; i < I; i++) { + testOnce(desc, m, associator); + } + } + + @SuppressWarnings("serial") + static class AssociationFailure extends RuntimeException { + AssociationFailure(String message) { + super(message); + } + } + + private static void testOnce(String desc, ConcurrentMap m, + BiConsumer, Object> associator) throws Throwable { + CountDownLatch s = new CountDownLatch(1); + + Supplier sr = () -> () -> { + try { + if (!s.await(TIMEOUT, TimeUnit.SECONDS)) { + dumpTestThreads(); + throw new AssertionError("timed out"); + } + } + catch (InterruptedException e) { + } + + for (int i = 0; i < N; i++) { + Object o = new X(); + associator.accept(m, o); + if (!m.containsKey(o)) { + throw new AssociationFailure(desc + " failed: entry does not exist"); + } + } + }; + + // Bound concurrency to avoid degenerate performance + int ps = Math.min(Runtime.getRuntime().availableProcessors(), 8); + Stream runners = IntStream.range(0, ps) + .mapToObj(i -> sr.get()) + .map(CompletableFuture::runAsync); + + CompletableFuture all = CompletableFuture.allOf( + runners.toArray(CompletableFuture[]::new)); + + // Trigger the runners to start associating + s.countDown(); + + try { + all.get(TIMEOUT, TimeUnit.SECONDS); + } catch (TimeoutException e) { + dumpTestThreads(); + throw e; + } catch (Throwable e) { + dumpTestThreads(); + Throwable cause = e.getCause(); + if (cause instanceof AssociationFailure) { + throw cause; + } + throw e; + } + } + + /** + * A debugging tool to print stack traces of most threads, as jstack does. + * Uninteresting threads are filtered out. 
+ */ + static void dumpTestThreads() { + ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean(); + System.err.println("------ stacktrace dump start ------"); + for (ThreadInfo info : threadMXBean.dumpAllThreads(true, true)) { + final String name = info.getThreadName(); + String lockName; + if ("Signal Dispatcher".equals(name)) { + continue; + } + if ("Reference Handler".equals(name) + && (lockName = info.getLockName()) != null + && lockName.startsWith("java.lang.ref.Reference$Lock")) { + continue; + } + if ("Finalizer".equals(name) + && (lockName = info.getLockName()) != null + && lockName.startsWith("java.lang.ref.ReferenceQueue$Lock")) { + continue; + } + System.err.print(info); + } + System.err.println("------ stacktrace dump end ------"); + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/ConcurrentContainsKeyTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/ConcurrentContainsKeyTest.java new file mode 100644 index 0000000000..61344b3a1f --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/ConcurrentContainsKeyTest.java @@ -0,0 +1,163 @@ +/* + * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ +package com.github.benmanes.caffeine.openjdk.concurrent.concurrenthashmap; + +import java.time.Duration; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CountDownLatch; +import java.util.function.Supplier; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Caffeine; + +/** + * @test + * @bug 8028564 + * @run testng ConcurrentContainsKeyTest + * @summary Test that entries are always present in the map, + * when entries are held within one bin that is a tree + */ +@Test +@SuppressWarnings({"AlmostJavadoc", "ClassNamedLikeTypeParameter", "EmptyCatch", + "IdentifierName", "InvalidBlockTag", "rawtypes", "StreamOfArray"}) +public class ConcurrentContainsKeyTest { + + // The number of entries for each thread to place in a map + // Should be > ConcurrentHashMap.TREEIFY_THRESHOLD but small + // enough to allow for enough iteration overlap by multiple threads + private static final int N = Integer.getInteger("n", 16); + // The number of rounds each thread performs per entry + private static final int R = Integer.getInteger("r", 32); + // The number of iterations of the test + private static final int I = Integer.getInteger("i", 256); + + // Object to be placed in the concurrent map + static class X implements Comparable { + + private final int a; + + X(int a) { + this.a = a; + } + + @Override + public int compareTo(X o) { + return this.a - o.a; + } + + @Override + public int hashCode() { + // Return the same hash code to guarantee collisions + return 0; + } + } + + public ConcurrentMap bounded() { + return Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .initialCapacity(64) + .build().asMap(); + } + + public ConcurrentMap unbounded() { + return Caffeine.newBuilder() + .initialCapacity(64) + .build().asMap(); + } + + @Test + public void testContainsKey() { + X[] content = IntStream.range(0, N).mapToObj(i -> new X(i)).toArray(X[]::new); + // Create map with an initial size >= ConcurrentHashMap.TREEIFY_THRESHOLD + // ensuring tree'ification will occur for a small number of entries + // with the same hash code + ConcurrentMap bounded = bounded(); + Stream.of(content).forEach(x -> bounded.put(x, x)); + test(content, bounded); + + ConcurrentMap unbounded = unbounded(); + Stream.of(content).forEach(x -> unbounded.put(x, x)); + test(content, unbounded); + } + + private static void test(X[] content, ConcurrentMap m) { + for (int i = 0; i < I; i++) { + testOnce(content, m); + } + } + + @SuppressWarnings("serial") + static class AssociationFailure extends RuntimeException { + AssociationFailure(String message) { + super(message); + } + } + + private static void testOnce(Object[] content, ConcurrentMap m) { + CountDownLatch s = new CountDownLatch(1); + + Supplier sr = () -> () -> { + try { + s.await(); + } + catch (InterruptedException e) { + } + + for (int i = 0; i < R * N; i++) { + Object o = content[i % content.length]; + if (!m.containsKey(o)) { + throw new AssociationFailure("CHM.containsKey failed: entry does not exist"); + } + } + }; + + int ps = Runtime.getRuntime().availableProcessors(); + Stream runners = IntStream.range(0, ps) + .mapToObj(i -> sr.get()) + .map(CompletableFuture::runAsync); + + CompletableFuture all = CompletableFuture.allOf( + runners.toArray(CompletableFuture[]::new)); + + // Trigger the runners 
to start checking key membership + s.countDown(); + try { + all.join(); + } + catch (CompletionException e) { + Throwable t = e.getCause(); + if (t instanceof AssociationFailure) { + throw (AssociationFailure) t; + } + else { + throw e; + } + } + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/DistinctEntrySetElements.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/DistinctEntrySetElements.java new file mode 100644 index 0000000000..a1a3aad6a7 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/DistinctEntrySetElements.java @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ + +/* + * Portions Copyright (c) 2011 IBM Corporation + */ +package com.github.benmanes.caffeine.openjdk.concurrent.concurrenthashmap; + +import java.time.Duration; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 6312706 + * @summary Sets from Map.entrySet() return distinct objects for each Entry + * @author Neil Richards , + */ +@SuppressWarnings("YodaCondition") +public class DistinctEntrySetElements { + + @Test + public void bounded() { + Cache cache = Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build(); + testDistinct(cache.asMap()); + } + + @Test + public void unbounded() { + Cache cache = Caffeine.newBuilder().build(); + testDistinct(cache.asMap()); + } + + public static void testDistinct(ConcurrentMap concurrentHashMap) { + concurrentHashMap.put("One", "Un"); + concurrentHashMap.put("Two", "Deux"); + concurrentHashMap.put("Three", "Trois"); + + Set> entrySet = concurrentHashMap.entrySet(); + HashSet> hashSet = new HashSet<>(entrySet); + + if (false == hashSet.equals(entrySet)) { + throw new RuntimeException("Test FAILED: Sets are not equal."); + } + if (hashSet.hashCode() != entrySet.hashCode()) { + throw new RuntimeException("Test FAILED: Set's hashcodes are not equal."); + } + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/LoopHelpers.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/LoopHelpers.java new file mode 100644 index 
0000000000..b25d858d2f --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/LoopHelpers.java @@ -0,0 +1,108 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ + +/* + * This file is available under and governed by the GNU General Public + * License version 2 only, as published by the Free Software Foundation. + * However, the following notice accompanied the original version of this + * file: + * + * Written by Doug Lea with assistance from members of JCP JSR-166 + * Expert Group and released to the public domain, as explained at + * http://creativecommons.org/publicdomain/zero/1.0/ + */ +package com.github.benmanes.caffeine.openjdk.concurrent.concurrenthashmap; + +/** + * Misc utilities in JSR166 performance tests + */ +@SuppressWarnings("PrivateConstructorForUtilityClass") +class LoopHelpers { + + // Some mindless computation to do between synchronizations... + + /** + * generates 32 bit pseudo-random numbers. + * Adapted from http://www.snippets.org + */ + public static int compute1(int x) { + int lo = 16807 * (x & 0xFFFF); + int hi = 16807 * (x >>> 16); + lo += (hi & 0x7FFF) << 16; + if ((lo & 0x80000000) != 0) { + lo &= 0x7fffffff; + ++lo; + } + lo += hi >>> 15; + if (lo == 0 || (lo & 0x80000000) != 0) { + lo &= 0x7fffffff; + ++lo; + } + return lo; + } + + /** + * Computes a linear congruential random number a random number + * of times. + */ + public static int compute2(int x) { + int loops = (x >>> 4) & 7; + while (loops-- > 0) { + x = (x * 2147483647) % 16807; + } + return x; + } + + public static class BarrierTimer implements Runnable { + public volatile long startTime; + public volatile long endTime; + @Override + public void run() { + long t = System.nanoTime(); + if (startTime == 0) { + startTime = t; + } else { + endTime = t; + } + } + public void clear() { + startTime = 0; + endTime = 0; + } + public long getTime() { + return endTime - startTime; + } + } + + public static String rightJustify(long n) { + // There's probably a better way to do this... 
+ String field = " "; + String num = Long.toString(n); + if (num.length() >= field.length()) { + return num; + } + StringBuilder b = new StringBuilder(field); + b.replace(b.length()-num.length(), b.length(), num); + return b.toString(); + } + +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/MapCheck.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/MapCheck.java new file mode 100644 index 0000000000..addf93cdc6 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/MapCheck.java @@ -0,0 +1,705 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ + +/* + * This file is available under and governed by the GNU General Public + * License version 2 only, as published by the Free Software Foundation. 
+ * However, the following notice accompanied the original version of this + * file: + * + * Written by Doug Lea with assistance from members of JCP JSR-166 + * Expert Group and released to the public domain, as explained at + * http://creativecommons.org/publicdomain/zero/1.0/ + */ +package com.github.benmanes.caffeine.openjdk.concurrent.concurrenthashmap; + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.time.Duration; +import java.util.Enumeration; +import java.util.Hashtable; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ThreadLocalRandom; +import java.util.function.Supplier; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 4486658 + * @run main/timeout=240 MapCheck + * @summary Times and checks basic map operations + */ +@SuppressWarnings({"CatchingUnchecked", "NonFinalStaticField", "NullAway", "rawtypes", "resource", + "SystemOut", "unchecked", "UnnecessarilyFullyQualified", "UnnecessaryParentheses", "unused"}) +public class MapCheck { + + static final int absentSize = 1 << 17; + static final int absentMask = absentSize - 1; + static Object[] absent = new Object[absentSize]; + + static final Object MISSING = new Object(); + + static TestTimer timer = new TestTimer(); + + static void reallyAssert(boolean b) { + if (!b) { + throw new Error("Failed Assertion"); + } + } + + @Test + public void bounded() { + test(() -> Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build().asMap()); + } + + @Test + public void unbounded() { + test(() -> Caffeine.newBuilder().build().asMap()); + } + + public static void test(Supplier supplier) { + int numTests = 8; + int size = 50000; + +// Class mapClass = java.util.concurrent.ConcurrentHashMap.class; +// if (args.length > 0) { +// try { +// mapClass = Class.forName(args[0]); +// } catch (ClassNotFoundException e) { +// throw new RuntimeException("Class " + args[0] + " not found."); +// } +// } +// +// if (args.length > 1) { +// numTests = Integer.parseInt(args[1]); +// } +// +// if (args.length > 2) { +// size = Integer.parseInt(args[2]); +// } +// +// boolean doSerializeTest = args.length > 3; +// +// System.out.println("Testing " + mapClass.getName() + " trials: " + numTests + " size: " + size); + + boolean doSerializeTest = false; + for (int i = 0; i < absentSize; ++i) { + absent[i] = new Object(); + } + + Object[] key = new Object[size]; + for (int i = 0; i < size; ++i) { + key[i] = new Object(); + } + + forceMem(size * 8); + + for (int rep = 0; rep < numTests; ++rep) { + runTest(supplier, key); + } + + TestTimer.printStats(); + +// if (doSerializeTest) { +// stest(newMap(mapClass), size); +// } + } + +// static Map newMap(Class cl) { +// try { +// return (Map)cl.getDeclaredConstructor().newInstance(); +// } catch (Exception e) { +// throw new RuntimeException("Can't instantiate " + cl + ": " + e); +// } +// } + + static void runTest(Supplier supplier, Object[] key) { + shuffle(key); + int size = key.length; + long startTime = System.currentTimeMillis(); + test(supplier, key); + long time = System.currentTimeMillis() - startTime; + } + + static void forceMem(int n) { + // force enough memory + Long[] junk = new Long[n]; + for 
(int i = 0; i < junk.length; ++i) { + junk[i] = Long.valueOf(i); + } + int sum = 0; + for (int i = 0; i < junk.length; ++i) { + sum += (int)(junk[i].longValue() + i); + } + if (sum == 0) { + System.out.println("Useless number = " + sum); + } + junk = null; + // System.gc(); + } + + static void t1(String nm, int n, Map s, Object[] key, int expect) { + int sum = 0; + int iters = 4; + timer.start(nm, n * iters); + for (int j = 0; j < iters; ++j) { + for (int i = 0; i < n; i++) { + if (s.get(key[i]) != null) { + ++sum; + } + } + } + timer.finish(); + reallyAssert(sum == expect * iters); + } + + static void t2(String nm, int n, Map s, Object[] key, int expect) { + int sum = 0; + timer.start(nm, n); + for (int i = 0; i < n; i++) { + if (s.remove(key[i]) != null) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == expect); + } + + static void t3(String nm, int n, Map s, Object[] key, int expect) { + int sum = 0; + timer.start(nm, n); + for (int i = 0; i < n; i++) { + if (s.put(key[i], absent[i & absentMask]) == null) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == expect); + } + + static void t4(String nm, int n, Map s, Object[] key, int expect) { + int sum = 0; + timer.start(nm, n); + for (int i = 0; i < n; i++) { + if (s.containsKey(key[i])) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == expect); + } + + static void t5(String nm, int n, Map s, Object[] key, int expect) { + int sum = 0; + timer.start(nm, n/2); + for (int i = n-2; i >= 0; i-=2) { + if (s.remove(key[i]) != null) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == expect); + } + + static void t6(String nm, int n, Map s, Object[] k1, Object[] k2) { + int sum = 0; + timer.start(nm, n * 2); + for (int i = 0; i < n; i++) { + if (s.get(k1[i]) != null) { + ++sum; + } + if (s.get(k2[i & absentMask]) != null) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == n); + } + + static void t7(String nm, int n, Map s, Object[] k1, Object[] k2) { + int sum = 0; + timer.start(nm, n * 2); + for (int i = 0; i < n; i++) { + if (s.containsKey(k1[i])) { + ++sum; + } + if (s.containsKey(k2[i & absentMask])) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == n); + } + + static void t8(String nm, int n, Map s, Object[] key, int expect) { + int sum = 0; + timer.start(nm, n); + for (int i = 0; i < n; i++) { + if (s.get(key[i]) != null) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == expect); + } + + + static void t9(Map s) { + int sum = 0; + int iters = 20; + timer.start("ContainsValue (/n) ", iters * s.size()); + int step = absentSize / iters; + for (int i = 0; i < absentSize; i += step) { + if (s.containsValue(absent[i])) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum != 0); + } + + + static void ktest(Map s, int size, Object[] key) { + timer.start("ContainsKey ", size); + Set ks = s.keySet(); + int sum = 0; + for (int i = 0; i < size; i++) { + if (ks.contains(key[i])) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == size); + } + + + static void ittest1(Map s, int size) { + int sum = 0; + timer.start("Iter Key ", size); + for (Iterator it = s.keySet().iterator(); it.hasNext(); ) { + if (it.next() != MISSING) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == size); + } + + static void ittest2(Map s, int size) { + int sum = 0; + timer.start("Iter Value ", size); + for (Iterator it = s.values().iterator(); it.hasNext(); ) { + if (it.next() != MISSING) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == size); + } + static void ittest3(Map s, int size) { + int sum 
= 0; + timer.start("Iter Entry ", size); + for (Iterator it = s.entrySet().iterator(); it.hasNext(); ) { + if (it.next() != MISSING) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == size); + } + + static void ittest4(Map s, int size, int pos) { + IdentityHashMap seen = new IdentityHashMap(size); + reallyAssert(s.size() == size); + int sum = 0; + timer.start("Iter XEntry ", size); + Iterator it = s.entrySet().iterator(); + Object k = null; + Object v = null; + for (int i = 0; i < size-pos; ++i) { + Map.Entry x = (Map.Entry)(it.next()); + k = x.getKey(); + v = x.getValue(); + seen.put(k, k); + if (x != MISSING) { + ++sum; + } + } + reallyAssert(s.containsKey(k)); + it.remove(); + reallyAssert(!s.containsKey(k)); + while (it.hasNext()) { + Map.Entry x = (Map.Entry)(it.next()); + Object k2 = x.getKey(); + seen.put(k2, k2); + if (x != MISSING) { + ++sum; + } + } + + reallyAssert(s.size() == size-1); + s.put(k, v); + reallyAssert(seen.size() == size); + timer.finish(); + reallyAssert(sum == size); + reallyAssert(s.size() == size); + } + + + static void ittest(Map s, int size) { + ittest1(s, size); + ittest2(s, size); + ittest3(s, size); + // for (int i = 0; i < size-1; ++i) + // ittest4(s, size, i); + } + + static void entest1(Hashtable ht, int size) { + int sum = 0; + + timer.start("Iter Enumeration Key ", size); + for (Enumeration en = ht.keys(); en.hasMoreElements(); ) { + if (en.nextElement() != MISSING) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == size); + } + + static void entest2(Hashtable ht, int size) { + int sum = 0; + timer.start("Iter Enumeration Value ", size); + for (Enumeration en = ht.elements(); en.hasMoreElements(); ) { + if (en.nextElement() != MISSING) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == size); + } + + + static void entest3(Hashtable ht, int size) { + int sum = 0; + + timer.start("Iterf Enumeration Key ", size); + Enumeration en = ht.keys(); + for (int i = 0; i < size; ++i) { + if (en.nextElement() != MISSING) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == size); + } + + static void entest4(Hashtable ht, int size) { + int sum = 0; + timer.start("Iterf Enumeration Value", size); + Enumeration en = ht.elements(); + for (int i = 0; i < size; ++i) { + if (en.nextElement() != MISSING) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == size); + } + + static void entest(Map s, int size) { + if (s instanceof Hashtable) { + Hashtable ht = (Hashtable)s; + // entest3(ht, size); + // entest4(ht, size); + entest1(ht, size); + entest2(ht, size); + entest1(ht, size); + entest2(ht, size); + entest1(ht, size); + entest2(ht, size); + } + } + + static void rtest(Map s, int size) { + timer.start("Remove (iterator) ", size); + for (Iterator it = s.keySet().iterator(); it.hasNext(); ) { + it.next(); + it.remove(); + } + timer.finish(); + } + + static void rvtest(Map s, int size) { + timer.start("Remove (iterator) ", size); + for (Iterator it = s.values().iterator(); it.hasNext(); ) { + it.next(); + it.remove(); + } + timer.finish(); + } + + + static void dtest(Supplier supplier, Map s, int size, Object[] key) { + timer.start("Put (putAll) ", size * 2); + Map s2 = null; + try { + s2 = supplier.get(); + s2.putAll(s); + } + catch (Exception e) { e.printStackTrace(); return; } + timer.finish(); + + timer.start("Iter Equals ", size * 2); + boolean eqt = s2.equals(s) && s.equals(s2); + reallyAssert(eqt); + timer.finish(); + + timer.start("Iter HashCode ", size * 2); + int shc = s.hashCode(); + int s2hc = s2.hashCode(); + reallyAssert(shc 
== s2hc); + timer.finish(); + + timer.start("Put (present) ", size); + s2.putAll(s); + timer.finish(); + + timer.start("Iter EntrySet contains ", size * 2); + Set es2 = s2.entrySet(); + int sum = 0; + for (Iterator i1 = s.entrySet().iterator(); i1.hasNext(); ) { + Object entry = i1.next(); + if (es2.contains(entry)) { + ++sum; + } + } + timer.finish(); + reallyAssert(sum == size); + + t6("Get ", size, s2, key, absent); + + Object hold = s2.get(key[size-1]); + s2.put(key[size-1], absent[0]); + timer.start("Iter Equals ", size * 2); + eqt = s2.equals(s) && s.equals(s2); + reallyAssert(!eqt); + timer.finish(); + + timer.start("Iter HashCode ", size * 2); + int s1h = s.hashCode(); + int s2h = s2.hashCode(); + reallyAssert(s1h != s2h); + timer.finish(); + + s2.put(key[size-1], hold); + timer.start("Remove (iterator) ", size * 2); + Iterator s2i = s2.entrySet().iterator(); + Set es = s.entrySet(); + while (s2i.hasNext()) { + es.remove(s2i.next()); + } + timer.finish(); + + reallyAssert(s.isEmpty()); + + timer.start("Clear ", size); + s2.clear(); + timer.finish(); + reallyAssert(s2.isEmpty() && s.isEmpty()); + } + + static void stest(Map s, int size) throws Exception { + if (!(s instanceof Serializable)) { + return; + } + System.out.print("Serialize : "); + + for (int i = 0; i < size; i++) { + s.put(i, Boolean.TRUE); + } + + long startTime = System.currentTimeMillis(); + + FileOutputStream fs = new FileOutputStream("MapCheck.dat"); + ObjectOutputStream out = new ObjectOutputStream(new BufferedOutputStream(fs)); + out.writeObject(s); + out.close(); + + FileInputStream is = new FileInputStream("MapCheck.dat"); + ObjectInputStream in = new ObjectInputStream(new BufferedInputStream(is)); + Map m = (Map)in.readObject(); + + long endTime = System.currentTimeMillis(); + long time = endTime - startTime; + + System.out.print(time + "ms"); + + if (s instanceof IdentityHashMap) { + return; + } + reallyAssert(s.equals(m)); + } + + + static void test(Supplier supplier, Object[] key) { + Map s = supplier.get(); + int size = key.length; + + t3("Put (absent) ", size, s, key, size); + t3("Put (present) ", size, s, key, 0); + t7("ContainsKey ", size, s, key, absent); + t4("ContainsKey ", size, s, key, size); + ktest(s, size, key); + t4("ContainsKey ", absentSize, s, absent, 0); + t6("Get ", size, s, key, absent); + t1("Get (present) ", size, s, key, size); + t1("Get (absent) ", absentSize, s, absent, 0); + t2("Remove (absent) ", absentSize, s, absent, 0); + t5("Remove (present) ", size, s, key, size / 2); + t3("Put (half present) ", size, s, key, size / 2); + + ittest(s, size); + entest(s, size); + t9(s); + rtest(s, size); + + t4("ContainsKey ", size, s, key, 0); + t2("Remove (absent) ", size, s, key, 0); + t3("Put (presized) ", size, s, key, size); + dtest(supplier, s, size, key); + } + + static class TestTimer { + private String name; + private long numOps; + private long startTime; + private String cname; + + static final java.util.TreeMap accum = new java.util.TreeMap(); + + static void printStats() { + for (Iterator it = accum.entrySet().iterator(); it.hasNext(); ) { + Map.Entry e = (Map.Entry)(it.next()); + Stats stats = (Stats)(e.getValue()); + int n = stats.number; + double t; + if (n > 0) { + t = stats.sum / n; + } else { + t = stats.least; + } + long nano = Math.round(1000000.0 * t); + System.out.println(e.getKey() + ": " + nano); + } + } + + void start(String name, long numOps) { + this.name = name; + this.cname = classify(); + this.numOps = numOps; + startTime = System.currentTimeMillis(); + } + + + 
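+    // Buckets the timer label into a coarse category ("Get ", "Put ", "Remove ", "Iter ", or null),
+    // so finish() can fold the per-op time into a category-wide aggregate as well as the named entry.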
String classify() { + if (name.startsWith("Get")) { + return "Get "; + } else if (name.startsWith("Put")) { + return "Put "; + } else if (name.startsWith("Remove")) { + return "Remove "; + } else if (name.startsWith("Iter")) { + return "Iter "; + } else { + return null; + } + } + + void finish() { + long endTime = System.currentTimeMillis(); + long time = endTime - startTime; + double timePerOp = ((double)time)/numOps; + + Object st = accum.get(name); + if (st == null) { + accum.put(name, new Stats(timePerOp)); + } else { + Stats stats = (Stats) st; + stats.sum += timePerOp; + stats.number++; + if (timePerOp < stats.least) { + stats.least = timePerOp; + } + } + + if (cname != null) { + st = accum.get(cname); + if (st == null) { + accum.put(cname, new Stats(timePerOp)); + } else { + Stats stats = (Stats) st; + stats.sum += timePerOp; + stats.number++; + if (timePerOp < stats.least) { + stats.least = timePerOp; + } + } + } + + } + + } + + static class Stats { + double sum = 0; + double least; + int number = 0; + Stats(double t) { least = t; } + } + + static void shuffle(Object[] keys) { + ThreadLocalRandom rnd = ThreadLocalRandom.current(); + int size = keys.length; + for (int i=size; i>1; i--) { + int r = rnd.nextInt(i); + Object t = keys[i-1]; + keys[i-1] = keys[r]; + keys[r] = t; + } + } + +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/MapLoops.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/MapLoops.java new file mode 100644 index 0000000000..be384b09b7 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/MapLoops.java @@ -0,0 +1,305 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ + +/* + * This file is available under and governed by the GNU General Public + * License version 2 only, as published by the Free Software Foundation. 
+ * However, the following notice accompanied the original version of this + * file: + * + * Written by Doug Lea with assistance from members of JCP JSR-166 + * Expert Group and released to the public domain, as explained at + * http://creativecommons.org/publicdomain/zero/1.0/ + */ +package com.github.benmanes.caffeine.openjdk.concurrent.concurrenthashmap; + +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +import java.time.Duration; +import java.util.List; +import java.util.Map; +import java.util.SplittableRandom; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.function.Supplier; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 4486658 + * @summary Exercise multithreaded maps, by default ConcurrentHashMap. + * Multithreaded hash table test. Each thread does a random walk + * though elements of "key" array. On each iteration, it checks if + * table includes key. If absent, with probability pinsert it + * inserts it, and if present, with probability premove it removes + * it. (pinsert and premove are expressed as percentages to simplify + * parsing from command line.) + * @library /test/lib + * @run main/timeout=1600 MapLoops + */ + +/* + * @test + * @summary Exercise multithreaded maps, using only heavy monitors. + * @requires os.arch=="x86" | os.arch=="i386" | os.arch=="amd64" | os.arch=="x86_64" | os.arch=="aarch64" | os.arch == "ppc64" | os.arch == "ppc64le" | os.arch == "riscv64" | os.arch == "s390x" + * @requires vm.debug + * @library /test/lib + * @run main/othervm/timeout=1600 -XX:LockingMode=0 -XX:+VerifyHeavyMonitors MapLoops + */ +@SuppressWarnings({"InconsistentOverloads", "InterruptedExceptionSwallowed", + "NonFinalStaticField", "rawtypes", "SystemOut", "unchecked"}) +public class MapLoops { + static final long LONG_DELAY_MS = 10_000; + static int nkeys = 1000; // 10_000 + static int pinsert = 60; + static int premove = 2; + static int maxThreads = 100; + static int nops = 10000; // 100_000 + static int removesPerMaxRandom; + static int insertsPerMaxRandom; + + @Test + public void bounded() throws Exception { + var pool = Executors.newCachedThreadPool(); + try { + test(pool, () -> Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build().asMap()); + } finally { + pool.shutdown(); + } + } + + @Test + public void unbounded() throws Exception { + var pool = Executors.newCachedThreadPool(); + try { + test(pool, () -> Caffeine.newBuilder().build().asMap()); + } finally { + pool.shutdown(); + } + } + + public void test(ExecutorService pool, Supplier supplier) throws Exception { + +// Class mapClass = null; +// if (args.length > 0) { +// try { +// mapClass = Class.forName(args[0]); +// } catch (ClassNotFoundException e) { +// throw new RuntimeException("Class " + args[0] + " not found."); +// } +// } else { +// mapClass = java.util.concurrent.ConcurrentHashMap.class; +// } +// +// if (args.length > 1) { +// maxThreads = Integer.parseInt(args[1]); +// } +// +// if (args.length > 2) { +// nkeys = Integer.parseInt(args[2]); +// } +// +// if (args.length > 3) { +// pinsert = Integer.parseInt(args[3]); +// } +// +// if (args.length > 4) { +// premove = Integer.parseInt(args[4]); +// } +// +// if (args.length > 5) { +// nops = Integer.parseInt(args[5]); +// } + + 
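+    // premove and pinsert are given as percentages; scaling them against Integer.MAX_VALUE lets
+    // step() choose an action by comparing them directly to rnd.nextInt(Integer.MAX_VALUE).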
// normalize probabilities wrt random number generator + removesPerMaxRandom = (int)(premove/100.0 * 0x7FFFFFFFL); + insertsPerMaxRandom = (int)(pinsert/100.0 * 0x7FFFFFFFL); + +// System.out.print("Class: " + mapClass.getName()); + System.out.print(" threads: " + maxThreads); + System.out.print(" size: " + nkeys); + System.out.print(" ins: " + pinsert); + System.out.print(" rem: " + premove); + System.out.print(" ops: " + nops); + System.out.println(); + + int k = 1; + int warmups = 2; + var throwables = new CopyOnWriteArrayList(); + for (int i = 1; i <= maxThreads;) { + test(pool, i, nkeys, supplier, throwables); + if (warmups > 0) { + --warmups; + } else if (i == k) { + k = i << 1; + i = i + (i >>> 1); + } + else if (i == 1 && k == 2) { + i = k; + warmups = 1; + } else { + i = k; + } + } + pool.shutdown(); + if (! pool.awaitTermination(LONG_DELAY_MS, MILLISECONDS)) { + throw new Error(); + } + + if (! throwables.isEmpty()) { + throw new Error + (throwables.size() + " thread(s) terminated abruptly."); + } + } + + static Integer[] makeKeys(int n) { + SplittableRandom rnd = new SplittableRandom(); + Integer[] key = new Integer[n]; + for (int i = 0; i < key.length; ++i) { + key[i] = rnd.nextInt(); + } + return key; + } + + static void shuffleKeys(Integer[] key) { + SplittableRandom rnd = new SplittableRandom(); + for (int i = key.length; i > 1; --i) { + int j = rnd.nextInt(i); + Integer tmp = key[j]; + key[j] = key[i-1]; + key[i-1] = tmp; + } + } + + void test(Executor pool, int i, int nkeys, + Supplier supplier, List throwables) throws Exception { + System.out.print("Threads: " + i + "\t:"); + Map map = supplier.get(); + Integer[] key = makeKeys(nkeys); + // Uncomment to start with a non-empty table + // for (int j = 0; j < nkeys; j += 4) // start 1/4 occupied + // map.put(key[j], key[j]); + LoopHelpers.BarrierTimer timer = new LoopHelpers.BarrierTimer(); + CyclicBarrier barrier = new CyclicBarrier(i+1, timer); + SplittableRandom rnd = new SplittableRandom(); + for (int t = 0; t < i; ++t) { + pool.execute(new Runner(map, key, barrier, rnd.split(), throwables)); + } + barrier.await(); + barrier.await(); + long time = timer.getTime(); + long tpo = time / (i * (long)nops); + System.out.print(LoopHelpers.rightJustify(tpo) + " ns per op"); + double secs = time / 1000000000.0; + System.out.println("\t " + secs + "s run time"); + map.clear(); + } + + static class Runner implements Runnable { + final Map map; + final Integer[] key; + final CyclicBarrier barrier; + final SplittableRandom rnd; + final List throwables; + int position; + int total; + + Runner(Map map, + Integer[] key, + CyclicBarrier barrier, + SplittableRandom rnd, + List throwables) { + this.map = map; + this.key = key; + this.barrier = barrier; + this.rnd = rnd; + this.throwables = throwables; + position = key.length / 2; + } + + int step() { + // random-walk around key positions, bunching accesses + int r = rnd.nextInt(Integer.MAX_VALUE); + position += (r & 7) - 3; + while (position >= key.length) { + position -= key.length; + } + while (position < 0) { + position += key.length; + } + + Integer k = key[position]; + Integer x = map.get(k); + + if (x != null) { + if (x.intValue() != k.intValue()) { + throw new Error("bad mapping: " + x + " to " + k); + } + + if (r < removesPerMaxRandom) { + if (map.remove(k) != null) { + position = total % key.length; // move from position + return 2; + } + } + } + else if (r < insertsPerMaxRandom) { + ++position; + map.put(k, k); + return 2; + } + + // Uncomment to add a little computation between 
accesses + // total += LoopHelpers.compute1(k.intValue()); + total += r; + return 1; + } + + @Override + public void run() { + try { + barrier.await(); + int ops = nops; + while (ops > 0) { + ops -= step(); + } + barrier.await(); + } + catch (Throwable throwable) { + synchronized (System.err) { + System.err.println("--------------------------------"); + throwable.printStackTrace(); + } + throwables.add(throwable); + } + } + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/ToArray.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/ToArray.java new file mode 100644 index 0000000000..47e358721f --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/ToArray.java @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2004, 2024, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ +package com.github.benmanes.caffeine.openjdk.concurrent.concurrenthashmap; + +import java.time.Duration; +import java.util.List; +import java.util.Locale; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.Executors; +import java.util.concurrent.ThreadLocalRandom; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 4486658 8010293 + * @summary thread safety of toArray methods of collection views + * @author Martin Buchholz + */ +@SuppressWarnings({"AlmostJavadoc", "CollectorMutability", + "IdentifierName", "ShortCircuitBoolean", "UnnecessaryFinal"}) +public class ToArray { + + @Test + public void bounded() { + test(() -> Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build()); + } + + @Test + public void unbounded() { + test(() -> Caffeine.newBuilder().build()); + } + + public static void test(Supplier> supplier) { + final int runsPerTest = Integer.getInteger("jsr166.runsPerTest", 1); + final int reps = 10 * runsPerTest; + for (int i = reps; i--> 0; ) { + executeTest(supplier.get().asMap()); + } + } + + static void executeTest(ConcurrentMap m) { + var executor = Executors.newCachedThreadPool(); + try { + final ThreadLocalRandom rnd = ThreadLocalRandom.current(); + final int nCPU = Runtime.getRuntime().availableProcessors(); + final int minWorkers = 2; + final int maxWorkers = Math.max(minWorkers, Math.min(32, nCPU)); + final int nWorkers = rnd.nextInt(minWorkers, maxWorkers + 1); + final int sizePerWorker = 1024; + final int maxSize = nWorkers * sizePerWorker; + + // The foreman busy-checks that the size of the arrays obtained + // from the keys and values views grows monotonically until it + // reaches the maximum size. 
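+            // Monotonic growth is the expected invariant here: the workers only put globally
+            // unique keys and never remove them, so successive toArray() snapshots can only grow.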
+ + // NOTE: these size constraints are not specific to toArray and are + // applicable to any form of traversal of the collection views + CompletableFuture foreman = CompletableFuture.runAsync(new Runnable() { + private int prevSize = 0; + + private boolean checkProgress(Object[] a) { + int size = a.length; + if (size < prevSize || size > maxSize) { + throw new AssertionError( + String.format(Locale.US, "prevSize=%d size=%d maxSize=%d", + prevSize, size, maxSize)); + } + prevSize = size; + return size == maxSize; + } + + @Override + public void run() { + Integer[] empty = new Integer[0]; + for (; ; ) { + if (checkProgress(m.values().toArray()) + & checkProgress(m.keySet().toArray()) + & checkProgress(m.values().toArray(empty)) + & checkProgress(m.keySet().toArray(empty))) { + return; + } + } + } + }, executor); + + // Each worker puts globally unique keys into the map + List> workers = + IntStream.range(0, nWorkers) + .mapToObj(w -> (Runnable) () -> { + for (int i = 0, o = w * sizePerWorker; i < sizePerWorker; i++) { + m.put(o + i, i); + } + }) + .map(r -> CompletableFuture.runAsync(r, executor)) + .collect(Collectors.toList()); + + // Wait for workers and foreman to complete + workers.forEach(CompletableFuture::join); + foreman.join(); + } finally { + executor.shutdown(); + } + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/package-info.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/package-info.java new file mode 100644 index 0000000000..76c48a8e84 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrenthashmap/package-info.java @@ -0,0 +1,7 @@ +@NullMarked +@CheckReturnValue +package com.github.benmanes.caffeine.openjdk.concurrent.concurrenthashmap; + +import org.jspecify.annotations.NullMarked; + +import com.google.errorprone.annotations.CheckReturnValue; diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrentmap/ConcurrentModification.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrentmap/ConcurrentModification.java new file mode 100644 index 0000000000..82b52663b6 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrentmap/ConcurrentModification.java @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2005, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ +package com.github.benmanes.caffeine.openjdk.concurrent.concurrentmap; + +import static java.util.Locale.US; + +/* + * @test + * @bug 6312056 4155650 4294891 4904074 + * @summary Reasonable things should happen if mutating while iterating. + */ +import java.time.Duration; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ConcurrentSkipListMap; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +@SuppressWarnings({"MultiVariableDeclaration", "NonAtomicVolatileUpdate", + "NonFinalStaticField", "SystemOut"}) +public class ConcurrentModification { + static volatile int passed = 0, failed = 0; + + static void fail(String msg) { + failed++; + new AssertionError(msg).printStackTrace(); + } + + static void unexpected(Throwable t) { + failed++; + t.printStackTrace(); + } + + static void check(boolean condition, String msg) { + if (condition) { + passed++; + } else { + fail(msg); + } + } + + static void check(boolean condition) { + check(condition, "Assertion failed"); + } + + private static void test(ConcurrentMap m) + { + try { + m.clear(); + check(m.isEmpty()); + m.put(1,2); + Iterator> it = m.entrySet().iterator(); + if (it.hasNext()) { + m.remove(1); // sneaky + Map.Entry e = it.next(); + check(m.isEmpty()); + check(e.getKey() == 1); + check(e.getValue() == 2); + } + } catch (Throwable t) {unexpected(t);} + + try { + m.clear(); + check(m.isEmpty()); + m.put(1,2); + Iterator> it = m.entrySet().iterator(); + if (it.hasNext()) { + m.put(1,3); // sneaky + Map.Entry e = it.next(); + check(e.getKey() == 1); + check(e.getValue() == 2 || e.getValue() == 3); + if (m instanceof ConcurrentHashMap) { + e.setValue(4); + check(m.get(1) == 4); + } + } + } catch (Throwable t) {unexpected(t);} + } + + public static void main(String[] args) { + test(new ConcurrentHashMap()); + test(new ConcurrentSkipListMap()); + + System.out.printf(US, "%nPassed = %d, failed = %d%n%n", passed, failed); + if (failed > 0) { + throw new Error("Some tests failed"); + } + } + + @Test + public void bounded() { + Cache cache = Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build(); + test(cache.asMap()); + } + + @Test + public void unbounded() { + Cache cache = Caffeine.newBuilder().build(); + test(cache.asMap()); + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrentmap/ConcurrentRemoveIf.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrentmap/ConcurrentRemoveIf.java new file mode 100644 index 0000000000..a147eb7c4a --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrentmap/ConcurrentRemoveIf.java @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.github.benmanes.caffeine.openjdk.concurrent.concurrentmap; + +import static org.testng.Assert.assertEquals; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ConcurrentNavigableMap; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.function.Consumer; +import java.util.function.Supplier; + +import org.testng.annotations.AfterClass; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @run testng ConcurrentRemoveIf + * @bug 8078645 + * @summary Test removeIf on views of concurrent maps + */ +@Test +@SuppressWarnings({"CanonicalAnnotationSyntax", "InterruptedExceptionSwallowed"}) +public class ConcurrentRemoveIf { + static final int K = 100; + static final int SIZE = 1000; + static final int HALF_SIZE = SIZE / 2; + + @DataProvider() + public static Object[][] concurrentMapViewRemoveIfActions() { + List rows = new ArrayList<>(); + + // ConcurrentMap classes to test + Map>> maps = new HashMap<>(); + maps.put("Bounded", () -> { + Cache cache = Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build(); + return cache.asMap(); + }); + maps.put("Unbounded", () -> { + Cache cache = Caffeine.newBuilder().build(); + return cache.asMap(); + }); + +// maps.put("ConcurrentHashMap", ConcurrentHashMap::new); +// maps.put("ConcurrentSkipListMap", ConcurrentSkipListMap::new); + + // ConcurrentMap actions + Map>> actions = new HashMap<>(); + actions.put(".entrySet().removeIf()", m -> m.entrySet().removeIf(e -> e.getValue() == 0)); + actions.put(".values().removeIf()", m -> m.values().removeIf(v -> v == 0)); + + // ConcurrentNavigableMap actions + Map>> navActions = new HashMap<>(); +// navActions.put(".headMap()/tailMap().entrySet().removeIf()", +// m -> { +// ConcurrentMap left = m.headMap(HALF_SIZE, false); +// ConcurrentMap right = m.tailMap(HALF_SIZE, true); +// left.entrySet().removeIf(e -> e.getValue() == 0); +// right.entrySet().removeIf(e -> e.getValue() == 0); +// }); +// navActions.put(".headMap()/tailMap().values().removeIf()", +// m -> { +// ConcurrentMap left = m.headMap(HALF_SIZE, false); +// ConcurrentMap right = m.tailMap(HALF_SIZE, true); +// left.values().removeIf(v -> v == 0); +// right.values().removeIf(v -> v == 0); +// }); +// navActions.put(".descendingMap().entrySet().removeIf()", +// m -> { +// ConcurrentMap dm = m.descendingMap(); +// dm.entrySet().removeIf(e -> e.getValue() == 0); +// }); +// navActions.put(".descendingMap().values().removeIf()", +// m -> { +// ConcurrentMap dm = m.descendingMap(); +// dm.values().removeIf(v -> v == 0); 
+// }); + + maps.forEach((mapDescription, sm) -> { + actions.forEach((actionDescription, action) -> { + rows.add(new Object[] {mapDescription + actionDescription, sm, action}); + }); + + if (sm.get() instanceof ConcurrentNavigableMap) { + navActions.forEach((actionDescription, action) -> { + rows.add(new Object[] {mapDescription + actionDescription, sm, action}); + }); + } + }); + + return rows.toArray(new Object[0][]); + } + + ExecutorService executorService = Executors.newCachedThreadPool(); + + @AfterClass + public void after() { + executorService.shutdown(); + } + + @Test(dataProvider = "concurrentMapViewRemoveIfActions") + public void testMap(String desc, Supplier> ms, Consumer> action) + throws InterruptedException { + for (int i = 0; i < K; i++) { + testMap(ms.get(), action); + } + } + + private void testMap(ConcurrentMap map, Consumer> action) + throws InterruptedException { + // put 0's + fillMap(map, 0); + + // To start working simultaneously + CyclicBarrier threadStarted = new CyclicBarrier(2); + + // This task puts 1's into map + CompletableFuture putter = CompletableFuture.runAsync( + awaitOn(threadStarted, () -> fillMap(map, 1)), + executorService); + + // This task performs the map action to remove all 0's from map + CompletableFuture remover = CompletableFuture.runAsync( + awaitOn(threadStarted, () -> action.accept(map)), + executorService); + + // Wait for both tasks to complete + CompletableFuture.allOf(putter, remover).join(); + + assertEquals(map.size(), SIZE, "Map size incorrect"); + map.forEach((k, v) -> assertEquals(v, (Integer)1)); + } + + static void fillMap(ConcurrentMap map, int value) { + for (int i = 0; i < SIZE; i++) { + map.put(i, value); + } + } + + static Runnable awaitOn(CyclicBarrier threadStarted, Runnable r) { + return () -> { + try { + threadStarted.await(); + } + catch (Exception e) { + throw new RuntimeException(e); + } + r.run(); + }; + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrentmap/package-info.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrentmap/package-info.java new file mode 100644 index 0000000000..7c3f007104 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/concurrent/concurrentmap/package-info.java @@ -0,0 +1,7 @@ +@NullMarked +@CheckReturnValue +package com.github.benmanes.caffeine.openjdk.concurrent.concurrentmap; + +import org.jspecify.annotations.NullMarked; + +import com.google.errorprone.annotations.CheckReturnValue; diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/Collisions.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/Collisions.java new file mode 100644 index 0000000000..ffb3534240 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/Collisions.java @@ -0,0 +1,182 @@ +/* + * Copyright (c) 2012, 2016, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.github.benmanes.caffeine.openjdk.map; + +import static java.util.Locale.US; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertTrue; + +import java.util.BitSet; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.function.Supplier; + +import org.testng.annotations.Test; + +/* + * @test + * @bug 7126277 + * @run testng/othervm -Dtest.map.collisions.shortrun=true Collisions + * @summary Ensure Maps behave well with lots of hashCode() collisions. + */ +@SuppressWarnings("unused") +public class Collisions extends MapWithCollisionsProviders { + + @Test(dataProvider = "mapsWithObjects") + void testIntegerIteration(String desc, Supplier> ms, IntKey val) { + Map map = ms.get(); + int mapSize = map.size(); + + BitSet all = new BitSet(mapSize); + for (Map.Entry each : map.entrySet()) { + assertFalse(all.get(each.getKey().getValue()), "Iteration: key already seen"); + all.set(each.getKey().getValue()); + } + + all.flip(0, mapSize); + assertTrue(all.isEmpty(), "Iteration: some keys not visited"); + + for (IntKey each : map.keySet()) { + assertFalse(all.get(each.getValue()), "Iteration: key already seen"); + all.set(each.getValue()); + } + + all.flip(0, mapSize); + assertTrue(all.isEmpty(), "Iteration: some keys not visited"); + + int count = 0; + for (IntKey each : map.values()) { + count++; + } + + assertEquals(map.size(), count, String.format(US, + "Iteration: value count matches size m%d != c%d", map.size(), count)); + } + + @Test(dataProvider = "mapsWithStrings") + void testStringIteration(String desc, Supplier> ms, String val) { + Map map = ms.get(); + int mapSize = map.size(); + + BitSet all = new BitSet(mapSize); + for (Map.Entry each : map.entrySet()) { + String key = each.getKey(); + boolean longKey = key.length() > 5; + int index = key.hashCode() + (longKey ? mapSize / 2 : 0); + assertFalse(all.get(index), "key already seen"); + all.set(index); + } + + all.flip(0, mapSize); + assertTrue(all.isEmpty(), "some keys not visited"); + + for (String each : map.keySet()) { + boolean longKey = each.length() > 5; + int index = each.hashCode() + (longKey ? 
mapSize / 2 : 0); + assertFalse(all.get(index), "key already seen"); + all.set(index); + } + + all.flip(0, mapSize); + assertTrue(all.isEmpty(), "some keys not visited"); + + int count = 0; + for (String each : map.values()) { + count++; + } + + assertEquals(map.size(), mapSize, + String.format(US, "value count matches size m%d != k%d", map.size(), mapSize)); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testRemove(String desc, Supplier> ms, Object val) { + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + + for (int i = 0; i < keys.length; i++) { + Object each = keys[i]; + assertNotNull(map.remove(each), + String.format(US, "remove: %s[%d]%s", desc, i, each)); + } + + assertTrue(map.size() == 0 && map.isEmpty(), + String.format(US, "remove: map empty. size=%d", map.size())); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testKeysIteratorRemove(String desc, Supplier> ms, Object val) { + Map map = ms.get(); + + Iterator each = map.keySet().iterator(); + while (each.hasNext()) { + Object t = each.next(); + each.remove(); + assertFalse(map.containsKey(t), String.format(US, "not removed: %s", each)); + } + + assertTrue(map.size() == 0 && map.isEmpty(), + String.format(US, "remove: map empty. size=%d", map.size())); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testValuesIteratorRemove(String desc, Supplier> ms, Object val) { + Map map = ms.get(); + + Iterator each = map.values().iterator(); + while (each.hasNext()) { + Object t = each.next(); + each.remove(); + assertFalse(map.containsValue(t), String.format(US, "not removed: %s", each)); + } + + assertTrue(map.size() == 0 && map.isEmpty(), + String.format(US, "remove: map empty. size=%d", map.size())); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testEntriesIteratorRemove(String desc, Supplier> ms, Object val) { + Map map = ms.get(); + + Iterator> each = map.entrySet().iterator(); + while (each.hasNext()) { + Map.Entry t = each.next(); + Object key = t.getKey(); + Object value = t.getValue(); + each.remove(); + assertTrue((map instanceof IdentityHashMap) || !map.entrySet().contains(t), + String.format(US, "not removed: %s", each)); + assertFalse(map.containsKey(key), + String.format(US, "not removed: %s", each)); + assertFalse(map.containsValue(value), + String.format(US, "not removed: %s", each)); + } + + assertTrue(map.size() == 0 && map.isEmpty(), + String.format(US, "remove: map empty. size=%d", map.size())); + } + +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/Defaults.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/Defaults.java new file mode 100644 index 0000000000..b315962070 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/Defaults.java @@ -0,0 +1,1092 @@ +/* + * Copyright (c) 2013, 2017, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). 
+ * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.github.benmanes.caffeine.openjdk.map; + +import static java.util.Objects.requireNonNull; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertSame; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.fail; + +import java.time.Duration; +import java.util.AbstractMap; +import java.util.AbstractSet; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumMap; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Hashtable; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.WeakHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.function.Supplier; + +import org.testng.Assert.ThrowingRunnable; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 8010122 8004518 8024331 8024688 + * @summary Test Map default methods + * @author Mike Duigou + * @run testng Defaults + */ +@SuppressWarnings({"AlmostJavadoc", "BooleanParameter", "DirectReturn", "EmptyBlockTag", + "EnumOrdinal", "IdentifierName", "IdentityConversion", "IsNull", "JdkObsolete", + "MethodCanBeStatic", "MultiVariableDeclaration", "NullAway", "PreferredInterfaceType", + "PreferredInterfaceType", "PreferredInterfaceType", "PrivateConstructorForUtilityClass", + "rawtypes", "RedundantStringConversion", "unchecked", "UnnecessaryFinal", "unused", + "YodaCondition"}) +public class Defaults { + + @Test(dataProvider = "Map rw=all keys=withNull values=withNull") + public void testGetOrDefaultNulls(String description, Map map) { + assertTrue(map.containsKey(null), description + ": null key absent"); + assertNull(map.get(null), description + ": value not null"); + assertSame(map.get(null), map.getOrDefault(null, EXTRA_VALUE), description + ": values should match"); + } + + @Test(dataProvider = "Map rw=all keys=all values=all") + public void testGetOrDefault(String description, Map map) { + assertTrue(map.containsKey(KEYS[1]), "expected key missing"); + assertSame(map.get(KEYS[1]), map.getOrDefault(KEYS[1], EXTRA_VALUE), "values should match"); + assertFalse(map.containsKey(EXTRA_KEY), "expected absent key"); + assertSame(map.getOrDefault(EXTRA_KEY, EXTRA_VALUE), EXTRA_VALUE, "value not returned as default"); + assertNull(map.getOrDefault(EXTRA_KEY, null), "null not returned as default"); + } + + @Test(dataProvider = "Map rw=true keys=withNull values=withNull") + public void testPutIfAbsentNulls(String description, Map map) { + // null -> null + assertTrue(map.containsKey(null), "null key absent"); + 
assertNull(map.get(null), "value not null"); + assertNull(map.putIfAbsent(null, EXTRA_VALUE), "previous not null"); + // null -> EXTRA_VALUE + assertTrue(map.containsKey(null), "null key absent"); + assertSame(map.get(null), EXTRA_VALUE, "unexpected value"); + assertSame(map.putIfAbsent(null, null), EXTRA_VALUE, "previous not expected value"); + assertTrue(map.containsKey(null), "null key absent"); + assertSame(map.get(null), EXTRA_VALUE, "unexpected value"); + assertSame(map.remove(null), EXTRA_VALUE, "removed unexpected value"); + // null -> + + assertFalse(map.containsKey(null), description + ": key present after remove"); + assertNull(map.putIfAbsent(null, null), "previous not null"); + // null -> null + assertTrue(map.containsKey(null), "null key absent"); + assertNull(map.get(null), "value not null"); + assertNull(map.putIfAbsent(null, EXTRA_VALUE), "previous not null"); + assertSame(map.get(null), EXTRA_VALUE, "value not expected"); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public void testPutIfAbsent(String description, Map map) { + // 1 -> 1 + assertTrue(map.containsKey(KEYS[1])); + Object expected = map.get(KEYS[1]); + assertTrue(null == expected || expected == VALUES[1]); + assertSame(map.putIfAbsent(KEYS[1], EXTRA_VALUE), expected); + assertSame(map.get(KEYS[1]), expected); + + // EXTRA_KEY -> + assertFalse(map.containsKey(EXTRA_KEY)); + assertSame(map.putIfAbsent(EXTRA_KEY, EXTRA_VALUE), null); + assertSame(map.get(EXTRA_KEY), EXTRA_VALUE); + assertSame(map.putIfAbsent(EXTRA_KEY, VALUES[2]), EXTRA_VALUE); + assertSame(map.get(EXTRA_KEY), EXTRA_VALUE); + } + + @Test(dataProvider = "Map rw=all keys=all values=all") + public void testForEach(String description, Map map) { + IntegerEnum[] EACH_KEY = new IntegerEnum[map.size()]; + + map.forEach((k, v) -> { + int idx = (null == k) ? 0 : k.ordinal(); // substitute for index. + assertNull(EACH_KEY[idx]); + EACH_KEY[idx] = (idx == 0) ? KEYS[0] : k; // substitute for comparison. + assertSame(v, map.get(k)); + }); + + assertEquals(KEYS, EACH_KEY, description); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public static void testReplaceAll(String description, Map map) { + IntegerEnum[] EACH_KEY = new IntegerEnum[map.size()]; + Set EACH_REPLACE = new HashSet<>(map.size()); + + map.replaceAll((k,v) -> { + int idx = (null == k) ? 0 : k.ordinal(); // substitute for index. + assertNull(EACH_KEY[idx]); + EACH_KEY[idx] = (idx == 0) ? KEYS[0] : k; // substitute for comparison. 
+ assertSame(v, map.get(k)); + String replacement = v + " replaced"; + EACH_REPLACE.add(replacement); + return replacement; + }); + + assertEquals(KEYS, EACH_KEY, description); + assertEquals(map.values().size(), EACH_REPLACE.size(), description + EACH_REPLACE); + assertTrue(EACH_REPLACE.containsAll(map.values()), description + " : " + EACH_REPLACE + " != " + map.values()); + assertTrue(map.values().containsAll(EACH_REPLACE), description + " : " + EACH_REPLACE + " != " + map.values()); + } + + @Test(dataProvider = "Map rw=true keys=nonNull values=nonNull") + public static void testReplaceAllNoNullReplacement(String description, Map map) { + assertThrowsNPE(() -> map.replaceAll(null)); + assertThrowsNPE(() -> map.replaceAll((k,v) -> null)); //should not allow replacement with null value + } + + @Test(dataProvider = "Map rw=true keys=withNull values=withNull") + public static void testRemoveNulls(String description, Map map) { + assertTrue(map.containsKey(null), "null key absent"); + assertNull(map.get(null), "value not null"); + assertFalse(map.remove(null, EXTRA_VALUE), description); + assertTrue(map.containsKey(null)); + assertNull(map.get(null)); + assertTrue(map.remove(null, null)); + assertFalse(map.containsKey(null)); + assertNull(map.get(null)); + assertFalse(map.remove(null, null)); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public static void testRemove(String description, Map map) { + assertTrue(map.containsKey(KEYS[1])); + Object expected = map.get(KEYS[1]); + assertTrue(null == expected || expected == VALUES[1]); + assertFalse(map.remove(KEYS[1], EXTRA_VALUE), description); + assertSame(map.get(KEYS[1]), expected); + assertTrue(map.remove(KEYS[1], expected)); + assertNull(map.get(KEYS[1])); + assertFalse(map.remove(KEYS[1], expected)); + + assertFalse(map.containsKey(EXTRA_KEY)); + assertFalse(map.remove(EXTRA_KEY, EXTRA_VALUE)); + } + + @Test(dataProvider = "Map rw=true keys=withNull values=withNull") + public void testReplaceKVNulls(String description, Map map) { + assertTrue(map.containsKey(null), "null key absent"); + assertNull(map.get(null), "value not null"); + assertSame(map.replace(null, EXTRA_VALUE), null); + assertSame(map.get(null), EXTRA_VALUE); + } + + @Test(dataProvider = "Map rw=true keys=nonNull values=nonNull") + public void testReplaceKVNoNulls(String description, Map map) { + assertTrue(map.containsKey(FIRST_KEY), "expected key missing"); + assertSame(map.get(FIRST_KEY), FIRST_VALUE, "found wrong value"); + assertThrowsNPE(() -> map.replace(FIRST_KEY, null)); + assertSame(map.replace(FIRST_KEY, EXTRA_VALUE), FIRST_VALUE, description + ": replaced wrong value"); + assertSame(map.get(FIRST_KEY), EXTRA_VALUE, "found wrong value"); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public void testReplaceKV(String description, Map map) { + assertTrue(map.containsKey(KEYS[1])); + Object expected = map.get(KEYS[1]); + assertTrue(null == expected || expected == VALUES[1]); + assertSame(map.replace(KEYS[1], EXTRA_VALUE), expected); + assertSame(map.get(KEYS[1]), EXTRA_VALUE); + + assertFalse(map.containsKey(EXTRA_KEY)); + assertNull(map.replace(EXTRA_KEY, EXTRA_VALUE)); + assertFalse(map.containsKey(EXTRA_KEY)); + assertNull(map.get(EXTRA_KEY)); + assertNull(map.put(EXTRA_KEY, EXTRA_VALUE)); + assertSame(map.get(EXTRA_KEY), EXTRA_VALUE); + assertSame(map.replace(EXTRA_KEY, (String)expected), EXTRA_VALUE); + assertSame(map.get(EXTRA_KEY), expected); + } + + @Test(dataProvider = "Map rw=true keys=withNull values=withNull") + public 
void testReplaceKVVNulls(String description, Map map) { + assertTrue(map.containsKey(null), "null key absent"); + assertNull(map.get(null), "value not null"); + assertFalse(map.replace(null, EXTRA_VALUE, EXTRA_VALUE)); + assertNull(map.get(null)); + assertTrue(map.replace(null, null, EXTRA_VALUE)); + assertSame(map.get(null), EXTRA_VALUE); + assertTrue(map.replace(null, EXTRA_VALUE, EXTRA_VALUE)); + assertSame(map.get(null), EXTRA_VALUE); + } + + @Test(dataProvider = "Map rw=true keys=nonNull values=nonNull") + public void testReplaceKVVNoNulls(String description, Map map) { + assertTrue(map.containsKey(FIRST_KEY), "expected key missing"); + assertSame(map.get(FIRST_KEY), FIRST_VALUE, "found wrong value"); + assertThrowsNPE(() -> map.replace(FIRST_KEY, FIRST_VALUE, null)); + assertThrowsNPE( + () -> { + if (!map.replace(FIRST_KEY, null, EXTRA_VALUE)) { + throw new NullPointerException("default returns false rather than throwing"); + } + }); + assertTrue(map.replace(FIRST_KEY, FIRST_VALUE, EXTRA_VALUE), description + ": replaced wrong value"); + assertSame(map.get(FIRST_KEY), EXTRA_VALUE, "found wrong value"); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public void testReplaceKVV(String description, Map map) { + assertTrue(map.containsKey(KEYS[1])); + Object expected = map.get(KEYS[1]); + assertTrue(null == expected || expected == VALUES[1]); + assertFalse(map.replace(KEYS[1], EXTRA_VALUE, EXTRA_VALUE)); + assertSame(map.get(KEYS[1]), expected); + assertTrue(map.replace(KEYS[1], (String)expected, EXTRA_VALUE)); + assertSame(map.get(KEYS[1]), EXTRA_VALUE); + assertTrue(map.replace(KEYS[1], EXTRA_VALUE, EXTRA_VALUE)); + assertSame(map.get(KEYS[1]), EXTRA_VALUE); + + assertFalse(map.containsKey(EXTRA_KEY)); + assertFalse(map.replace(EXTRA_KEY, EXTRA_VALUE, EXTRA_VALUE)); + assertFalse(map.containsKey(EXTRA_KEY)); + assertNull(map.get(EXTRA_KEY)); + assertNull(map.put(EXTRA_KEY, EXTRA_VALUE)); + assertTrue(map.containsKey(EXTRA_KEY)); + assertSame(map.get(EXTRA_KEY), EXTRA_VALUE); + assertTrue(map.replace(EXTRA_KEY, EXTRA_VALUE, EXTRA_VALUE)); + assertSame(map.get(EXTRA_KEY), EXTRA_VALUE); + } + + @Test(dataProvider = "Map rw=true keys=withNull values=withNull") + public void testComputeIfAbsentNulls(String description, Map map) { + // null -> null + assertTrue(map.containsKey(null), "null key absent"); + assertNull(map.get(null), "value not null"); + assertSame(map.computeIfAbsent(null, (k) -> null), null, "not expected result"); + assertTrue(map.containsKey(null), "null key absent"); + assertNull(map.get(null), "value not null"); + assertSame(map.computeIfAbsent(null, (k) -> EXTRA_VALUE), EXTRA_VALUE, "not mapped to result"); + // null -> EXTRA_VALUE + assertTrue(map.containsKey(null), "null key absent"); + assertSame(map.get(null), EXTRA_VALUE, "not expected value"); + assertSame(map.remove(null), EXTRA_VALUE, "removed unexpected value"); + // null -> + assertFalse(map.containsKey(null), "null key present"); + assertSame(map.computeIfAbsent(null, (k) -> EXTRA_VALUE), EXTRA_VALUE, "not mapped to result"); + // null -> EXTRA_VALUE + assertTrue(map.containsKey(null), "null key absent"); + assertSame(map.get(null), EXTRA_VALUE, "not expected value"); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public void testComputeIfAbsent(String description, Map map) { + // 1 -> 1 + assertTrue(map.containsKey(KEYS[1])); + Object expected = map.get(KEYS[1]); + assertTrue(null == expected || expected == VALUES[1], description + String.valueOf(expected)); + expected = 
(null == expected) ? EXTRA_VALUE : expected; + assertSame(map.computeIfAbsent(KEYS[1], (k) -> EXTRA_VALUE), expected, description); + assertSame(map.get(KEYS[1]), expected, description); + + // EXTRA_KEY -> + assertFalse(map.containsKey(EXTRA_KEY)); + assertNull(map.computeIfAbsent(EXTRA_KEY, (k) -> null)); + assertFalse(map.containsKey(EXTRA_KEY)); + assertSame(map.computeIfAbsent(EXTRA_KEY, (k) -> EXTRA_VALUE), EXTRA_VALUE); + // EXTRA_KEY -> EXTRA_VALUE + assertSame(map.get(EXTRA_KEY), EXTRA_VALUE); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public void testComputeIfAbsentNullFunction(String description, Map map) { + assertThrowsNPE(() -> map.computeIfAbsent(KEYS[1], null)); + } + + @Test(dataProvider = "Map rw=true keys=withNull values=withNull") + public void testComputeIfPresentNulls(String description, Map map) { + assertTrue(map.containsKey(null), description + ": null key absent"); + assertNull(map.get(null), description + ": value not null"); + assertSame(map.computeIfPresent(null, (k, v) -> { + fail(description + ": null value is not deemed present"); + return EXTRA_VALUE; + }), null, description); + assertTrue(map.containsKey(null)); + assertNull(map.get(null), description); + assertNull(map.remove(EXTRA_KEY), description + ": unexpected mapping"); + assertNull(map.put(EXTRA_KEY, null), description + ": unexpected value"); + assertSame(map.computeIfPresent(EXTRA_KEY, (k, v) -> { + fail(description + ": null value is not deemed present"); + return EXTRA_VALUE; + }), null, description); + assertNull(map.get(EXTRA_KEY), description + ": null mapping gone"); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public void testComputeIfPresent(String description, Map map) { + assertTrue(map.containsKey(KEYS[1])); + Object value = map.get(KEYS[1]); + assertTrue(null == value || value == VALUES[1], description + String.valueOf(value)); + Object expected = (null == value) ? 
null : EXTRA_VALUE; + assertSame(map.computeIfPresent(KEYS[1], (k, v) -> { + assertSame(v, value); + return EXTRA_VALUE; + }), expected, description); + assertSame(map.get(KEYS[1]), expected, description); + + assertFalse(map.containsKey(EXTRA_KEY)); + assertSame(map.computeIfPresent(EXTRA_KEY, (k, v) -> { + fail(); + return EXTRA_VALUE; + }), null); + assertFalse(map.containsKey(EXTRA_KEY)); + assertSame(map.get(EXTRA_KEY), null); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public void testComputeIfPresentNullFunction(String description, Map map) { + assertThrowsNPE(() -> map.computeIfPresent(KEYS[1], null)); + } + + @Test(dataProvider = "Map rw=true keys=withNull values=withNull") + public void testComputeNulls(String description, Map map) { + assertTrue(map.containsKey(null), "null key absent"); + assertNull(map.get(null), "value not null"); + assertSame(map.compute(null, (k, v) -> { + assertNull(k); + assertNull(v); + return null; + }), null, description); + assertFalse(map.containsKey(null), description + ": null key present."); + assertSame(map.compute(null, (k, v) -> { + assertSame(k, null); + assertNull(v); + return EXTRA_VALUE; + }), EXTRA_VALUE, description); + assertTrue(map.containsKey(null)); + assertSame(map.get(null), EXTRA_VALUE, description); + assertSame(map.remove(null), EXTRA_VALUE, description + ": removed value not expected"); + // no mapping before and after + assertFalse(map.containsKey(null), description + ": null key present"); + assertSame(map.compute(null, (k, v) -> { + assertNull(k); + assertNull(v); + return null; + }), null, description + ": expected null result" ); + assertFalse(map.containsKey(null), description + ": null key present"); + // compute with map not containing value + assertNull(map.remove(EXTRA_KEY), description + ": unexpected mapping"); + assertFalse(map.containsKey(EXTRA_KEY), description + ": key present"); + assertSame(map.compute(EXTRA_KEY, (k, v) -> { + assertSame(k, EXTRA_KEY); + assertNull(v); + return null; + }), null, description); + assertFalse(map.containsKey(EXTRA_KEY), description + ": null key present"); + // ensure removal. 
+ assertNull(map.put(EXTRA_KEY, EXTRA_VALUE)); + assertSame(map.compute(EXTRA_KEY, (k, v) -> { + assertSame(k, EXTRA_KEY); + assertSame(v, EXTRA_VALUE); + return null; + }), null, description + ": null resulted expected"); + assertFalse(map.containsKey(EXTRA_KEY), description + ": null key present"); + // compute with map containing null value + assertNull(map.put(EXTRA_KEY, null), description + ": unexpected value"); + assertSame(map.compute(EXTRA_KEY, (k, v) -> { + assertSame(k, EXTRA_KEY); + assertNull(v); + return null; + }), null, description); + assertFalse(map.containsKey(EXTRA_KEY), description + ": null key present"); + assertNull(map.put(EXTRA_KEY, null), description + ": unexpected value"); + assertSame(map.compute(EXTRA_KEY, (k, v) -> { + assertSame(k, EXTRA_KEY); + assertNull(v); + return EXTRA_VALUE; + }), EXTRA_VALUE, description); + assertTrue(map.containsKey(EXTRA_KEY), "null key present"); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public void testCompute(String description, Map map) { + assertTrue(map.containsKey(KEYS[1])); + Object value = map.get(KEYS[1]); + assertTrue(null == value || value == VALUES[1], description + String.valueOf(value)); + assertSame(map.compute(KEYS[1], (k, v) -> { + assertSame(k, KEYS[1]); + assertSame(v, value); + return EXTRA_VALUE; + }), EXTRA_VALUE, description); + assertSame(map.get(KEYS[1]), EXTRA_VALUE, description); + assertNull(map.compute(KEYS[1], (k, v) -> { + assertSame(v, EXTRA_VALUE); + return null; + }), description); + assertFalse(map.containsKey(KEYS[1])); + + assertFalse(map.containsKey(EXTRA_KEY)); + assertSame(map.compute(EXTRA_KEY, (k, v) -> { + assertNull(v); + return EXTRA_VALUE; + }), EXTRA_VALUE); + assertTrue(map.containsKey(EXTRA_KEY)); + assertSame(map.get(EXTRA_KEY), EXTRA_VALUE); + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public void testComputeNullFunction(String description, Map map) { + assertThrowsNPE(() -> map.compute(KEYS[1], null)); + } + + @Test(dataProvider = "MergeCases") + private void testMerge(String description, Map map, Merging.Value oldValue, Merging.Value newValue, Merging.Merger merger, Merging.Value put, Merging.Value result) { + // add and check initial conditions. + switch (oldValue) { + case ABSENT : + map.remove(EXTRA_KEY); + assertFalse(map.containsKey(EXTRA_KEY), "key not absent"); + break; + case NULL : + map.put(EXTRA_KEY, null); + assertTrue(map.containsKey(EXTRA_KEY), "key absent"); + assertNull(map.get(EXTRA_KEY), "wrong value"); + break; + case OLDVALUE : + map.put(EXTRA_KEY, VALUES[1]); + assertTrue(map.containsKey(EXTRA_KEY), "key absent"); + assertSame(map.get(EXTRA_KEY), VALUES[1], "wrong value"); + break; + default: + fail("unexpected old value"); + } + + String returned = map.merge(EXTRA_KEY, + newValue == Merging.Value.NULL ? 
(String) null : VALUES[2], + merger + ); + + // check result + + switch (result) { + case NULL : + assertNull(returned, "wrong value"); + break; + case NEWVALUE : + assertSame(returned, VALUES[2], "wrong value"); + break; + case RESULT : + assertSame(returned, VALUES[3], "wrong value"); + break; + default: + fail("unexpected new value"); + } + + // check map + switch (put) { + case ABSENT : + assertFalse(map.containsKey(EXTRA_KEY), "key not absent"); + break; + case NULL : + assertTrue(map.containsKey(EXTRA_KEY), "key absent"); + assertNull(map.get(EXTRA_KEY), "wrong value"); + break; + case NEWVALUE : + assertTrue(map.containsKey(EXTRA_KEY), "key absent"); + assertSame(map.get(EXTRA_KEY), VALUES[2], "wrong value"); + break; + case RESULT : + assertTrue(map.containsKey(EXTRA_KEY), "key absent"); + assertSame(map.get(EXTRA_KEY), VALUES[3], "wrong value"); + break; + default: + fail("unexpected new value"); + } + } + + @Test(dataProvider = "Map rw=true keys=all values=all") + public void testMergeNullMerger(String description, Map map) { + assertThrowsNPE(() -> map.merge(KEYS[1], VALUES[1], null)); + } + + /** A function that flipflops between running two other functions. */ + static BiFunction twoStep(AtomicBoolean b, + BiFunction first, + BiFunction second) { + return (t, u) -> { + boolean bb = b.get(); + try { + return (b.get() ? first : second).apply(t, u); + } finally { + b.set(!bb); + }}; + } + + /** + * Simulates races by modifying the map within the mapping function. + */ + @Test(dataProvider = "ImplementsConcurrentMap") + public void testConcurrentMap_computeIfAbsent_racy(String name, ConcurrentMap map) { + final Long two = 2L; + Function f, g; + + // race not detected if function returns null + f = (k) -> { map.put(two, 42L); return null; }; + assertNull(map.computeIfAbsent(two, f)); + assertEquals(42L, (long)map.get(two)); + + map.clear(); + f = (k) -> { map.put(two, 42L); return 86L; }; + assertEquals(42L, (long)map.computeIfAbsent(two, f)); + assertEquals(42L, (long)map.get(two)); + + // mapping function ignored if value already exists + map.put(two, 99L); + assertEquals(99L, (long)map.computeIfAbsent(two, f)); + assertEquals(99L, (long)map.get(two)); + } + + /** + * Simulates races by modifying the map within the remapping function. + */ + @Test(dataProvider = "ImplementsConcurrentMap") + public void testConcurrentMap_computeIfPresent_racy(String name, ConcurrentMap map) { + final AtomicBoolean b = new AtomicBoolean(true); + final Long two = 2L; + BiFunction f, g; + + for (Long val : new Long[] { null, 86L }) { + map.clear(); + + // Function not invoked if no mapping exists + f = (k, v) -> { map.put(two, 42L); return val; }; + assertNull(map.computeIfPresent(two, f)); + assertNull(map.get(two)); + + map.put(two, 42L); + f = (k, v) -> { map.put(two, 86L); return val; }; + g = (k, v) -> { + assertSame(two, k); + assertEquals(86L, (long)v); + return null; + }; + assertNull(map.computeIfPresent(two, twoStep(b, f, g))); + assertFalse(map.containsKey(two)); + assertTrue(b.get()); + + map.put(two, 42L); + f = (k, v) -> { map.put(two, 86L); return val; }; + g = (k, v) -> { + assertSame(two, k); + assertEquals(86L, (long)v); + return 99L; + }; + assertEquals(99L, (long)map.computeIfPresent(two, twoStep(b, f, g))); + assertTrue(map.containsKey(two)); + assertTrue(b.get()); + } + } + + @Test(dataProvider = "ImplementsConcurrentMap") + public void testConcurrentMap_compute_simple(String name, ConcurrentMap map) { + BiFunction fun = (k, v) -> ((v == null) ? 
0L : k + v); + assertEquals(Long.valueOf(0L), map.compute(3L, fun)); + assertEquals(Long.valueOf(3L), map.compute(3L, fun)); + assertEquals(Long.valueOf(6L), map.compute(3L, fun)); + assertNull(map.compute(3L, (k, v) -> null)); + assertTrue(map.isEmpty()); + + assertEquals(Long.valueOf(0L), map.compute(3L, fun)); + assertEquals(Long.valueOf(3L), map.compute(3L, fun)); + assertEquals(Long.valueOf(6L), map.compute(3L, fun)); + assertNull(map.compute(3L, (k, v) -> null)); + assertTrue(map.isEmpty()); + } + + /** + * Simulates races by modifying the map within the remapping function. + */ + @Test(dataProvider = "ImplementsConcurrentMap") + public void testConcurrentMap_compute_racy(String name, ConcurrentMap map) { + final AtomicBoolean b = new AtomicBoolean(true); + final Long two = 2L; + BiFunction f, g; + + // null -> null is a no-op; race not detected + f = (k, v) -> { map.put(two, 42L); return null; }; + assertNull(map.compute(two, f)); + assertEquals(42L, (long)map.get(two)); + + for (Long val : new Long[] { null, 86L }) { + map.clear(); + + f = (k, v) -> { map.put(two, 42L); return 86L; }; + g = (k, v) -> { + assertSame(two, k); + assertEquals(42L, (long)v); + return k + v; + }; + assertEquals(44L, (long)map.compute(two, twoStep(b, f, g))); + assertEquals(44L, (long)map.get(two)); + assertTrue(b.get()); + + f = (k, v) -> { map.remove(two); return val; }; + g = (k, v) -> { + assertSame(two, k); + assertNull(v); + return 44L; + }; + assertEquals(44L, (long)map.compute(two, twoStep(b, f, g))); + assertEquals(44L, (long)map.get(two)); + assertTrue(map.containsKey(two)); + assertTrue(b.get()); + + f = (k, v) -> { map.remove(two); return val; }; + g = (k, v) -> { + assertSame(two, k); + assertNull(v); + return null; + }; + assertNull(map.compute(two, twoStep(b, f, g))); + assertNull(map.get(two)); + assertFalse(map.containsKey(two)); + assertTrue(b.get()); + } + } + + /** + * Simulates races by modifying the map within the remapping function. 
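+ * The first invocation of the remapping function mutates the map; the flip-flopping twoStep helper then routes the retried invocation to a function that asserts the updated state.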
+ */ + @Test(dataProvider = "ImplementsConcurrentMap") + public void testConcurrentMap_merge_racy(String name, ConcurrentMap map) { + final AtomicBoolean b = new AtomicBoolean(true); + final Long two = 2L; + BiFunction f, g; + + for (Long val : new Long[] { null, 86L }) { + map.clear(); + + f = (v, w) -> { throw new AssertionError(); }; + assertEquals(99L, (long)map.merge(two, 99L, f)); + assertEquals(99L, (long)map.get(two)); + + f = (v, w) -> { map.put(two, 42L); return val; }; + g = (v, w) -> { + assertEquals(42L, (long)v); + assertEquals(3L, (long)w); + return v + w; + }; + assertEquals(45L, (long)map.merge(two, 3L, twoStep(b, f, g))); + assertEquals(45L, (long)map.get(two)); + assertTrue(b.get()); + + f = (v, w) -> { map.remove(two); return val; }; + g = (k, v) -> { throw new AssertionError(); }; + assertEquals(55L, (long)map.merge(two, 55L, twoStep(b, f, g))); + assertEquals(55L, (long)map.get(two)); + assertTrue(map.containsKey(two)); + assertFalse(b.get()); b.set(true); + } + } + + public enum IntegerEnum { + + e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, + e10, e11, e12, e13, e14, e15, e16, e17, e18, e19, + e20, e21, e22, e23, e24, e25, e26, e27, e28, e29, + e30, e31, e32, e33, e34, e35, e36, e37, e38, e39, + e40, e41, e42, e43, e44, e45, e46, e47, e48, e49, + e50, e51, e52, e53, e54, e55, e56, e57, e58, e59, + e60, e61, e62, e63, e64, e65, e66, e67, e68, e69, + e70, e71, e72, e73, e74, e75, e76, e77, e78, e79, + e80, e81, e82, e83, e84, e85, e86, e87, e88, e89, + e90, e91, e92, e93, e94, e95, e96, e97, e98, e99, + EXTRA_KEY; + public static final int SIZE = values().length; + } + private static final int TEST_SIZE = IntegerEnum.SIZE - 1; + /** + * Realized keys ensure that there is always a hard ref to all test objects. + */ + private static final IntegerEnum[] KEYS = new IntegerEnum[TEST_SIZE]; + /** + * Realized values ensure that there is always a hard ref to all test + * objects. 
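+ * (The same instances are reused across data providers so that assertSame comparisons remain valid.)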
+ */ + private static final String[] VALUES = new String[TEST_SIZE]; + + static { + IntegerEnum[] keys = IntegerEnum.values(); + for (int each = 0; each < TEST_SIZE; each++) { + KEYS[each] = keys[each]; + VALUES[each] = String.valueOf(each); + } + } + + private static final IntegerEnum FIRST_KEY = KEYS[0]; + private static final String FIRST_VALUE = VALUES[0]; + private static final IntegerEnum EXTRA_KEY = IntegerEnum.EXTRA_KEY; + private static final String EXTRA_VALUE = String.valueOf(TEST_SIZE); + + @DataProvider(name = "Map rw=all keys=all values=all", parallel = true) + public static Iterator allMapProvider() { + return makeAllMaps().iterator(); + } + + @DataProvider(name = "Map rw=all keys=withNull values=withNull", parallel = true) + public static Iterator allMapWithNullsProvider() { + return makeAllMapsWithNulls().iterator(); + } + + @DataProvider(name = "Map rw=true keys=nonNull values=nonNull", parallel = true) + public static Iterator rwNonNullMapProvider() { + return makeRWNoNullsMaps().iterator(); + } + + @DataProvider(name = "Map rw=true keys=nonNull values=all", parallel = true) + public static Iterator rwNonNullKeysMapProvider() { + return makeRWMapsNoNulls().iterator(); + } + + @DataProvider(name = "Map rw=true keys=all values=all", parallel = true) + public static Iterator rwMapProvider() { + return makeAllRWMaps().iterator(); + } + + @DataProvider(name = "Map rw=true keys=withNull values=withNull", parallel = true) + public static Iterator rwNullsMapProvider() { + return makeAllRWMapsWithNulls().iterator(); + } + + @DataProvider(name = "ImplementsConcurrentMap", parallel = true) + public static Iterator implementsConcurrentMap() { + return makeImplementsConcurrentMap().iterator(); + } + + private static Collection makeAllRWMapsWithNulls() { + Collection all = new ArrayList<>(); + + //all.addAll(makeRWMaps(true, true)); + + return all; + } + + private static Collection makeRWMapsNoNulls() { + Collection all = new ArrayList<>(); + + //all.addAll(makeRWNoNullKeysMaps(false)); + all.addAll(makeRWNoNullsMaps()); + + return all; + } + + private static Collection makeAllROMaps() { + Collection all = new ArrayList<>(); + +// all.addAll(makeROMaps(false)); +// all.addAll(makeROMaps(true)); + + return all; + } + + private static Collection makeAllRWMaps() { + Collection all = new ArrayList<>(); + + all.addAll(makeRWNoNullsMaps()); + //all.addAll(makeRWMaps(false,true)); + //all.addAll(makeRWMaps(true,true)); + //all.addAll(makeRWNoNullKeysMaps(true)); + return all; + } + + private static Collection makeAllMaps() { + Collection all = new ArrayList<>(); + +// all.addAll(makeAllROMaps()); + all.addAll(makeAllRWMaps()); + + return all; + } + + private static Collection makeAllMapsWithNulls() { + Collection all = new ArrayList<>(); + +// all.addAll(makeROMaps(true)); +// all.addAll(makeRWMaps(true,true)); + + return all; + } + + /** + * @param nullKeys include null keys + * @param nullValues include null values + * @return + */ + private static Collection makeRWMaps(boolean nullKeys, boolean nullValues) { + return Arrays.asList( + new Object[]{"HashMap", makeMap(HashMap::new, nullKeys, nullValues)}, + new Object[]{"IdentityHashMap", makeMap(IdentityHashMap::new, nullKeys, nullValues)}, + new Object[]{"LinkedHashMap", makeMap(LinkedHashMap::new, nullKeys, nullValues)}, + new Object[]{"WeakHashMap", makeMap(WeakHashMap::new, nullKeys, nullValues)}, + new Object[]{"Collections.checkedMap(HashMap)", Collections.checkedMap(makeMap(HashMap::new, nullKeys, nullValues), IntegerEnum.class, 
String.class)}, + new Object[]{"Collections.synchronizedMap(HashMap)", Collections.synchronizedMap(makeMap(HashMap::new, nullKeys, nullValues))}, + new Object[]{"ExtendsAbstractMap", makeMap(ExtendsAbstractMap::new, nullKeys, nullValues)}); + } + + /** + * @param nulls include null values + * @return + */ + private static Collection makeRWNoNullKeysMaps(boolean nulls) { + return Arrays.asList( + // null key hostile + new Object[]{"EnumMap", makeMap(() -> new EnumMap(IntegerEnum.class), false, nulls)}, + new Object[]{"TreeMap", makeMap(TreeMap::new, false, nulls)}, + new Object[]{"ExtendsAbstractMap(TreeMap)", makeMap(() -> {return new ExtendsAbstractMap(new TreeMap());}, false, nulls)}, + new Object[]{"Collections.synchronizedMap(EnumMap)", Collections.synchronizedMap(makeMap(() -> new EnumMap(IntegerEnum.class), false, nulls))} + ); + } + + private static Collection makeRWNoNullsMaps() { + Supplier> bounded = () -> { + Cache cache = Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build(); + return cache.asMap(); + }; + Supplier> unbounded = () -> { + Cache cache = Caffeine.newBuilder().build(); + return cache.asMap(); + }; + return Arrays.asList( + // null key and value hostile + new Object[]{"Bounded", makeMap(bounded, false, false)}, + new Object[]{"Unbounded", makeMap(unbounded, false, false)} + + +// new Object[]{"Hashtable", makeMap(Hashtable::new, false, false)}, +// new Object[]{"ConcurrentHashMap", makeMap(ConcurrentHashMap::new, false, false)}, +// new Object[]{"ConcurrentSkipListMap", makeMap(ConcurrentSkipListMap::new, false, false)}, +// new Object[]{"Collections.synchronizedMap(ConcurrentHashMap)", Collections.synchronizedMap(makeMap(ConcurrentHashMap::new, false, false))}, +// new Object[]{"Collections.checkedMap(ConcurrentHashMap)", Collections.checkedMap(makeMap(ConcurrentHashMap::new, false, false), IntegerEnum.class, String.class)}, +// new Object[]{"ExtendsAbstractMap(ConcurrentHashMap)", makeMap(() -> {return new ExtendsAbstractMap(new ConcurrentHashMap());}, false, false)}, +// new Object[]{"ImplementsConcurrentMap", makeMap(ImplementsConcurrentMap::new, false, false)} + ); + } + + private static Collection makeImplementsConcurrentMap() { + var bounded = Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build(); + var unbounded = Caffeine.newBuilder().build(); + return Arrays.asList( + // null key and value hostile + new Object[]{"Bounded", new ImplementsConcurrentMap(bounded.asMap())}, + new Object[]{"Unbounded", new ImplementsConcurrentMap(unbounded.asMap())}); + } + + + /** + * @param nulls include nulls + * @return + */ + private static Collection makeROMaps(boolean nulls) { + return Arrays.asList(new Object[][]{ + new Object[]{"Collections.unmodifiableMap(HashMap)", Collections.unmodifiableMap(makeMap(HashMap::new, nulls, nulls))} + }); + } + + /** + * @param supplier a supplier of mutable map instances. + * + * @param nullKeys include null keys + * @param nullValues include null values + * @return + */ + private static Map makeMap(Supplier> supplier, boolean nullKeys, boolean nullValues) { + Map result = supplier.get(); + + for (int each = 0; each < TEST_SIZE; each++) { + IntegerEnum key = nullKeys ? (each == 0) ? null : KEYS[each] : KEYS[each]; + String value = nullValues ? (each == 0) ?
null : VALUES[each] : VALUES[each]; + + result.put(key, value); + } + + return result; + } + + static class Merging { + public enum Value { + ABSENT, + NULL, + OLDVALUE, + NEWVALUE, + RESULT + } + + public enum Merger implements BiFunction { + UNUSED { + @Override + public String apply(String oldValue, String newValue) { + fail("should not be called"); + return null; + } + }, + NULL { + @Override + public String apply(String oldValue, String newValue) { + return null; + } + }, + RESULT { + @Override + public String apply(String oldValue, String newValue) { + return VALUES[3]; + } + }, + } + } + + @DataProvider(name = "MergeCases", parallel = true) + public Iterator mergeCasesProvider() { + Collection cases = new ArrayList<>(); + + cases.addAll(makeMergeTestCases()); + + return cases.iterator(); + } + + static Collection makeMergeTestCases() { + Collection cases = new ArrayList<>(); + + for (Object[] mapParams : makeAllRWMaps() ) { + cases.add(new Object[] { mapParams[0], mapParams[1], Merging.Value.ABSENT, Merging.Value.NEWVALUE, Merging.Merger.UNUSED, Merging.Value.NEWVALUE, Merging.Value.NEWVALUE }); + } + + for (Object[] mapParams : makeAllRWMaps() ) { + cases.add(new Object[] { mapParams[0], mapParams[1], Merging.Value.OLDVALUE, Merging.Value.NEWVALUE, Merging.Merger.NULL, Merging.Value.ABSENT, Merging.Value.NULL }); + } + + for (Object[] mapParams : makeAllRWMaps() ) { + cases.add(new Object[] { mapParams[0], mapParams[1], Merging.Value.OLDVALUE, Merging.Value.NEWVALUE, Merging.Merger.RESULT, Merging.Value.RESULT, Merging.Value.RESULT }); + } + + return cases; + } + + public static void assertThrowsNPE(ThrowingRunnable r) { + assertThrows(NullPointerException.class, r); + } + + /** + * A simple mutable map implementation that provides only default + * implementations of all methods. ie. none of the Map interface default + * methods have overridden implementations. + * + * @param Type of keys + * @param Type of values + */ + public static class ExtendsAbstractMap, K, V> extends AbstractMap { + + protected final M map; + + public ExtendsAbstractMap() { this( (M) new HashMap()); } + + protected ExtendsAbstractMap(M map) { this.map = map; } + + @Override public Set> entrySet() { + return new AbstractSet>() { + @Override public int size() { + return map.size(); + } + + @Override public Iterator> iterator() { + final Iterator> source = map.entrySet().iterator(); + return new Iterator>() { + @Override + public boolean hasNext() { return source.hasNext(); } + @Override + public Map.Entry next() { return source.next(); } + @Override + public void remove() { source.remove(); } + }; + } + + @Override public boolean add(Map.Entry e) { + return map.entrySet().add(e); + } + }; + } + + @Override public V put(K key, V value) { + return map.put(key, value); + } + } + + /** + * A simple mutable concurrent map implementation that provides only default + * implementations of all methods, i.e. none of the ConcurrentMap interface + * default methods have overridden implementations. + * + * @param Type of keys + * @param Type of values + */ + public static class ImplementsConcurrentMap extends ExtendsAbstractMap, K, V> implements ConcurrentMap { + public ImplementsConcurrentMap(ConcurrentMap delegate) { super(delegate); } + + // ConcurrentMap reabstracts these methods. + // + // Unlike ConcurrentHashMap, we have zero tolerance for null values. 
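+ // Each override below applies requireNonNull before delegating, so null arguments fail fast.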
+ + @Override public V replace(K k, V v) { + return map.replace(requireNonNull(k), requireNonNull(v)); + } + + @Override public boolean replace(K k, V v, V vv) { + return map.replace(requireNonNull(k), + requireNonNull(v), + requireNonNull(vv)); + } + + @Override public boolean remove(Object k, Object v) { + return map.remove(requireNonNull(k), requireNonNull(v)); + } + + @Override public V putIfAbsent(K k, V v) { + return map.putIfAbsent(requireNonNull(k), requireNonNull(v)); + } + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/EntryHashCode.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/EntryHashCode.java new file mode 100644 index 0000000000..e5db6bd7c4 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/EntryHashCode.java @@ -0,0 +1,138 @@ +/* + * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ + +/* + * Portions Copyright (c) 2012 IBM Corporation + */ +package com.github.benmanes.caffeine.openjdk.map; + +import java.time.Duration; +import java.util.HashMap; +import java.util.Hashtable; +import java.util.IdentityHashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; +import java.util.WeakHashMap; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentSkipListMap; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 8000955 + * @summary Map.Entry implementations need to comply with Map.Entry.hashCode() defined behaviour. 
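+ * The defined behaviour is that an entry's hash code equals Objects.hashCode(getKey()) ^ Objects.hashCode(getValue()).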
+ * @author ngmr + */ +@SuppressWarnings({"AlmostJavadoc", "ComparableType", "JdkObsolete", "rawtypes", "unchecked"}) +public class EntryHashCode { + private static final int TEST_SIZE = 100; + + static final Object[][] entryData = { + new Object[TEST_SIZE], + new Object[TEST_SIZE] + }; + + static final Map[] maps = new Map[] { + new HashMap<>(), + new Hashtable<>(), + new IdentityHashMap<>(), + new LinkedHashMap<>(), + new TreeMap<>(), + new WeakHashMap<>(), + new ConcurrentHashMap<>(), + new ConcurrentSkipListMap<>() + }; + + static { + for (int i = 0; i < entryData[0].length; i++) { + // key objects need to be Comparable for use in TreeMap + entryData[0][i] = new Comparable() { + @Override + public int compareTo(Object o) { + return (hashCode() - o.hashCode()); + } + }; + entryData[1][i] = new Object(); + } + } + + private static void addTestData(Map map) { + for (int i = 0; i < entryData[0].length; i++) { + map.put(entryData[0][i], entryData[1][i]); + } + } + + public static void test(Cache cache) throws Exception { + Exception failure = null; + for (Map map: List.of(cache.asMap())) { + addTestData(map); + + try { + for (Map.Entry e: map.entrySet()) { + Object key = e.getKey(); + Object value = e.getValue(); + int expectedEntryHashCode = + (Objects.hashCode(key) ^ Objects.hashCode(value)); + + if (e.hashCode() != expectedEntryHashCode) { + throw new Exception("FAILURE: " + + e.getClass().getName() + + ".hashCode() does not conform to defined" + + " behaviour of java.util.Map.Entry.hashCode()"); + } + } + } catch (Exception e) { + if (failure == null) { + failure = e; + } else { + failure.addSuppressed(e); + } + } finally { + map.clear(); + } + } + if (failure != null) { + throw failure; + } + } + + @Test + public void bounded() throws Exception { + test(Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build()); + } + + @Test + public void unbounded() throws Exception { + test(Caffeine.newBuilder().build()); + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/Get.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/Get.java new file mode 100644 index 0000000000..6198c10b34 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/Get.java @@ -0,0 +1,180 @@ +/* + * Copyright (c) 2005, 2012, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ +package com.github.benmanes.caffeine.openjdk.map; + +import static java.util.Locale.US; + +import java.time.Duration; +import java.util.HashMap; +import java.util.Hashtable; +import java.util.IdentityHashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Objects; +import java.util.SortedMap; +import java.util.TreeMap; +import java.util.WeakHashMap; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ConcurrentSkipListMap; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 6306829 + * @summary Verify assertions in get() javadocs + * @author Martin Buchholz + */ +@SuppressWarnings({"AlmostJavadoc", "BooleanParameter", "EmptyCatch", "InconsistentOverloads", + "JdkObsolete", "MultiVariableDeclaration", "NonAtomicVolatileUpdate", "NonFinalStaticField", + "NullAway", "ParameterMissingNullable", "SystemOut", "UnnecessaryParentheses", + "UnusedVariable"}) +public class Get { + + @Test + public void bounded() { + Cache cache = Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build(); + testMap(cache.asMap()); + } + + @Test + public void unbounded() { + Cache cache = Caffeine.newBuilder().build(); + testMap(cache.asMap()); + } + + private static void realMain(String[] args) throws Throwable { + testMap(new Hashtable()); + testMap(new HashMap()); + testMap(new IdentityHashMap()); + testMap(new LinkedHashMap()); + testMap(new ConcurrentHashMap()); + testMap(new WeakHashMap()); + testMap(new TreeMap()); + testMap(new ConcurrentSkipListMap()); + } + + private static void put(Map m, + Character key, Boolean value, + Boolean oldValue) { + if (oldValue != null) { + check("containsValue(oldValue)", m.containsValue(oldValue)); + check("values.contains(oldValue)", m.values().contains(oldValue)); + } + equal(m.put(key, value), oldValue); + equal(m.get(key), value); + check("containsKey", m.containsKey(key)); + check("keySet.contains", m.keySet().contains(key)); + check("containsValue", m.containsValue(value)); + check("values.contains", m.values().contains(value)); + check("!isEmpty", ! m.isEmpty()); + } + + private static void testMap(Map m) { + // We verify following assertions in get(Object) method javadocs + boolean permitsNullKeys = (! (m instanceof ConcurrentMap || + m instanceof Hashtable || + m instanceof SortedMap)); + boolean permitsNullValues = (! (m instanceof ConcurrentMap || + m instanceof Hashtable)); + boolean usesIdentity = m instanceof IdentityHashMap; + + System.err.println(m.getClass()); + put(m, 'A', true, null); + put(m, 'A', false, true); // Guaranteed identical by JLS + put(m, 'B', true, null); + put(m, 'A', false, usesIdentity ? 
null : false); + if (permitsNullKeys) { + try { + put(m, null, true, null); + put(m, null, false, true); + } + catch (Throwable t) { unexpected(m.getClass().getName(), t); } + } else { + try { m.get(null); fail(m.getClass().getName() + " did not reject null key"); } + catch (NullPointerException e) {} + catch (Throwable t) { unexpected(m.getClass().getName(), t); } + + try { m.put(null, true); fail(m.getClass().getName() + " did not reject null key"); } + catch (NullPointerException e) {} + catch (Throwable t) { unexpected(m.getClass().getName(), t); } + } + if (permitsNullValues) { + try { + put(m, 'C', null, null); + put(m, 'C', true, null); + put(m, 'C', null, true); + } + catch (Throwable t) { unexpected(m.getClass().getName(), t); } + } else { + try { m.put('A', null); fail(m.getClass().getName() + " did not reject null key"); } + catch (NullPointerException e) {} + catch (Throwable t) { unexpected(m.getClass().getName(), t); } + + try { m.put('C', null); fail(m.getClass().getName() + " did not reject null key"); } + catch (NullPointerException e) {} + catch (Throwable t) { unexpected(m.getClass().getName(), t); } + } + } + + //--------------------- Infrastructure --------------------------- + static volatile int passed = 0, failed = 0; + static void pass() { passed++; } + static void fail() { failed++; new Error("Failure").printStackTrace(System.err); } + static void fail(String msg) { failed++; new Error("Failure: " + msg).printStackTrace(System.err); } + static void unexpected(String msg, Throwable t) { System.err.println("Unexpected: " + msg); unexpected(t); } + static void unexpected(Throwable t) { failed++; t.printStackTrace(System.err); } + static void check(boolean cond) { if (cond) { + pass(); + } else { + fail(); + } } + static void check(String desc, boolean cond) { if (cond) { + pass(); + } else { + fail(desc); + } } + static void equal(Object x, Object y) { + if (Objects.equals(x,y)) { + pass(); + } else { + fail(x + " not equal to " + y); + } + } + + public static void main(String[] args) throws Throwable { + try { realMain(args); } catch (Throwable t) { unexpected(t); } + + System.out.printf(US, "%nPassed = %d, failed = %d%n%n", passed, failed); + if (failed > 0) { + throw new Error("Some tests failed"); + } + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/InPlaceOpsCollisions.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/InPlaceOpsCollisions.java new file mode 100644 index 0000000000..229abdd379 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/InPlaceOpsCollisions.java @@ -0,0 +1,552 @@ +/* + * Copyright (c) 2013, 2021, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 
+ * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.github.benmanes.caffeine.openjdk.map; + +import static java.util.Locale.US; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.function.Supplier; + +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +/* + * @test + * @bug 8005698 + * @run testng/othervm -Dtest.map.collisions.shortrun=true InPlaceOpsCollisions + * @summary Ensure overrides of in-place operations in Maps behave well with lots of collisions. + */ +@SuppressWarnings("NullAway") +public class InPlaceOpsCollisions extends MapWithCollisionsProviders { + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testPutIfAbsent(String desc, Supplier> ms, Object val) { + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + Object retVal; + removeOddKeys(map, keys); + for (int i = 0; i < keys.length; i++) { + retVal = map.putIfAbsent(keys[i], val); + if (i % 2 == 0) { // even: not absent, not put + + assertEquals(retVal, keys[i], + String.format(US, "putIfAbsent: (%s[%d]) retVal", desc, i)); + assertEquals(keys[i], map.get(keys[i]), + String.format(US, "putIfAbsent: get(%s[%d])", desc, i)); + assertTrue(map.containsValue(keys[i]), + String.format(US, "putIfAbsent: containsValue(%s[%d])", desc, i)); + } else { // odd: absent, was put + assertNull(retVal, + String.format(US, "putIfAbsent: (%s[%d]) retVal", desc, i)); + assertEquals(val, map.get(keys[i]), + String.format(US, "putIfAbsent: get(%s[%d])", desc, i)); + assertFalse(map.containsValue(keys[i]), + String.format(US, "putIfAbsent: !containsValue(%s[%d])", desc, i)); + } + assertTrue(map.containsKey(keys[i]), + String.format(US, "insertion: containsKey(%s[%d])", desc, i)); + } + assertEquals(map.size(), keys.length, + String.format(US, "map expected size m%d != k%d", map.size(), keys.length)); + } + + @Test(enabled= false, dataProvider = "nullValueFriendlyMaps") + void testPutIfAbsentOverwriteNull(String desc, Supplier> ms) { + Map map = ms.get(); + map.put("key", null); + assertEquals(map.size(), 1, desc + ": size != 1"); + assertTrue(map.containsKey("key"), desc + ": does not have key"); + assertNull(map.get("key"), desc + ": value is not null"); + map.putIfAbsent("key", "value"); // must rewrite + assertEquals(map.size(), 1, desc + ": size != 1"); + assertTrue(map.containsKey("key"), desc + ": does not have key"); + assertEquals(map.get("key"), "value", desc + ": value is not 'value'"); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testRemoveMapping(String desc, Supplier> ms, Object val) { + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + boolean removed; + int removes = 0; + remapOddKeys(map, keys, val); + for (int i = 0; i < keys.length; i++) { + removed = map.remove(keys[i], keys[i]); + if (i % 2 == 0) { // even: original mapping, should be removed + assertTrue(removed, + String.format(US, "removeMapping: retVal(%s[%d])", desc, i)); + assertNull(map.get(keys[i]), + 
String.format(US, "removeMapping: get(%s[%d])", desc, i)); + assertFalse(map.containsKey(keys[i]), + String.format(US, "removeMapping: !containsKey(%s[%d])", desc, i)); + assertFalse(map.containsValue(keys[i]), + String.format(US, "removeMapping: !containsValue(%s[%d])", desc, i)); + removes++; + } else { // odd: new mapping, not removed + assertFalse(removed, + String.format(US, "removeMapping: retVal(%s[%d])", desc, i)); + assertEquals(val, map.get(keys[i]), + String.format(US, "removeMapping: get(%s[%d])", desc, i)); + assertTrue(map.containsKey(keys[i]), + String.format(US, "removeMapping: containsKey(%s[%d])", desc, i)); + assertTrue(map.containsValue(val), + String.format(US, "removeMapping: containsValue(%s[%d])", desc, i)); + } + } + assertEquals(map.size(), keys.length - removes, + String.format(US, "map expected size m%d != k%d", map.size(), keys.length - removes)); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testReplaceOldValue(String desc, Supplier> ms, Object val) { + // remap odds to val + // call replace to replace for val, for all keys + // check that all keys map to value from keys array + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + boolean replaced; + remapOddKeys(map, keys, val); + + for (int i = 0; i < keys.length; i++) { + replaced = map.replace(keys[i], val, keys[i]); + if (i % 2 == 0) { // even: original mapping, should not be replaced + assertFalse(replaced, + String.format(US, "replaceOldValue: retVal(%s[%d])", desc, i)); + } else { // odd: new mapping, should be replaced + assertTrue(replaced, + String.format(US, "replaceOldValue: get(%s[%d])", desc, i)); + } + assertEquals(keys[i], map.get(keys[i]), + String.format(US, "replaceOldValue: get(%s[%d])", desc, i)); + assertTrue(map.containsKey(keys[i]), + String.format(US, "replaceOldValue: containsKey(%s[%d])", desc, i)); + assertTrue(map.containsValue(keys[i]), + String.format(US, "replaceOldValue: containsValue(%s[%d])", desc, i)); + } + assertFalse(map.containsValue(val), + String.format(US, "replaceOldValue: !containsValue(%s[%s])", desc, val)); + assertEquals(map.size(), keys.length, + String.format(US, "map expected size m%d != k%d", map.size(), keys.length)); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testReplaceIfMapped(String desc, Supplier> ms, Object val) { + // remove odd keys + // call replace for all keys[] + // odd keys should remain absent, even keys should be mapped to EXTRA, no value from keys[] should be in map + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + int expectedSize1 = 0; + removeOddKeys(map, keys); + int expectedSize2 = map.size(); + + for (int i = 0; i < keys.length; i++) { + Object retVal = map.replace(keys[i], val); + if (i % 2 == 0) { // even: still in map, should be replaced + assertEquals(retVal, keys[i], + String.format(US, "replaceIfMapped: retVal(%s[%d])", desc, i)); + assertEquals(val, map.get(keys[i]), + String.format(US, "replaceIfMapped: get(%s[%d])", desc, i)); + assertTrue(map.containsKey(keys[i]), + String.format(US, "replaceIfMapped: containsKey(%s[%d])", desc, i)); + expectedSize1++; + } else { // odd: was removed, should not be replaced + assertNull(retVal, + String.format(US, "replaceIfMapped: retVal(%s[%d])", desc, i)); + assertNull(map.get(keys[i]), + String.format(US, "replaceIfMapped: get(%s[%d])", desc, i)); + assertFalse(map.containsKey(keys[i]), + String.format(US, "replaceIfMapped: containsKey(%s[%d])", desc, i)); + } + assertFalse(map.containsValue(keys[i]), + String.format(US, 
"replaceIfMapped: !containsValue(%s[%d])", desc, i)); + } + assertTrue(map.containsValue(val), + String.format(US, "replaceIfMapped: containsValue(%s[%s])", desc, val)); + assertEquals(map.size(), expectedSize1, + String.format(US, "map expected size#1 m%d != k%d", map.size(), expectedSize1)); + assertEquals(map.size(), expectedSize2, + String.format(US, "map expected size#2 m%d != k%d", map.size(), expectedSize2)); + + } + + private static void testComputeIfAbsent(Map map, String desc, T[] keys, + Function mappingFunction) { + // remove a third of the keys + // call computeIfAbsent for all keys, func returns EXTRA + // check that removed keys now -> EXTRA, other keys -> original val + T expectedVal = mappingFunction.apply(keys[0]); + T retVal; + int expectedSize = 0; + removeThirdKeys(map, keys); + for (int i = 0; i < keys.length; i++) { + retVal = map.computeIfAbsent(keys[i], mappingFunction); + if (i % 3 != 2) { // key present, not computed + assertEquals(retVal, keys[i], + String.format(US, "computeIfAbsent: (%s[%d]) retVal", desc, i)); + assertEquals(keys[i], map.get(keys[i]), + String.format(US, "computeIfAbsent: get(%s[%d])", desc, i)); + assertTrue(map.containsValue(keys[i]), + String.format(US, "computeIfAbsent: containsValue(%s[%d])", desc, i)); + assertTrue(map.containsKey(keys[i]), + String.format(US, "insertion: containsKey(%s[%d])", desc, i)); + expectedSize++; + } else { // key absent, computed unless function return null + assertEquals(retVal, expectedVal, + String.format(US, "computeIfAbsent: (%s[%d]) retVal", desc, i)); + assertEquals(expectedVal, map.get(keys[i]), + String.format(US, "computeIfAbsent: get(%s[%d])", desc, i)); + assertFalse(map.containsValue(keys[i]), + String.format(US, "computeIfAbsent: !containsValue(%s[%d])", desc, i)); + // mapping should not be added if function returns null + assertTrue(map.containsKey(keys[i]) != (expectedVal == null), + String.format(US, "insertion: containsKey(%s[%d])", desc, i)); + if (expectedVal != null) { + expectedSize++; + } + } + } + if (expectedVal != null) { + assertTrue(map.containsValue(expectedVal), + String.format(US, "computeIfAbsent: containsValue(%s[%s])", desc, expectedVal)); + } + assertEquals(map.size(), expectedSize, + String.format(US, "map expected size m%d != k%d", map.size(), expectedSize)); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testComputeIfAbsentNonNull(String desc, Supplier> ms, Object val) { + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + testComputeIfAbsent(map, desc, keys, (k) -> val); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testComputeIfAbsentNull(String desc, Supplier> ms, Object val) { + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + testComputeIfAbsent(map, desc, keys, (k) -> null); + } + + @Test(enabled= false, dataProvider = "nullValueFriendlyMaps") + void testComputeIfAbsentOverwriteNull(String desc, Supplier> ms) { + Map map = ms.get(); + map.put("key", null); + assertEquals(map.size(), 1, desc + ": size != 1"); + assertTrue(map.containsKey("key"), desc + ": does not have key"); + assertNull(map.get("key"), desc + ": value is not null"); + Object result = map.computeIfAbsent("key", k -> "value"); // must rewrite + assertEquals(result, "value", desc + ": computeIfAbsent result is not 'value'"); + assertEquals(map.size(), 1, desc + ": size != 1"); + assertTrue(map.containsKey("key"), desc + ": does not have key"); + assertEquals(map.get("key"), "value", desc + ": value is not 'value'"); + } + + private 
static void testComputeIfPresent(Map map, String desc, T[] keys, + BiFunction mappingFunction) { + // remove a third of the keys + // call testComputeIfPresent for all keys[] + // removed keys should remain absent, even keys should be mapped to $RESULT + // no value from keys[] should be in map + T funcResult = mappingFunction.apply(keys[0], keys[0]); + int expectedSize1 = 0; + removeThirdKeys(map, keys); + + for (int i = 0; i < keys.length; i++) { + T retVal = map.computeIfPresent(keys[i], mappingFunction); + if (i % 3 != 2) { // key present + if (funcResult == null) { // was removed + assertFalse(map.containsKey(keys[i]), + String.format(US, "replaceIfMapped: containsKey(%s[%d])", desc, i)); + } else { // value was replaced + assertTrue(map.containsKey(keys[i]), + String.format(US, "replaceIfMapped: containsKey(%s[%d])", desc, i)); + expectedSize1++; + } + assertEquals(retVal, funcResult, + String.format(US, "computeIfPresent: retVal(%s[%s])", desc, i)); + assertEquals(funcResult, map.get(keys[i]), + String.format(US, "replaceIfMapped: get(%s[%d])", desc, i)); + + } else { // odd: was removed, should not be replaced + assertNull(retVal, + String.format(US, "replaceIfMapped: retVal(%s[%d])", desc, i)); + assertNull(map.get(keys[i]), + String.format(US, "replaceIfMapped: get(%s[%d])", desc, i)); + assertFalse(map.containsKey(keys[i]), + String.format(US, "replaceIfMapped: containsKey(%s[%d])", desc, i)); + } + assertFalse(map.containsValue(keys[i]), + String.format(US, "replaceIfMapped: !containsValue(%s[%d])", desc, i)); + } + assertEquals(map.size(), expectedSize1, + String.format(US, "map expected size#1 m%d != k%d", map.size(), expectedSize1)); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testComputeIfPresentNonNull(String desc, Supplier> ms, Object val) { + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + testComputeIfPresent(map, desc, keys, (k, v) -> val); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testComputeIfPresentNull(String desc, Supplier> ms, Object val) { + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + testComputeIfPresent(map, desc, keys, (k, v) -> null); + } + + @Test(dataProvider = "hashMapsWithObjects") + void testComputeNonNull(String desc, Supplier> ms, IntKey val) { + // remove a third of the keys + // call compute() for all keys[] + // all keys should be present: removed keys -> EXTRA, others to k-1 + Map map = ms.get(); + IntKey[] keys = map.keySet().stream().sorted().toArray(IntKey[]::new); + BiFunction mappingFunction = (k, v) -> { + if (v == null) { + return val; + } else { + return keys[k.getValue() - 1]; + } + }; + removeThirdKeys(map, keys); + for (int i = 1; i < keys.length; i++) { + IntKey retVal = map.compute(keys[i], mappingFunction); + if (i % 3 != 2) { // key present, should be mapped to k-1 + assertEquals(retVal, keys[i - 1], + String.format(US, "compute: retVal(%s[%d])", desc, i)); + assertEquals(keys[i - 1], map.get(keys[i]), + String.format(US, "compute: get(%s[%d])", desc, i)); + } else { // odd: was removed, should be replaced with EXTRA + assertEquals(retVal, val, + String.format(US, "compute: retVal(%s[%d])", desc, i)); + assertEquals(val, map.get(keys[i]), + String.format(US, "compute: get(%s[%d])", desc, i)); + } + assertTrue(map.containsKey(keys[i]), + String.format(US, "compute: containsKey(%s[%d])", desc, i)); + } + assertEquals(map.size(), keys.length, + String.format(US, "map expected size#1 m%d != k%d", map.size(), keys.length)); + 
assertTrue(map.containsValue(val), + String.format(US, "compute: containsValue(%s[%s])", desc, val)); +// assertFalse(map.containsValue(null), +// String.format(US, "compute: !containsValue(%s,[null])", desc)); + assertFalse(map.values().stream().anyMatch(Objects::isNull), + String.format(US, "compute: !hasNullValue(%s,[null])", desc)); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testComputeNull(String desc, Supplier> ms, Object val) { + // remove a third of the keys + // call compute() for all keys[] + // removed keys should -> EXTRA + // for other keys: func returns null, should have no mapping + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + BiFunction mappingFunction = (k, v) -> { + // if absent/null -> EXTRA + // if present -> null + if (v == null) { + return val; + } else { + return null; + } + }; + int expectedSize = 0; + removeThirdKeys(map, keys); + for (int i = 0; i < keys.length; i++) { + Object retVal = map.compute(keys[i], mappingFunction); + if (i % 3 != 2) { // key present, func returned null, should be absent from map + assertNull(retVal, + String.format(US, "compute: retVal(%s[%d])", desc, i)); + assertNull(map.get(keys[i]), + String.format(US, "compute: get(%s[%d])", desc, i)); + assertFalse(map.containsKey(keys[i]), + String.format(US, "compute: containsKey(%s[%d])", desc, i)); + assertFalse(map.containsValue(keys[i]), + String.format(US, "compute: containsValue(%s[%s])", desc, i)); + } else { // odd: was removed, should now be mapped to EXTRA + assertEquals(retVal, val, + String.format(US, "compute: retVal(%s[%d])", desc, i)); + assertEquals(val, map.get(keys[i]), + String.format(US, "compute: get(%s[%d])", desc, i)); + assertTrue(map.containsKey(keys[i]), + String.format(US, "compute: containsKey(%s[%d])", desc, i)); + expectedSize++; + } + } + assertTrue(map.containsValue(val), + String.format(US, "compute: containsValue(%s[%s])", desc, val)); + assertEquals(map.size(), expectedSize, + String.format(US, "map expected size#1 m%d != k%d", map.size(), expectedSize)); + } + + @Test(dataProvider = "hashMapsWithObjects") + void testMergeNonNull(String desc, Supplier> ms, IntKey val) { + // remove a third of the keys + // call merge() for all keys[] + // all keys should be present: removed keys now -> EXTRA, other keys -> k-1 + Map map = ms.get(); + IntKey[] keys = map.keySet().stream().sorted().toArray(IntKey[]::new); + + // Map to preceding key + BiFunction mappingFunction + = (k, v) -> keys[k.getValue() - 1]; + removeThirdKeys(map, keys); + for (int i = 1; i < keys.length; i++) { + IntKey retVal = map.merge(keys[i], val, mappingFunction); + if (i % 3 != 2) { // key present, should be mapped to k-1 + assertEquals(retVal, keys[i - 1], + String.format(US, "compute: retVal(%s[%d])", desc, i)); + assertEquals(keys[i - 1], map.get(keys[i]), + String.format(US, "compute: get(%s[%d])", desc, i)); + } else { // odd: was removed, should be replaced with EXTRA + assertEquals(retVal, val, + String.format(US, "compute: retVal(%s[%d])", desc, i)); + assertEquals(val, map.get(keys[i]), + String.format(US, "compute: get(%s[%d])", desc, i)); + } + assertTrue(map.containsKey(keys[i]), + String.format(US, "compute: containsKey(%s[%d])", desc, i)); + } + + assertEquals(map.size(), keys.length, + String.format(US, "map expected size#1 m%d != k%d", map.size(), keys.length)); + assertTrue(map.containsValue(val), + String.format(US, "compute: containsValue(%s[%s])", desc, val)); +// assertFalse(map.containsValue(null), +// String.format(US, "compute: 
!containsValue(%s,[null])", desc)); + assertFalse(map.values().stream().anyMatch(Objects::isNull), + String.format(US, "compute: !hasNullValue(%s,[null])", desc)); + } + + @Test(dataProvider = "mapsWithObjectsAndStrings") + void testMergeNull(String desc, Supplier> ms, Object val) { + // remove a third of the keys + // call merge() for all keys[] + // result: removed keys -> EXTRA, other keys absent + + Map map = ms.get(); + Object[] keys = map.keySet().toArray(); + BiFunction mappingFunction = (k, v) -> null; + int expectedSize = 0; + removeThirdKeys(map, keys); + for (int i = 0; i < keys.length; i++) { + Object retVal = map.merge(keys[i], val, mappingFunction); + if (i % 3 != 2) { // key present, func returned null, should be absent from map + assertNull(retVal, + String.format(US, "compute: retVal(%s[%d])", desc, i)); + assertNull(map.get(keys[i]), + String.format(US, "compute: get(%s[%d])", desc, i)); + assertFalse(map.containsKey(keys[i]), + String.format(US, "compute: containsKey(%s[%d])", desc, i)); + } else { // odd: was removed, should now be mapped to EXTRA + assertEquals(retVal, val, + String.format(US, "compute: retVal(%s[%d])", desc, i)); + assertEquals(val, map.get(keys[i]), + String.format(US, "compute: get(%s[%d])", desc, i)); + assertTrue(map.containsKey(keys[i]), + String.format(US, "compute: containsKey(%s[%d])", desc, i)); + expectedSize++; + } + assertFalse(map.containsValue(keys[i]), + String.format(US, "compute: containsValue(%s[%s])", desc, i)); + } + assertTrue(map.containsValue(val), + String.format(US, "compute: containsValue(%s[%s])", desc, val)); + assertEquals(map.size(), expectedSize, + String.format(US, "map expected size#1 m%d != k%d", map.size(), expectedSize)); + } + + /* + * Remove half of the keys + */ + private static void removeOddKeys(Map map, /*String keys_desc, */ T[] keys) { + int removes = 0; + for (int i = 0; i < keys.length; i++) { + if (i % 2 != 0) { + map.remove(keys[i]); + removes++; + } + } + assertEquals(map.size(), keys.length - removes, String.format(US, + "map expected size m%d != k%d", map.size(), keys.length - removes)); + } + + /* + * Remove every third key + * This will hopefully leave some removed keys in TreeBins for, e.g., computeIfAbsent + * w/ a func that returns null. 
+ * + * TODO: consider using this in other tests (and maybe adding a remapThirdKeys) + */ + private static void removeThirdKeys(Map map, /*String keys_desc, */ T[] keys) { + int removes = 0; + for (int i = 0; i < keys.length; i++) { + if (i % 3 == 2) { + map.remove(keys[i]); + removes++; + } + } + assertEquals(map.size(), keys.length - removes, String.format(US, + "map expected size m%d != k%d", map.size(), keys.length - removes)); + } + + /* + * Re-map the odd-numbered keys to map to the EXTRA value + */ + private static void remapOddKeys(Map map, T[] keys, T val) { + for (int i = 0; i < keys.length; i++) { + if (i % 2 != 0) { + map.put(keys[i], val); + } + } + } + + @DataProvider + public Iterator nullValueFriendlyMaps() { + return Arrays.asList( + new Object[]{"HashMap", (Supplier>) HashMap::new}, + new Object[]{"LinkedHashMap", (Supplier>) LinkedHashMap::new}, + new Object[]{"TreeMap", (Supplier>) TreeMap::new}, + new Object[]{"TreeMap(cmp)", (Supplier>) () -> new TreeMap<>(Comparator.reverseOrder())}, + new Object[]{"TreeMap.descendingMap", (Supplier>) () -> new TreeMap<>().descendingMap()} + ).iterator(); + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/LockStep.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/LockStep.java new file mode 100644 index 0000000000..fe835f8038 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/LockStep.java @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.github.benmanes.caffeine.openjdk.map; + +import static java.util.Locale.US; + +import java.time.Duration; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.function.Supplier; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 6612102 + * @summary Test Map implementations for mutual compatibility + * @key randomness + */ + +/** + * Based on the strange scenario required to reproduce + * (coll) IdentityHashMap.iterator().remove() might decrement size twice + * + * It would be good to add more "Lockstep-style" tests to this file. 
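+ * Lockstep here means applying the same sequence of operations to every map in the list and asserting after each step that they all remain equal.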
+ */ +@SuppressWarnings({"MultiVariableDeclaration", "NonAtomicVolatileUpdate", "NonAtomicVolatileUpdate", + "rawtypes", "RedundantStringConversion", "SystemOut", "unchecked", "UnnecessaryFinal"}) +public class LockStep { + void mapsEqual(Map m1, Map m2) { + equal(m1, m2); + equal(m2, m1); + equal(m1.size(), m2.size()); + equal(m1.isEmpty(), m2.isEmpty()); + equal(m1.keySet(), m2.keySet()); + equal(m2.keySet(), m1.keySet()); + } + + void mapsEqual(List maps) { + Map first = maps.get(0); + for (Map map : maps) { + mapsEqual(first, map); + } + } + + void put(List maps, Object key, Object val) { + for (Map map : maps) { + map.put(key, val); + } + mapsEqual(maps); + } + + void removeLastTwo(List maps) { + Map first = maps.get(0); + int size = first.size(); + Iterator fit = first.keySet().iterator(); + for (int j = 0; j < size - 2; j++) { + fit.next(); + } + Object x1 = fit.next(); + Object x2 = fit.next(); + + for (Map map : maps) { + Iterator it = map.keySet().iterator(); + while (it.hasNext()) { + Object x = it.next(); + if (x == x1 || x == x2) { + it.remove(); + } + } + } + mapsEqual(maps); + } + + void remove(Map m, Iterator it) { + int size = m.size(); + it.remove(); + if (m.size() != size-1) { + throw new Error(String.format(US, "Incorrect size!%nmap=%s, size=%d%n", + m.toString(), m.size())); + } + } + + void test(Supplier supplier) { + final int iterations = 100; + final Random r = new Random(); + + for (int i = 0; i < iterations; i++) { + List maps = List.of( + supplier.get() +// new IdentityHashMap(11), +// new HashMap(16), +// new LinkedHashMap(16), +// new WeakHashMap(16), +// new Hashtable(16), +// new TreeMap(), +// new ConcurrentHashMap(16), +// new ConcurrentSkipListMap(), +// Collections.checkedMap(new HashMap(16), Integer.class, Integer.class), +// Collections.checkedSortedMap(new TreeMap(), Integer.class, Integer.class), +// Collections.checkedNavigableMap(new TreeMap(), Integer.class, Integer.class), +// Collections.synchronizedMap(new HashMap(16)), +// Collections.synchronizedSortedMap(new TreeMap()), +// Collections.synchronizedNavigableMap(new TreeMap()) + ); + + for (int j = 0; j < 10; j++) { + put(maps, r.nextInt(100), r.nextInt(100)); + } + removeLastTwo(maps); + } + } + + @Test + public void bounded() { + test(() -> Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build().asMap()); + } + + @Test + public void unbounded() { + test(() -> Caffeine.newBuilder().build().asMap()); + } + + //--------------------- Infrastructure --------------------------- + volatile int passed = 0, failed = 0; + void pass() {passed++;} + void fail() {failed++; Thread.dumpStack();} + void fail(String msg) {System.err.println(msg); fail();} + void unexpected(Throwable t) {failed++; t.printStackTrace();} + void check(boolean cond) {if (cond) { + pass(); + } else { + fail(); + }} + void equal(Object x, Object y) { + if (x == null ? 
y == null : x.equals(y)) { + pass(); + } else { + fail(x + " not equal to " + y); + }} +// public static void main(String[] args) throws Throwable { +// new LockStep().instanceMain(args);} +// void instanceMain(String[] args) throws Throwable { +// try {test();} catch (Throwable t) {unexpected(t);} +// System.out.printf("%nPassed = %d, failed = %d%n%n", passed, failed); +// if (failed > 0) { +// throw new AssertionError("Some tests failed"); +// }} +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/MapBinToFromTreeTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/MapBinToFromTreeTest.java new file mode 100644 index 0000000000..e61e523808 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/MapBinToFromTreeTest.java @@ -0,0 +1,251 @@ +/* + * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ +package com.github.benmanes.caffeine.openjdk.map; + +import static java.util.Locale.US; +import static org.testng.Assert.assertEquals; + +import java.time.Duration; +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.stream.Collector; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 8023463 + * @summary Test the case where a bin is treeified and vice verser + * @run testng MapBinToFromTreeTest + */ +@Test +@SuppressWarnings({"CollectorMutability", "DirectReturn", "NullAway", "rawtypes"}) +public class MapBinToFromTreeTest { + + // Initial capacity of map + // Should be >= the map capacity for treeifiying, see HashMap/ConcurrentMap.MIN_TREEIFY_CAPACITY + static final int INITIAL_CAPACITY = 64; + + // Maximum size of map + // Should be > the treeify threshold, see HashMap/ConcurrentMap.TREEIFY_THRESHOLD + // Should be > INITIAL_CAPACITY to ensure resize occurs + static final int SIZE = 256; + + // Load factor of map + // A value 1.0 will ensure that a new threshold == capacity + static final float LOAD_FACTOR = 1.0f; + + @DataProvider(name = "maps") + static Object[][] mapProvider() { + var bounded = Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build(); + var unbounded = Caffeine.newBuilder().build(); + + return new Object[][] { + { "Bounded", bounded.asMap() }, + { "Unbounded", unbounded.asMap() }, + // Pass in the class name as a description for test reporting + // purposes +// { HashMap.class.getName(), new HashMap(INITIAL_CAPACITY, LOAD_FACTOR) }, +// { LinkedHashMap.class.getName(), new LinkedHashMap(INITIAL_CAPACITY, LOAD_FACTOR) }, +// { ConcurrentHashMap.class.getName(), new ConcurrentHashMap(INITIAL_CAPACITY, LOAD_FACTOR) }, + }; + } + + @Test(dataProvider = "maps") + public void testPutThenGet(String d, Map m) { + put(SIZE, m, (i, s) -> { + for (int j = 0; j < s; j++) { + assertEquals(m.get(new HashCodeInteger(j)).intValue(), j, + String.format(US, "Map.get(%d)", j)); + } + }); + } + + @Test(dataProvider = "maps") + public void testPutThenTraverse(String d, Map m) { + Collector> c = getCollector(m); + + put(SIZE, m, (i, s) -> { + // Note that it is OK to collect to a Set (HashSet) as long as + // integer values are used since these tests only check for + // collisions and other tests will verify more general functionality + Collection actual = m.keySet().stream().map(e -> e.value).collect(c); + Collection expected = IntStream.range(0, s).boxed().collect(c); + assertEquals(actual, expected, "Map.keySet()"); + }); + } + + @Test(dataProvider = "maps") + public void testRemoveThenGet(String d, Map m) { + put(SIZE, m, (i, s) -> { }); + + remove(m, (i, s) -> { + for (int j = i + 1; j < SIZE; j++) { + assertEquals(m.get(new HashCodeInteger(j)).intValue(), j, + String.format(US, "Map.get(%d)", j)); + } + }); + } + + @Test(dataProvider = "maps") + public void testRemoveThenTraverse(String d, Map m) { + put(SIZE, m, (i, s) -> { }); + + Collector> c = getCollector(m); + + remove(m, (i, s) -> { + Collection actual = m.keySet().stream().map(e -> e.value).collect(c); + Collection expected = IntStream.range(i + 1, SIZE).boxed().collect(c); + assertEquals(actual, expected, "Map.keySet()"); + }); + } + + @Test(dataProvider = "maps") + public void 
testUntreeifyOnResizeWithGet(String d, Map m) { + // Fill the map with 64 entries grouped into 4 buckets + put(INITIAL_CAPACITY, m, (i, s) -> { }); + + for (int i = INITIAL_CAPACITY; i < SIZE; i++) { + // Add further entries in the 0'th bucket so as not to disturb + // other buckets, entries of which may be distributed and/or + // the bucket untreeified on resize + m.put(new HashCodeInteger(i, 0), i); + + for (int j = 0; j < INITIAL_CAPACITY; j++) { + assertEquals(m.get(new HashCodeInteger(j)).intValue(), j, + String.format(US, "Map.get(%d) < INITIAL_CAPACITY", j)); + } + for (int j = INITIAL_CAPACITY; j <= i; j++) { + assertEquals(m.get(new HashCodeInteger(j, 0)).intValue(), j, + String.format(US, "Map.get(%d) >= INITIAL_CAPACITY", j)); + } + } + } + + @Test(dataProvider = "maps") + public void testUntreeifyOnResizeWithTraverse(String d, Map m) { + // Fill the map with 64 entries grouped into 4 buckets + put(INITIAL_CAPACITY, m, (i, s) -> { }); + + Collector> c = getCollector(m); + + for (int i = INITIAL_CAPACITY; i < SIZE; i++) { + // Add further entries in the 0'th bucket so as not to disturb + // other buckets, entries of which may be distributed and/or + // the bucket untreeified on resize + m.put(new HashCodeInteger(i, 0), i); + + Collection actual = m.keySet().stream().map(e -> e.value).collect(c); + Collection expected = IntStream.rangeClosed(0, i).boxed().collect(c); + assertEquals(actual, expected, "Key set"); + } + } + + Collector> getCollector(Map m) { + Collector> collector = m instanceof LinkedHashMap + ? Collectors.toList() + : Collectors.toSet(); + return collector; + } + + void put(int size, Map m, BiConsumer c) { + for (int i = 0; i < size; i++) { + m.put(new HashCodeInteger(i), i); + + c.accept(i, m.size()); + } + } + + void remove(Map m, BiConsumer c) { + int size = m.size(); + // Remove all elements thus ensuring at some point trees will be + // converting back to bins + for (int i = 0; i < size; i++) { + m.remove(new HashCodeInteger(i)); + + c.accept(i, m.size()); + } + } + + static final class HashCodeInteger implements Comparable { + final int value; + + final int hashcode; + + HashCodeInteger(int value) { + this(value, hash(value)); + } + + HashCodeInteger(int value, int hashcode) { + this.value = value; + this.hashcode = hashcode; + } + + static int hash(int i) { + // Assuming 64 entries with keys from 0 to 63 then a map: + // - of capacity 64 will have 4 buckets with 16 entries per-bucket + // - of capacity 128 will have 8 buckets with 8 entries per-bucket + // - of capacity 256 will have 16 buckets with 4 entries per-bucket + // + // Re-sizing will result in re-distribution, doubling the buckets + // and reducing the entries by half. 
This will result in + // untreeifying when the number of entries is less than untreeify + // threshold (see HashMap/ConcurrentMap.UNTREEIFY_THRESHOLD) + return (i % 4) + (i / 4) * INITIAL_CAPACITY; + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof HashCodeInteger) { + HashCodeInteger other = (HashCodeInteger) obj; + return other.value == value; + } + return false; + } + + @Override + public int hashCode() { + return hashcode; + } + + @Override + public int compareTo(HashCodeInteger o) { + return value - o.value; + } + + @Override + public String toString() { + return Integer.toString(value); + } + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/MapWithCollisionsProviders.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/MapWithCollisionsProviders.java new file mode 100644 index 0000000000..8e52404569 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/MapWithCollisionsProviders.java @@ -0,0 +1,297 @@ +/* + * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.github.benmanes.caffeine.openjdk.map; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.Map; +import java.util.function.Supplier; + +import org.testng.annotations.DataProvider; + +import com.github.benmanes.caffeine.cache.Caffeine; + +@SuppressWarnings({"CanIgnoreReturnValueSuggester", "DirectReturn", "DoubleBraceInitialization", + "MixedArrayDimensions", "PreferredInterfaceType", "serial", "UnusedVariable", "YodaCondition"}) +public class MapWithCollisionsProviders { + + private static final int TEST_SIZE + = Boolean.valueOf(System.getProperty("test.map.collisions.shortrun")) + ? 
2500 + : 5000; + + private static final IntKey EXTRA_INTKEY_VAL + = new IntKey(TEST_SIZE, Integer.MAX_VALUE); + + private static final String EXTRA_STRING_VAL = "Extra Value"; + + public static final class IntKey implements Comparable { + + private final int value; + private final int hashmask; //yes duplication + + IntKey(int value, int hashmask) { + this.value = value; + this.hashmask = hashmask; + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof IntKey) { + IntKey other = (IntKey) obj; + + return other.value == value; + } + + return false; + } + + @Override + public int hashCode() { + return value % hashmask; + } + + @Override + public int compareTo(IntKey o) { + return value - o.value; + } + + @Override + public String toString() { + return Integer.toString(value); + } + + public int getValue() { + return value; + } + } + + private static Object[] createUniqueObjectKeys() { + IntKey UNIQUE_OBJECTS[] = new IntKey[TEST_SIZE]; + for (int i = 0; i < TEST_SIZE; i++) { + UNIQUE_OBJECTS[i] = new IntKey(i, Integer.MAX_VALUE); + } + return UNIQUE_OBJECTS; + } + + private static Object[] createUniqueStringKeys() { + String UNIQUE_STRINGS[] = new String[TEST_SIZE]; + for (int i = 0; i < TEST_SIZE; i++) { + UNIQUE_STRINGS[i] = unhash(i); + } + return UNIQUE_STRINGS; + } + + private static Object[] createCollidingObjectKeys() { + IntKey COLLIDING_OBJECTS[] = new IntKey[TEST_SIZE]; + for (int i = 0; i < TEST_SIZE; i++) { + COLLIDING_OBJECTS[i] = new IntKey(i, 10); + } + return COLLIDING_OBJECTS; + } + + private static Object[] createCollidingStringKeys() { + String COLLIDING_STRINGS[] = new String[TEST_SIZE]; + String UNIQUE_STRINGS[] = new String[TEST_SIZE]; + for (int i = 0; i < TEST_SIZE; i++) { + UNIQUE_STRINGS[i] = unhash(i); + COLLIDING_STRINGS[i] = (0 == i % 2) + ? UNIQUE_STRINGS[i / 2] + : "\u0000\u0000\u0000\u0000\u0000" + COLLIDING_STRINGS[i - 1]; + } + return COLLIDING_STRINGS; + } + + /** + * Returns a string with a hash equal to the argument. + * + * @return string with a hash equal to the argument. 
+ */ + private static String unhash(int target) { + StringBuilder answer = new StringBuilder(); + if (target < 0) { + // String with hash of Integer.MIN_VALUE, 0x80000000 + answer.append("\\u0915\\u0009\\u001e\\u000c\\u0002"); + + if (target == Integer.MIN_VALUE) { + return answer.toString(); + } + // Find target without sign bit set + target = target & Integer.MAX_VALUE; + } + + unhash0(answer, target); + return answer.toString(); + } + + private static void unhash0(StringBuilder partial, int target) { + int div = target / 31; + int rem = target % 31; + + if (div <= Character.MAX_VALUE) { + if (div != 0) { + partial.append((char) div); + } + partial.append((char) rem); + } else { + unhash0(partial, div); + partial.append((char) rem); + } + } + + private static Map fillMap(Map m, T[] keys) { + for (T k : keys) { + m.put(k, k); + assertTrue(m.containsKey(k)); + assertTrue(m.containsValue(k)); + } + assertEquals(m.size(), keys.length); + return m; + } + + private static Supplier> createMap(Supplier> supplier, T[] keys) { + return () -> fillMap(supplier.get(), keys); + } + + private static Object[] createCase(String desc, + Supplier> supplier, T[] keys, T val) { + return new Object[]{desc, createMap(supplier, keys), val}; + } + + private static Collection makeMapsMoreTypes(String desc, + T[] keys, + T val) { + Collection cases = new ArrayList<>(); +// cases.add(createCase("Hashtable with " + desc, +// new Hashtable<>(), keys, val)); +// cases.add(createCase("IdentityHashMap with " + desc, +// new IdentityHashMap<>(), keys, val)); +// cases.add(createCase("TreeMap with " + desc, +// new TreeMap<>(), keys, val)); +// cases.add(createCase("Descending TreeMap with " + desc, +// new TreeMap<>().descendingMap(), keys, val)); +// cases.add(createCase("WeakHashMap with " + desc, +// new WeakHashMap<>(), keys, val)); +// cases.add(createCase("ConcurrentHashMap with " + desc, +// new ConcurrentHashMap<>(), keys, val)); +// cases.add(createCase("ConcurrentSkipListMap with " + desc, +// new ConcurrentSkipListMap<>(), keys, val)); + return cases; + } + + private static Collection makeMapsHashMap(String desc, + T[] keys, + T val) { + Collection cases = new ArrayList<>(); +// cases.add(createCase("HashMap with " + desc, +// new HashMap<>(), keys, val)); +// cases.add(createCase("LinkedHashMap with " + desc, +// new LinkedHashMap<>(), keys, val)); + + cases.add(createCase("Bounded with " + desc, () -> + Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build().asMap(), + keys, val)); + cases.add(createCase("Unbounded with " + desc, () -> + Caffeine.newBuilder().build().asMap(), keys, val)); + return cases; + } + + private static Collection makeMaps(String desc, T[] keys, T val) { + Collection cases = new ArrayList<>(); + cases.addAll(makeMapsHashMap(desc, keys, val)); + cases.addAll(makeMapsMoreTypes(desc, keys, val)); + return cases; + } + + private static Collection makeObjectsCases(String desc, T[] keys) { + return makeMaps(desc, keys, EXTRA_INTKEY_VAL); + } + + private static Collection makeStringsCases(String desc, + T[] keys) { + return makeMaps(desc, keys, EXTRA_STRING_VAL); + } + + private static final Collection mapsWithObjectsCases + = new ArrayList<>() { + { + addAll(makeObjectsCases("unique objects", createUniqueObjectKeys())); + addAll(makeObjectsCases("colliding objects", createCollidingObjectKeys())); + } + }; + + private static final Collection mapsWithStringsCases + = new ArrayList<>() { + { + addAll(makeStringsCases("unique 
strings", createUniqueStringKeys())); + addAll(makeStringsCases("colliding strings", createCollidingStringKeys())); + } + }; + + private static final Collection mapsWithObjectsAndStringsCases + = new ArrayList<>() { + { + addAll(mapsWithObjectsCases); + addAll(mapsWithStringsCases); + } + }; + + private static final Collection hashMapsWithObjectsCases + = new ArrayList<>() { + { + addAll(makeMapsHashMap("unique objects", + createUniqueObjectKeys(), EXTRA_INTKEY_VAL)); + addAll(makeMapsHashMap("collisions objects", + createCollidingObjectKeys(), EXTRA_INTKEY_VAL)); + } + }; + + @DataProvider + public Iterator mapsWithObjects() { + return mapsWithObjectsCases.iterator(); + } + + @DataProvider + public Iterator mapsWithStrings() { + return mapsWithStringsCases.iterator(); + } + + @DataProvider + public Iterator mapsWithObjectsAndStrings() { + return mapsWithObjectsAndStringsCases.iterator(); + } + + @DataProvider + public Iterator hashMapsWithObjects() { + return hashMapsWithObjectsCases.iterator(); + } + +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/ToArray.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/ToArray.java new file mode 100644 index 0000000000..641759a8d2 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/ToArray.java @@ -0,0 +1,225 @@ +/* + * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.github.benmanes.caffeine.openjdk.map; + +import static java.util.Locale.US; + +import java.time.Duration; +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Hashtable; +import java.util.IdentityHashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; +import java.util.WeakHashMap; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentSkipListMap; + +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/* + * @test + * @bug 8008785 + * @summary Ensure toArray() implementations return correct results. 
+ * @author Mike Duigou + */ +@SuppressWarnings({"AlmostJavadoc", "BoxedPrimitiveEquality", "CatchingUnchecked", + "IdentityConversion", "InconsistentOverloads", "JdkObsolete", "MultiVariableDeclaration", + "NonAtomicVolatileUpdate", "NonFinalStaticField", "rawtypes", "ReferenceEquality", "SystemOut", + "unchecked", "UnnecessaryBoxedAssignment", "UnnecessaryParentheses", "UnusedVariable"}) +public class ToArray { + + /** + * Number of elements per map. + */ + private static final int TEST_SIZE = 5000; + + @Test + public void bounded() throws Exception { + Cache cache = Caffeine.newBuilder() + .expireAfterWrite(Duration.ofNanos(Long.MAX_VALUE)) + .maximumSize(Long.MAX_VALUE) + .build(); + testMap(cache.asMap()); + } + + @Test + public void unbounded() throws Exception { + Cache cache = Caffeine.newBuilder().build(); + testMap(cache.asMap()); + } + + private static void realMain(String[] args) throws Throwable { + Map[] maps = new Map[]{ + new HashMap<>(), + new Hashtable<>(), + new IdentityHashMap<>(), + new LinkedHashMap<>(), + new TreeMap<>(), + new WeakHashMap<>(), + new ConcurrentHashMap<>(), + new ConcurrentSkipListMap<>() + }; + + // for each map type. + for (Map map : maps) { + try { + testMap(map); + } catch(Exception all) { + unexpected("Failed for " + map.getClass().getName(), all); + } + } + } + + private static final Integer[] KEYS = new Integer[TEST_SIZE]; + + private static final Long[] VALUES = new Long[TEST_SIZE]; + + static { + for (int each = 0; each < TEST_SIZE; each++) { + KEYS[each] = Integer.valueOf(each); + VALUES[each] = Long.valueOf(each + TEST_SIZE); + } + } + + + private static void testMap(Map map) { + System.out.println("Testing " + map.getClass()); + System.out.flush(); + + // Fill the map + for (int each = 0; each < TEST_SIZE; each++) { + map.put(KEYS[each], VALUES[each]); + } + + // check the keys + Object[] keys = map.keySet().toArray(); + Arrays.sort(keys); + + for(int each = 0; each < TEST_SIZE; each++) { + check( "unexpected key", keys[each] == KEYS[each]); + } + + // check the values + Object[] values = map.values().toArray(); + Arrays.sort(values); + + for(int each = 0; each < TEST_SIZE; each++) { + check( "unexpected value", values[each] == VALUES[each]); + } + + // check the entries + Map.Entry[] entries = map.entrySet().toArray(new Map.Entry[TEST_SIZE]); + Arrays.sort( entries,new Comparator>() { + @Override + public int compare(Map.Entry o1, Map.Entry o2) { + return o1.getKey().compareTo(o2.getKey()); + }}); + + for(int each = 0; each < TEST_SIZE; each++) { + check( "unexpected entry", entries[each].getKey() == KEYS[each] && entries[each].getValue() == VALUES[each]); + } + } + + //--------------------- Infrastructure --------------------------- + static volatile int passed = 0, failed = 0; + + static void pass() { + passed++; + } + + static void fail() { + failed++; + (new Error("Failure")).printStackTrace(System.err); + } + + static void fail(String msg) { + failed++; + (new Error("Failure: " + msg)).printStackTrace(System.err); + } + + static void abort() { + fail(); + System.exit(1); + } + + static void abort(String msg) { + fail(msg); + System.exit(1); + } + + static void unexpected(String msg, Throwable t) { + System.err.println("Unexpected: " + msg); + unexpected(t); + } + + static void unexpected(Throwable t) { + failed++; + t.printStackTrace(System.err); + } + + static void check(boolean cond) { + if (cond) { + pass(); + } else { + fail(); + } + } + + static void check(String desc, boolean cond) { + if (cond) { + pass(); + } else { + 
fail(desc); + } + } + + static void equal(Object x, Object y) { + if (Objects.equals(x, y)) { + pass(); + } else { + fail(x + " not equal to " + y); + } + } + + public static void main(String[] args) throws Throwable { + Thread.currentThread().setName(ToArray.class.getName()); +// Thread.currentThread().setPriority(Thread.MAX_PRIORITY); + try { + realMain(args); + } catch (Throwable t) { + unexpected(t); + } + + System.out.printf(US, "%nPassed = %d, failed = %d%n%n", passed, failed); + if (failed > 0) { + throw new Error("Some tests failed"); + } + } +} diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/package-info.java b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/package-info.java new file mode 100644 index 0000000000..58105217b7 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/openjdk/map/package-info.java @@ -0,0 +1,7 @@ +@NullMarked +@CheckReturnValue +package com.github.benmanes.caffeine.openjdk.map; + +import org.jspecify.annotations.NullMarked; + +import com.google.errorprone.annotations.CheckReturnValue; diff --git a/examples/coalescing-bulkloader-reactor/settings.gradle.kts b/examples/coalescing-bulkloader-reactor/settings.gradle.kts index 53023a7268..78ef9ea336 100644 --- a/examples/coalescing-bulkloader-reactor/settings.gradle.kts +++ b/examples/coalescing-bulkloader-reactor/settings.gradle.kts @@ -1,7 +1,7 @@ plugins { id("com.gradle.develocity") version "3.19" id("com.gradle.common-custom-user-data-gradle-plugin") version "2.0.2" - id("org.gradle.toolchains.foojay-resolver-convention") version "0.8.0" + id("org.gradle.toolchains.foojay-resolver-convention") version "0.9.0" } dependencyResolutionManagement { diff --git a/examples/graal-native/settings.gradle.kts b/examples/graal-native/settings.gradle.kts index 1bb26f570f..28406a0f2a 100644 --- a/examples/graal-native/settings.gradle.kts +++ b/examples/graal-native/settings.gradle.kts @@ -7,7 +7,7 @@ pluginManagement { plugins { id("com.gradle.develocity") version "3.19" id("com.gradle.common-custom-user-data-gradle-plugin") version "2.0.2" - id("org.gradle.toolchains.foojay-resolver-convention") version "0.8.0" + id("org.gradle.toolchains.foojay-resolver-convention") version "0.9.0" } dependencyResolutionManagement { diff --git a/examples/hibernate/settings.gradle.kts b/examples/hibernate/settings.gradle.kts index a6fd48777d..f82406bd98 100644 --- a/examples/hibernate/settings.gradle.kts +++ b/examples/hibernate/settings.gradle.kts @@ -1,7 +1,7 @@ plugins { id("com.gradle.develocity") version "3.19" id("com.gradle.common-custom-user-data-gradle-plugin") version "2.0.2" - id("org.gradle.toolchains.foojay-resolver-convention") version "0.8.0" + id("org.gradle.toolchains.foojay-resolver-convention") version "0.9.0" } dependencyResolutionManagement { diff --git a/examples/indexable/settings.gradle.kts b/examples/indexable/settings.gradle.kts index 58d363260e..571662f93d 100644 --- a/examples/indexable/settings.gradle.kts +++ b/examples/indexable/settings.gradle.kts @@ -1,7 +1,7 @@ plugins { id("com.gradle.develocity") version "3.19" id("com.gradle.common-custom-user-data-gradle-plugin") version "2.0.2" - id("org.gradle.toolchains.foojay-resolver-convention") version "0.8.0" + id("org.gradle.toolchains.foojay-resolver-convention") version "0.9.0" } dependencyResolutionManagement { diff --git a/examples/resilience-failsafe/settings.gradle.kts b/examples/resilience-failsafe/settings.gradle.kts index 090e9af4ed..93043dbbe0 100644 --- 
a/examples/resilience-failsafe/settings.gradle.kts +++ b/examples/resilience-failsafe/settings.gradle.kts @@ -1,7 +1,7 @@ plugins { id("com.gradle.develocity") version "3.19" id("com.gradle.common-custom-user-data-gradle-plugin") version "2.0.2" - id("org.gradle.toolchains.foojay-resolver-convention") version "0.8.0" + id("org.gradle.toolchains.foojay-resolver-convention") version "0.9.0" } dependencyResolutionManagement { diff --git a/examples/write-behind-rxjava/settings.gradle.kts b/examples/write-behind-rxjava/settings.gradle.kts index c288943379..5fe9123026 100644 --- a/examples/write-behind-rxjava/settings.gradle.kts +++ b/examples/write-behind-rxjava/settings.gradle.kts @@ -1,7 +1,7 @@ plugins { id("com.gradle.develocity") version "3.19" id("com.gradle.common-custom-user-data-gradle-plugin") version "2.0.2" - id("org.gradle.toolchains.foojay-resolver-convention") version "0.8.0" + id("org.gradle.toolchains.foojay-resolver-convention") version "0.9.0" } dependencyResolutionManagement { diff --git a/gradle/config/pmd/rulesSets-test.xml b/gradle/config/pmd/rulesSets-test.xml index 9c8d7291d7..6bcf9d8651 100644 --- a/gradle/config/pmd/rulesSets-test.xml +++ b/gradle/config/pmd/rulesSets-test.xml @@ -6,6 +6,7 @@ PMD rules for Caffeine tests .*/guava/compatibility/.* .*/eclipse/.* + .*/openjdk/.* .*/jsr166/.* diff --git a/gradle/config/spotbugs/exclude.xml b/gradle/config/spotbugs/exclude.xml index 301505ca2e..1aa371183e 100644 --- a/gradle/config/spotbugs/exclude.xml +++ b/gradle/config/spotbugs/exclude.xml @@ -493,8 +493,9 @@ - + + diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index 7c60625e97..1590afa640 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -65,7 +65,7 @@ junit5 = "5.11.4" jvm-dependency-conflict-resolution = "2.1.2" kotlin = "2.1.0" lincheck = "2.34" -mockito = "5.14.2" +mockito = "5.15.2" nexus-publish = "2.0.0" nullaway = "0.12.2" nullaway-plugin = "2.1.0" @@ -96,7 +96,7 @@ versions = "0.51.0" xz = "1.10" ycsb = "0.17.0" zero-allocation-hashing = "0.27ea0" -zstd = "1.5.6-8" +zstd = "1.5.6-9" [libraries] asm-bom = { module = "org.ow2.asm:asm-bom", version.ref = "asm" } diff --git a/gradle/plugins/settings.gradle.kts b/gradle/plugins/settings.gradle.kts index c8e64b1f28..e4aa6a8197 100644 --- a/gradle/plugins/settings.gradle.kts +++ b/gradle/plugins/settings.gradle.kts @@ -1,7 +1,7 @@ plugins { id("com.gradle.develocity") version "3.19" id("com.gradle.common-custom-user-data-gradle-plugin") version "2.0.2" - id("org.gradle.toolchains.foojay-resolver-convention") version "0.8.0" + id("org.gradle.toolchains.foojay-resolver-convention") version "0.9.0" } dependencyResolutionManagement { diff --git a/simulator/src/main/java/com/github/benmanes/caffeine/cache/simulator/policy/PolicyStats.java b/simulator/src/main/java/com/github/benmanes/caffeine/cache/simulator/policy/PolicyStats.java index 894af8bb8d..0186d1804d 100644 --- a/simulator/src/main/java/com/github/benmanes/caffeine/cache/simulator/policy/PolicyStats.java +++ b/simulator/src/main/java/com/github/benmanes/caffeine/cache/simulator/policy/PolicyStats.java @@ -97,7 +97,7 @@ public PolicyStats(String format, Object... 
args) { .type(PERCENT).addCharacteristic(WEIGHTED)); addPercentMetric("Adaption", this::percentAdaption); addMetric("Average Miss Penalty", this::averageMissPenalty); - addMetric("Average Penalty", this::avergePenalty); + addMetric("Average Penalty", this::averagePenalty); addMetric("Steps", this::operationCount); addMetric("Time", this::stopwatch); } @@ -283,12 +283,12 @@ public double complexity() { return (requestCount == 0) ? 0.0 : (double) operationCount / requestCount; } - public double avergePenalty() { + public double averagePenalty() { long requestCount = requestCount(); return (requestCount == 0) ? 0.0 : totalPenalty() / requestCount; } - public double avergeHitPenalty() { + public double averageHitPenalty() { return (hitCount == 0) ? 0.0 : hitPenalty / hitCount; }
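For reference, the ported map tests above all lean on the same device: key types whose hashCode() is deliberately degenerate (HashCodeInteger pins keys into chosen buckets, IntKey reduces the hash modulo a small mask) so that entries collide and the backing table is pushed through its collision handling, treeifying and untreeifying bins in HashMap/ConcurrentHashMap terms, while the functional checks keep passing. A minimal standalone sketch of that idea against a Caffeine map view follows; it is not part of this change, and the names CollidingKeysSketch and CollidingKey are illustrative only.

import java.util.concurrent.ConcurrentMap;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

public final class CollidingKeysSketch {

  /** Every instance reports the same hash code, so all entries share one bin. */
  static final class CollidingKey {
    final int value;

    CollidingKey(int value) {
      this.value = value;
    }

    @Override public boolean equals(Object o) {
      return (o instanceof CollidingKey) && (((CollidingKey) o).value == value);
    }

    @Override public int hashCode() {
      return 42; // constant on purpose; equals() still distinguishes the keys
    }
  }

  public static void main(String[] args) {
    // A bounded cache, in the spirit of the "Bounded" provider used by the tests above
    Cache<CollidingKey, Integer> cache = Caffeine.newBuilder()
        .maximumSize(Long.MAX_VALUE)
        .build();
    ConcurrentMap<CollidingKey, Integer> map = cache.asMap();

    // Insert enough colliding keys to exercise the table's collision handling,
    // then read each one back and remove it; every lookup must find the right entry
    int size = 256;
    for (int i = 0; i < size; i++) {
      map.put(new CollidingKey(i), i);
    }
    for (int i = 0; i < size; i++) {
      Integer found = map.get(new CollidingKey(i));
      if ((found == null) || (found != i)) {
        throw new AssertionError("lookup failed for key " + i);
      }
      map.remove(new CollidingKey(i));
    }
    if (!map.isEmpty()) {
      throw new AssertionError("expected an empty map after removals");
    }
  }
}

Run as a plain main(), the sketch throws if any lookup or removal misbehaves; whether the underlying table actually converts bins to trees is an implementation detail, and the only contract being exercised here is that of ConcurrentMap.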
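Similarly, the colliding-string provider above works because String.hashCode() folds characters as h = 31 * h + c: a prefix of '\u0000' characters leaves the running hash at zero, so the prefixed string hashes identically to the original while remaining a distinct string. A tiny sketch, again illustrative only (the class name StringCollisionSketch is not part of this change):

public final class StringCollisionSketch {
  public static void main(String[] args) {
    String original = "collide";
    String prefixed = "\u0000\u0000\u0000\u0000\u0000" + original;
    // Distinct strings, identical hash codes: the leading NUL characters
    // contribute nothing to the rolling 31 * h + c computation
    if (original.equals(prefixed) || (original.hashCode() != prefixed.hashCode())) {
      throw new AssertionError("expected distinct strings with equal hash codes");
    }
    System.out.println(original.hashCode() + " == " + prefixed.hashCode());
  }
}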