Fix a nasty bug where elements in LinkedHashTreeMap could be dropped.

The underlying problem is that the doubleCapacity function dropped parent links whenever all nodes ended up on the same side during a doubling. This happened because the AvlIterator is destructive on parent links, and we weren't rebuilding them with the AvlBuilder in that case. This removes an incorrect optimization and fixes the problem.

Also move LinkedHashTreeMap back into main from test.
This commit is contained in:
Jesse Wilson 2013-05-14 21:43:20 +00:00
parent d2b660570e
commit 1840466704
2 changed files with 25 additions and 34 deletions

View File

@@ -38,8 +38,6 @@ import java.util.Set;
* LinkedHashMap classes.
*/
public final class LinkedHashTreeMap<K, V> extends AbstractMap<K, V> implements Serializable {
private static final int MAX_CAPACITY = 8192;
@SuppressWarnings({ "unchecked", "rawtypes" }) // to avoid Comparable<Comparable<Comparable<...>>>
private static final Comparator<Comparable> NATURAL_ORDER = new Comparator<Comparable>() {
public int compare(Comparable a, Comparable b) {
@@ -566,15 +564,10 @@ public final class LinkedHashTreeMap<K, V> extends AbstractMap<K, V> implements
* twice as many trees, each of (approximately) half the previous size.
*/
static <K, V> Node<K, V>[] doubleCapacity(Node<K, V>[] oldTable) {
// TODO: don't do anything if we're already at MAX_CAPACITY
int oldCapacity = oldTable.length;
if (oldCapacity >= MAX_CAPACITY) {
return oldTable;
}
int newCapacity = oldCapacity * 2;
@SuppressWarnings("unchecked") // Arrays and generics don't get along.
Node<K, V>[] newTable = new Node[newCapacity];
Node<K, V>[] newTable = new Node[oldCapacity * 2];
AvlIterator<K, V> iterator = new AvlIterator<K, V>();
AvlBuilder<K, V> leftBuilder = new AvlBuilder<K, V>();
AvlBuilder<K, V> rightBuilder = new AvlBuilder<K, V>();
@ -591,7 +584,7 @@ public final class LinkedHashTreeMap<K, V> extends AbstractMap<K, V> implements
int leftSize = 0;
int rightSize = 0;
for (Node<K, V> node; (node = iterator.next()) != null; ) {
if ((node.hash & (newCapacity - 1)) == i) {
if ((node.hash & oldCapacity) == 0) {
leftSize++;
} else {
rightSize++;
@ -599,30 +592,20 @@ public final class LinkedHashTreeMap<K, V> extends AbstractMap<K, V> implements
}
// Split the tree into two.
Node<K, V> leftRoot = null;
Node<K, V> rightRoot = null;
if (leftSize > 0 && rightSize > 0) {
leftBuilder.reset(leftSize);
rightBuilder.reset(rightSize);
iterator.reset(root);
for (Node<K, V> node; (node = iterator.next()) != null; ) {
if ((node.hash & (newCapacity - 1)) == i) {
leftBuilder.add(node);
} else {
rightBuilder.add(node);
}
leftBuilder.reset(leftSize);
rightBuilder.reset(rightSize);
iterator.reset(root);
for (Node<K, V> node; (node = iterator.next()) != null; ) {
if ((node.hash & oldCapacity) == 0) {
leftBuilder.add(node);
} else {
rightBuilder.add(node);
}
leftRoot = leftBuilder.root();
rightRoot = rightBuilder.root();
} else if (leftSize > 0) {
leftRoot = root;
} else {
rightRoot = root;
}
// Populate the enlarged array with these new roots.
newTable[i] = leftRoot;
newTable[i + oldCapacity] = rightRoot;
newTable[i] = leftSize > 0 ? leftBuilder.root() : null;
newTable[i + oldCapacity] = rightSize > 0 ? rightBuilder.root() : null;
}
return newTable;
}

View File

@@ -20,14 +20,12 @@ import com.google.gson.common.MoreAsserts;
import com.google.gson.internal.LinkedHashTreeMap.AvlBuilder;
import com.google.gson.internal.LinkedHashTreeMap.AvlIterator;
import com.google.gson.internal.LinkedHashTreeMap.Node;
import junit.framework.TestCase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;
import junit.framework.TestCase;
public final class LinkedHashTreeMapTest extends TestCase {
public void testIterationOrder() {
@@ -104,7 +102,7 @@ public final class LinkedHashTreeMapTest extends TestCase {
// NOTE that this does not happen every time, but given the below predictable random,
// this test will consistently fail (assuming the initial size is 16 and rehashing
// size remains at 3/4)
public void disabled_testForceDoublingAndRehash() throws Exception {
public void testForceDoublingAndRehash() throws Exception {
Random random = new Random(1367593214724L);
LinkedHashTreeMap<String, String> map = new LinkedHashTreeMap<String, String>();
String[] keys = new String[1000];
@@ -209,6 +207,16 @@ public final class LinkedHashTreeMapTest extends TestCase {
Node<String, String>[] newTable = LinkedHashTreeMap.doubleCapacity(oldTable);
assertTree("(b d f)", newTable[0]); // Even hash codes!
assertTree("(a c (. e g))", newTable[1]); // Odd hash codes!
}
public void testDoubleCapacityAllNodesOnLeft() {
@SuppressWarnings("unchecked") // Arrays and generics don't get along.
Node<String, String>[] oldTable = new Node[1];
oldTable[0] = node(node("b"), "d", node("f"));
Node<String, String>[] newTable = LinkedHashTreeMap.doubleCapacity(oldTable);
assertTree("(b d f)", newTable[0]); // Even hash codes!
assertNull(newTable[1]); // Odd hash codes!
for (Node<?, ?> node : newTable) {
if (node != null) {