Fix a nasty bug where elements in LinkedHashTreeMap could be dropped.
The underlying problem is that the doubleCapacity function would drop the parent links when all nodes ended up on the same side in a doubling. The cause was that the AvlIterator was destructive on parent nodes, and we weren't restoring them with the AvlBuilder afterwards. This removes an incorrect optimization and fixes the problem. Also move LinkedHashTreeMap back into main from test.
This commit is contained in:
parent
d2b660570e
commit
1840466704
@ -38,8 +38,6 @@ import java.util.Set;
|
|||||||
* LinkedHashMap classes.
|
* LinkedHashMap classes.
|
||||||
*/
|
*/
|
||||||
public final class LinkedHashTreeMap<K, V> extends AbstractMap<K, V> implements Serializable {
|
public final class LinkedHashTreeMap<K, V> extends AbstractMap<K, V> implements Serializable {
|
||||||
private static final int MAX_CAPACITY = 8192;
|
|
||||||
|
|
||||||
@SuppressWarnings({ "unchecked", "rawtypes" }) // to avoid Comparable<Comparable<Comparable<...>>>
|
@SuppressWarnings({ "unchecked", "rawtypes" }) // to avoid Comparable<Comparable<Comparable<...>>>
|
||||||
private static final Comparator<Comparable> NATURAL_ORDER = new Comparator<Comparable>() {
|
private static final Comparator<Comparable> NATURAL_ORDER = new Comparator<Comparable>() {
|
||||||
public int compare(Comparable a, Comparable b) {
|
public int compare(Comparable a, Comparable b) {
|
||||||
@ -566,15 +564,10 @@ public final class LinkedHashTreeMap<K, V> extends AbstractMap<K, V> implements
|
|||||||
* twice as many trees, each of (approximately) half the previous size.
|
* twice as many trees, each of (approximately) half the previous size.
|
||||||
*/
|
*/
|
||||||
static <K, V> Node<K, V>[] doubleCapacity(Node<K, V>[] oldTable) {
|
static <K, V> Node<K, V>[] doubleCapacity(Node<K, V>[] oldTable) {
|
||||||
|
// TODO: don't do anything if we're already at MAX_CAPACITY
|
||||||
int oldCapacity = oldTable.length;
|
int oldCapacity = oldTable.length;
|
||||||
if (oldCapacity >= MAX_CAPACITY) {
|
|
||||||
return oldTable;
|
|
||||||
}
|
|
||||||
|
|
||||||
int newCapacity = oldCapacity * 2;
|
|
||||||
|
|
||||||
@SuppressWarnings("unchecked") // Arrays and generics don't get along.
|
@SuppressWarnings("unchecked") // Arrays and generics don't get along.
|
||||||
Node<K, V>[] newTable = new Node[newCapacity];
|
Node<K, V>[] newTable = new Node[oldCapacity * 2];
|
||||||
AvlIterator<K, V> iterator = new AvlIterator<K, V>();
|
AvlIterator<K, V> iterator = new AvlIterator<K, V>();
|
||||||
AvlBuilder<K, V> leftBuilder = new AvlBuilder<K, V>();
|
AvlBuilder<K, V> leftBuilder = new AvlBuilder<K, V>();
|
||||||
AvlBuilder<K, V> rightBuilder = new AvlBuilder<K, V>();
|
AvlBuilder<K, V> rightBuilder = new AvlBuilder<K, V>();
|
||||||
@ -591,7 +584,7 @@ public final class LinkedHashTreeMap<K, V> extends AbstractMap<K, V> implements
|
|||||||
int leftSize = 0;
|
int leftSize = 0;
|
||||||
int rightSize = 0;
|
int rightSize = 0;
|
||||||
for (Node<K, V> node; (node = iterator.next()) != null; ) {
|
for (Node<K, V> node; (node = iterator.next()) != null; ) {
|
||||||
if ((node.hash & (newCapacity - 1)) == i) {
|
if ((node.hash & oldCapacity) == 0) {
|
||||||
leftSize++;
|
leftSize++;
|
||||||
} else {
|
} else {
|
||||||
rightSize++;
|
rightSize++;
|
||||||
@ -599,30 +592,20 @@ public final class LinkedHashTreeMap<K, V> extends AbstractMap<K, V> implements
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Split the tree into two.
|
// Split the tree into two.
|
||||||
Node<K, V> leftRoot = null;
|
leftBuilder.reset(leftSize);
|
||||||
Node<K, V> rightRoot = null;
|
rightBuilder.reset(rightSize);
|
||||||
if (leftSize > 0 && rightSize > 0) {
|
iterator.reset(root);
|
||||||
leftBuilder.reset(leftSize);
|
for (Node<K, V> node; (node = iterator.next()) != null; ) {
|
||||||
rightBuilder.reset(rightSize);
|
if ((node.hash & oldCapacity) == 0) {
|
||||||
iterator.reset(root);
|
leftBuilder.add(node);
|
||||||
for (Node<K, V> node; (node = iterator.next()) != null; ) {
|
} else {
|
||||||
if ((node.hash & (newCapacity - 1)) == i) {
|
rightBuilder.add(node);
|
||||||
leftBuilder.add(node);
|
|
||||||
} else {
|
|
||||||
rightBuilder.add(node);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
leftRoot = leftBuilder.root();
|
|
||||||
rightRoot = rightBuilder.root();
|
|
||||||
} else if (leftSize > 0) {
|
|
||||||
leftRoot = root;
|
|
||||||
} else {
|
|
||||||
rightRoot = root;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Populate the enlarged array with these new roots.
|
// Populate the enlarged array with these new roots.
|
||||||
newTable[i] = leftRoot;
|
newTable[i] = leftSize > 0 ? leftBuilder.root() : null;
|
||||||
newTable[i + oldCapacity] = rightRoot;
|
newTable[i + oldCapacity] = rightSize > 0 ? rightBuilder.root() : null;
|
||||||
}
|
}
|
||||||
return newTable;
|
return newTable;
|
||||||
}
|
}
|
@ -20,14 +20,12 @@ import com.google.gson.common.MoreAsserts;
|
|||||||
import com.google.gson.internal.LinkedHashTreeMap.AvlBuilder;
|
import com.google.gson.internal.LinkedHashTreeMap.AvlBuilder;
|
||||||
import com.google.gson.internal.LinkedHashTreeMap.AvlIterator;
|
import com.google.gson.internal.LinkedHashTreeMap.AvlIterator;
|
||||||
import com.google.gson.internal.LinkedHashTreeMap.Node;
|
import com.google.gson.internal.LinkedHashTreeMap.Node;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Random;
|
import java.util.Random;
|
||||||
|
import junit.framework.TestCase;
|
||||||
|
|
||||||
public final class LinkedHashTreeMapTest extends TestCase {
|
public final class LinkedHashTreeMapTest extends TestCase {
|
||||||
public void testIterationOrder() {
|
public void testIterationOrder() {
|
||||||
@ -104,7 +102,7 @@ public final class LinkedHashTreeMapTest extends TestCase {
|
|||||||
// NOTE that this does not happen every time, but given the below predictable random,
|
// NOTE that this does not happen every time, but given the below predictable random,
|
||||||
// this test will consistently fail (assuming the initial size is 16 and rehashing
|
// this test will consistently fail (assuming the initial size is 16 and rehashing
|
||||||
// size remains at 3/4)
|
// size remains at 3/4)
|
||||||
public void disabled_testForceDoublingAndRehash() throws Exception {
|
public void testForceDoublingAndRehash() throws Exception {
|
||||||
Random random = new Random(1367593214724L);
|
Random random = new Random(1367593214724L);
|
||||||
LinkedHashTreeMap<String, String> map = new LinkedHashTreeMap<String, String>();
|
LinkedHashTreeMap<String, String> map = new LinkedHashTreeMap<String, String>();
|
||||||
String[] keys = new String[1000];
|
String[] keys = new String[1000];
|
||||||
@ -209,6 +207,16 @@ public final class LinkedHashTreeMapTest extends TestCase {
|
|||||||
Node<String, String>[] newTable = LinkedHashTreeMap.doubleCapacity(oldTable);
|
Node<String, String>[] newTable = LinkedHashTreeMap.doubleCapacity(oldTable);
|
||||||
assertTree("(b d f)", newTable[0]); // Even hash codes!
|
assertTree("(b d f)", newTable[0]); // Even hash codes!
|
||||||
assertTree("(a c (. e g))", newTable[1]); // Odd hash codes!
|
assertTree("(a c (. e g))", newTable[1]); // Odd hash codes!
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testDoubleCapacityAllNodesOnLeft() {
|
||||||
|
@SuppressWarnings("unchecked") // Arrays and generics don't get along.
|
||||||
|
Node<String, String>[] oldTable = new Node[1];
|
||||||
|
oldTable[0] = node(node("b"), "d", node("f"));
|
||||||
|
|
||||||
|
Node<String, String>[] newTable = LinkedHashTreeMap.doubleCapacity(oldTable);
|
||||||
|
assertTree("(b d f)", newTable[0]); // Even hash codes!
|
||||||
|
assertNull(newTable[1]); // Odd hash codes!
|
||||||
|
|
||||||
for (Node<?, ?> node : newTable) {
|
for (Node<?, ?> node : newTable) {
|
||||||
if (node != null) {
|
if (node != null) {
|
||||||
|
Loading…
Reference in New Issue
Block a user