premature optimization

This commit is contained in:
Leijurv 2019-01-30 20:19:54 -08:00
parent 272dd79426
commit 59e0d6406e
No known key found for this signature in database
GPG Key ID: 44A3EA646EADAC6A
2 changed files with 73 additions and 20 deletions

View File

@@ -72,6 +72,7 @@ public final class BinaryHeapOpenSet implements IOpenSet {
int index = val.heapPosition;
int parentInd = index >>> 1;
double cost = val.combinedCost;
PathNode[] array = this.array;
PathNode parentNode = array[parentInd];
while (index > 1 && parentNode.combinedCost > cost) {
array[index] = parentNode;
@@ -94,40 +95,50 @@ public final class BinaryHeapOpenSet implements IOpenSet {
if (size == 0) {
throw new IllegalStateException();
}
PathNode[] array = this.array;
PathNode result = array[1];
PathNode val = array[size];
array[1] = val;
val.heapPosition = 1;
array[size] = null;
size--;
int size = --this.size;
result.heapPosition = -1;
if (size < 2) {
return result;
}
int index = 1;
int smallerChild = 2;
int smallerChild = 1;
double cost = val.combinedCost;
do {
while ((smallerChild <<= 1) < size) {
PathNode smallerChildNode = array[smallerChild];
PathNode otherChildNode = array[smallerChild + 1];
double smallerChildCost = smallerChildNode.combinedCost;
if (smallerChild < size) {
PathNode rightChildNode = array[smallerChild + 1];
double rightChildCost = rightChildNode.combinedCost;
if (smallerChildCost > rightChildCost) {
smallerChild++;
smallerChildCost = rightChildCost;
smallerChildNode = rightChildNode;
}
double rightChildCost = otherChildNode.combinedCost;
if (smallerChildCost > rightChildCost) {
smallerChild++;
smallerChildCost = rightChildCost;
smallerChildNode = otherChildNode;
}
if (cost <= smallerChildCost) {
break;
return result;
}
array[index] = smallerChildNode;
array[smallerChild] = val;
val.heapPosition = smallerChild;
smallerChildNode.heapPosition = index;
index = smallerChild;
} while ((smallerChild <<= 1) <= size);
}
// if we get here, then smallerChild >= size
// one last swap to check
if (smallerChild == size) {
PathNode onlyChildNode = array[smallerChild];
if (cost > onlyChildNode.combinedCost) {
array[index] = onlyChildNode;
array[smallerChild] = val;
val.heapPosition = smallerChild;
onlyChildNode.heapPosition = index;
}
}
return result;
}
}

View File

@@ -36,24 +36,39 @@ public class OpenSetsTest {
this.size = size;
}
private static final ArrayList<Long> insertions = new ArrayList<>();
private static final ArrayList<Long> removal1 = new ArrayList<>();
private static final ArrayList<Long> removal2 = new ArrayList<>();
@Parameterized.Parameters
public static Collection<Object[]> data() {
ArrayList<Object[]> testSizes = new ArrayList<>();
for (int size = 1; size < 20; size++) {
/*for (int size = 1; size < 20; size++) {
testSizes.add(new Object[]{size});
}
for (int size = 100; size <= 1000; size += 100) {
testSizes.add(new Object[]{size});
}
testSizes.add(new Object[]{5000});
testSizes.add(new Object[]{10000});
testSizes.add(new Object[]{10000});*/
testSizes.add(new Object[]{100000});
testSizes.add(new Object[]{100000});
testSizes.add(new Object[]{100000});
testSizes.add(new Object[]{100000});
testSizes.add(new Object[]{100000});
testSizes.add(new Object[]{100000});
testSizes.add(new Object[]{100000});
testSizes.add(new Object[]{100000});
testSizes.add(new Object[]{100000});
testSizes.add(new Object[]{100000});
testSizes.add(new Object[]{100000});
return testSizes;
}
private static void removeAndTest(int amount, IOpenSet[] test, Optional<Collection<PathNode>> mustContain) {
double[][] results = new double[test.length][amount];
for (int i = 0; i < test.length; i++) {
long before = System.nanoTime() / 1000000L;
long before = System.nanoTime();
for (int j = 0; j < amount; j++) {
PathNode pn = test[i].removeLowest();
if (mustContain.isPresent() && !mustContain.get().contains(pn)) {
@@ -61,7 +76,13 @@ public class OpenSetsTest {
}
results[i][j] = pn.combinedCost;
}
System.out.println(test[i].getClass() + " " + (System.nanoTime() / 1000000L - before));
long time = System.nanoTime() - before;
System.out.println(test[i].getClass() + " " + (time));
if (mustContain.isPresent()) {
removal1.add(time);
} else {
removal2.add(time);
}
}
for (int j = 0; j < amount; j++) {
for (int i = 1; i < test.length; i++) {
@@ -75,10 +96,16 @@ public class OpenSetsTest {
@Test
public void testSize() {
try {
System.gc();
Thread.sleep(1000L);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println("Testing size " + size);
// Include LinkedListOpenSet even though it's not performant because I absolutely trust that it behaves properly
// I'm really testing the heap implementations against it as the ground truth
IOpenSet[] test = new IOpenSet[]{new BinaryHeapOpenSet(), new LinkedListOpenSet()};
IOpenSet[] test = new IOpenSet[]{new BinaryHeapOpenSet()/*, new LinkedListOpenSet()*/};
for (IOpenSet set : test) {
assertTrue(set.isEmpty());
}
@@ -116,10 +143,12 @@ public class OpenSetsTest {
System.out.println("Insertion");
for (IOpenSet set : test) {
long before = System.nanoTime() / 1000000L;
long before = System.nanoTime();
for (int i = 0; i < size; i++)
set.insert(toInsert[i]);
System.out.println(set.getClass() + " " + (System.nanoTime() / 1000000L - before));
long time = System.nanoTime() - before;
System.out.println(set.getClass() + " " + (time));
insertions.add(time);
//all three take either 0 or 1ms to insert up to 10,000 nodes
//linkedlist takes 0ms most often (because there's no array resizing or allocation there, just pointer shuffling)
}
@@ -166,5 +195,18 @@ public class OpenSetsTest {
for (IOpenSet set : test) {
assertTrue(set.isEmpty());
}
printo(insertions);
printo(removal1);
printo(removal2);
}
private static void printo(ArrayList<Long> data) {
if (data.size() < 3) {
return;
}
System.out.println(data.subList(2, data.size()).stream().mapToLong(x -> x).average().getAsDouble() / 1000000D);
}
}