mirror of https://github.com/nosqlbench/nosqlbench.git
synced 2024-12-25 08:11:06 -06:00
add hash range scaled with scale factor parameter
This commit is contained in:
parent 69ad2b54e4
commit c3910d69a4
@@ -9,11 +9,23 @@ import java.util.function.LongToIntFunction;
 public class HashRangeScaled implements LongToIntFunction {
 
     private final Hash hash = new Hash();
+    private final double scalefactor;
+
+    public HashRangeScaled(double scalefactor) {
+        this.scalefactor = scalefactor;
+    }
+
+    public HashRangeScaled() {
+        this.scalefactor = 1.0D;
+    }
+
 
     @Override
     public int applyAsInt(long operand) {
-        if (operand==0) { return 0; }
+        if (operand == 0) {
+            return 0;
+        }
         long l = hash.applyAsLong(operand);
-        return (int) ((l % operand) % Integer.MAX_VALUE);
+        return (int) (((l % operand) * scalefactor) % Integer.MAX_VALUE);
     }
 }
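Note: with this change the to-int variant bounds its output by operand * scalefactor instead of by operand alone. Below is a minimal standalone sketch of the same arithmetic; the bit mixer is a Stafford-variant-13 stand-in for the library's internal Hash function (an assumption made for self-containment, not the real implementation):

import java.util.function.LongToIntFunction;

// Standalone sketch of the patched arithmetic; standInHash is NOT the library's Hash.
class HashRangeScaledSketch implements LongToIntFunction {

    private final double scalefactor;

    HashRangeScaledSketch(double scalefactor) {
        this.scalefactor = scalefactor;
    }

    // Stafford variant 13 bit mixer, masked non-negative so the modulus below lands in [0, operand).
    private static long standInHash(long z) {
        z = (z ^ (z >>> 30)) * 0xBF58476D1CE4E5B9L;
        z = (z ^ (z >>> 27)) * 0x94D049BB133111EBL;
        return (z ^ (z >>> 31)) & Long.MAX_VALUE;
    }

    @Override
    public int applyAsInt(long operand) {
        if (operand == 0) {
            return 0;
        }
        long l = standInHash(operand);
        // Same shape as the patch: bound by operand, scale, then fold into int range.
        return (int) (((l % operand) * scalefactor) % Integer.MAX_VALUE);
    }

    public static void main(String[] args) {
        LongToIntFunction half = new HashRangeScaledSketch(0.5d);
        for (long input = 1000; input <= 5000; input += 1000) {
            System.out.printf("%d -> %d (< %d)%n", input, half.applyAsInt(input), input / 2);
        }
    }
}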
@@ -5,20 +5,29 @@ import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper;
 import java.util.function.LongUnaryOperator;
 
 /**
- * Return a pseudo-random value which can only be as large as the input.
+ * Return a pseudo-random value which can only be as large as the input times
+ * a scale factor, with a default scale factor of 1.0d
  */
 @ThreadSafeMapper
 public class HashRangeScaled implements LongUnaryOperator {
 
-    private Hash hash = new Hash();
+    private final double scalefactor;
+    private final Hash hash = new Hash();
+
+    public HashRangeScaled(double scalefactor) {
+        this.scalefactor = scalefactor;
+    }
+
+    public HashRangeScaled() {
+        this.scalefactor = 1.0D;
+    }
 
     @Override
     public long applyAsLong(long operand) {
-        if (operand==0) { return 0; }
+        if (operand == 0) {
+            return 0;
+        }
         long hashed = hash.applyAsLong(operand);
-        return hashed % operand;
+        return (long) ((hashed % operand) * scalefactor);
     }
 }
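The updated javadoc above is the user-facing contract: results stay below input * scalefactor. A hypothetical spot check against the long variant (this assumes the virtdata basics module is on the classpath and that Hash yields non-negative longs, which the lower bound relies on):

import io.nosqlbench.virtdata.library.basics.shared.from_long.to_long.HashRangeScaled;

// Hypothetical spot check, not part of this commit.
class HashRangeScaledBoundCheck {
    public static void main(String[] args) {
        HashRangeScaled hrs = new HashRangeScaled(0.5d);
        for (long input = 1000; input <= 5000; input += 1000) {
            long v = hrs.applyAsLong(input);
            // (hashed % input) lies in [0, input), so scaling by 0.5 keeps v below input / 2
            if (v < 0 || v >= input / 2) {
                throw new AssertionError("out of range: " + v + " for input " + input);
            }
            System.out.println(input + " -> " + v);
        }
    }
}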
@@ -8,10 +8,21 @@ import java.util.function.IntUnaryOperator;
 public class HashRangeScaled implements IntUnaryOperator {
 
     private final Hash hash = new Hash();
+    private final double scalefactor;
+
+    public HashRangeScaled(double scalefactor) {
+        this.scalefactor = scalefactor;
+    }
+
+    public HashRangeScaled() {
+        this.scalefactor = 1.0D;
+    }
 
     @Override
     public int applyAsInt(int operand) {
-        if (operand==0) { return 0; }
-        return hash.applyAsInt(operand) % operand;
+        if (operand == 0) {
+            return 0;
+        }
+        return (int) ((hash.applyAsInt(operand) % operand) * scalefactor) % Integer.MAX_VALUE;
     }
 }
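Worked example for the int variant: for operand = 10 and scalefactor = 0.5, hash.applyAsInt(10) % 10 is some r in [0, 10); r * 0.5 truncates to a value in [0, 4], so outputs land in [0, 5) rather than [0, 10). The trailing % Integer.MAX_VALUE is a guard at the extreme: it maps a result of exactly Integer.MAX_VALUE to 0 and leaves every other non-negative result unchanged.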
@@ -1,7 +1,10 @@
 package io.nosqlbench.virtdata.library.basics.shared.from_long.to_long;
 
+import org.assertj.core.data.Percentage;
 import org.junit.Test;
 
+import java.util.LongSummaryStatistics;
+
 import static org.assertj.core.api.Assertions.assertThat;
 
 public class HashRangeScaledTest {
@@ -11,8 +14,51 @@ public class HashRangeScaledTest {
         HashRangeScaled hrs = new HashRangeScaled();
         for (long i = 0; i < 100; i++) {
             long l = hrs.applyAsLong(i);
-            assertThat(l).isBetween(0L,i);
+            assertThat(l).isBetween(0L, i);
         }
     }
 
+    @Test
+    public void testHashRangeScaledLongs() {
+        // This presumes a sliding triangular distribution in the data
+        HashRangeScaled hrs = new HashRangeScaled();
+        LongSummaryStatistics lss = new LongSummaryStatistics();
+
+        long top = 1000000;
+        for (long i = 0; i < top; i++) {
+            lss.accept(hrs.applyAsLong(i));
+        }
+        System.out.println(lss);
+        assertThat(lss.getAverage()).isCloseTo(top / 4d, Percentage.withPercentage(1d));
+    }
+
+    @Test
+    public void testHashRangeScaledLongsHalf() {
+        // This presumes a sliding triangular distribution in the data
+        HashRangeScaled hrs = new HashRangeScaled(0.5d);
+        LongSummaryStatistics lss = new LongSummaryStatistics();
+
+        long top = 1000000;
+        for (long i = 0; i < top; i++) {
+            lss.accept(hrs.applyAsLong(i));
+        }
+        System.out.println(lss);
+        assertThat(lss.getAverage()).isCloseTo(top / 8d, Percentage.withPercentage(1d));
+    }
+
+    @Test
+    public void testHashRangeScaledLongsDoubled() {
+        // This presumes a sliding triangular distribution in the data
+        HashRangeScaled hrs = new HashRangeScaled(2d);
+        LongSummaryStatistics lss = new LongSummaryStatistics();
+
+        long top = 1000000;
+        for (long i = 0; i < top; i++) {
+            lss.accept(hrs.applyAsLong(i));
+        }
+        System.out.println(lss);
+        assertThat(lss.getAverage()).isCloseTo(top / 2d, Percentage.withPercentage(1d));
+    }
+
 }
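Why the tests expect top/4, top/8, and top/2: if Hash output is roughly uniform, then for input i the unscaled value hashed % i is roughly uniform on [0, i) with mean i/2. Sweeping i from 0 to top therefore gives an overall mean of about (1/top) · Σ i/2 ≈ top/4; this rising sequence of uniform ranges is the "sliding triangular distribution" the test comments mention. A scale factor s multiplies every sample, so the expected average scales linearly to s · top/4: top/8 for s = 0.5 and top/2 for s = 2, each asserted above with a 1% tolerance.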