mirror of https://github.com/nosqlbench/nosqlbench.git
synced 2024-11-22 00:38:05 -06:00

Merge branch 'main' into my-astra-schema-examples
This commit is contained in: commit 14d26ee58f
.github/workflows/build.yml (vendored): 7 changed lines
@@ -12,7 +12,7 @@ on:
 jobs:
   build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     steps:
     - uses: actions/checkout@v4
       name: checkout nosqlbench
@@ -25,6 +25,9 @@ jobs:
         java-package: jdk
         java-version: '21'

+    - name: install fuse2
+      run: sudo apt install libfuse2
+
     - name: Cache Maven packages
       uses: actions/cache@v4
       with:
@@ -74,7 +77,7 @@ jobs:

   builddocs:
     needs: build
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     if: ${{ github.repository == 'nosqlbench/nosqlbench' && github.event_name == 'push' && github.ref_name == 'main' }}
     steps:
.github/workflows/preview.yml (vendored): 7 changed lines
@@ -15,7 +15,7 @@ on:
 jobs:
   preview-build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     outputs:
       preview_version: ${{ steps.versions.outputs.PREVIEW_VERSION }}
       docker_tags: ${{ steps.versions.outputs.DOCKER_TAGS }}
@@ -46,6 +46,9 @@ jobs:
         docker rmi $(docker image ls -aq)
         df -h

+    - name: install fuse2
+      run: sudo apt install libfuse2
+
     - name: Cache Maven packages
       uses: actions/cache@v4
       with:
@@ -204,7 +207,7 @@ jobs:

   preview-docs:
     needs: preview-build
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     steps:
     - name: import env vars
       run: |
.github/workflows/release.yml (vendored): 9 changed lines
@@ -15,7 +15,7 @@ on:
 jobs:
   release-build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     steps:

     - name: checkout repo
@@ -43,6 +43,9 @@ jobs:
         docker rmi $(docker image ls -aq)
         df -h

+    - name: install fuse2
+      run: sudo apt install libfuse2
+
     - name: Cache Maven packages
       uses: actions/cache@v4
       with:
@@ -186,7 +189,7 @@ jobs:

 #  javadocs:
 #    needs: release
-#    runs-on: ubuntu-20.04
+#    runs-on: ubuntu-22.04
 #    steps:
 #    - name: set git username
 #      run: git config --global user.email "${{ secrets.NBDROID_EMAIL }}"
@@ -210,7 +213,7 @@ jobs:
 #
 #  docs:
 #    needs: release
-#    runs-on: ubuntu-20.04
+#    runs-on: ubuntu-22.04
 #    steps:
 #
 #    - name: set git username
@@ -2,7 +2,7 @@

 ## requirements

-You need Java 17 or newer to build NoSQLBench.
+You need Java 21 or newer to build NoSQLBench.

 # Building Locally
@@ -26,7 +26,7 @@
     <properties>
-        <revision>5.21.1-SNAPSHOT</revision>
+        <revision>5.21.2-SNAPSHOT</revision>
         <!-- Set this level to override the logging level for tests during build -->
         <project.testlevel>INFO</project.testlevel>
         <!-- Set this level to override the logging level for tests logging configuration during build -->
@@ -66,8 +66,11 @@ public class DataApiDeleteOneOpDispenser extends DataApiOpDispenser {
     }

     private float[] getVectorFromOp(ParsedOp op, long l) {
         if (op.isDefined("vector")) {
             return getVectorValues(op.get("vector", l));
         }
         return null;
     }

     @Override
     public DataApiBaseOp getOp(long value) {
@@ -64,28 +64,10 @@ public abstract class DataApiOpDispenser extends BaseOpDispenser<DataApiBaseOp,
         List<Filter> orFilterList = new ArrayList<>();
         for (Map<String,Object> filterFields : filters) {
             switch ((String)filterFields.get("conjunction")) {
-                case "and" -> {
-                    switch (filterFields.get("operator").toString()) {
-                        case "lt" ->
-                            andFilterList.add(Filters.lt(filterFields.get("field").toString(), (long) filterFields.get("value")));
-                        case "gt" ->
-                            andFilterList.add(Filters.gt(filterFields.get("field").toString(), (long) filterFields.get("value")));
-                        case "eq" ->
-                            andFilterList.add(Filters.eq(filterFields.get("field").toString(), filterFields.get("value")));
-                        default -> logger.error(() -> "Operation " + filterFields.get("operator") + " not supported");
-                    }
-                }
-                case "or" -> {
-                    switch (filterFields.get("operator").toString()) {
-                        case "lt" ->
-                            orFilterList.add(Filters.lt(filterFields.get("field").toString(), (long) filterFields.get("value")));
-                        case "gt" ->
-                            orFilterList.add(Filters.gt(filterFields.get("field").toString(), (long) filterFields.get("value")));
-                        case "eq" ->
-                            orFilterList.add(Filters.eq(filterFields.get("field").toString(), filterFields.get("value")));
-                        default -> logger.error(() -> "Operation " + filterFields.get("operator") + " not supported");
-                    }
-                }
+                case "and" ->
+                    addOperatorFilter(andFilterList, filterFields.get("operator").toString(), filterFields.get("field").toString(), filterFields.get("value"));
+                case "or" ->
+                    addOperatorFilter(orFilterList, filterFields.get("operator").toString(), filterFields.get("field").toString(), filterFields.get("value"));
                 default -> logger.error(() -> "Conjunction " + filterFields.get("conjunction") + " not supported");
             }
         }
@@ -97,6 +79,38 @@ public abstract class DataApiOpDispenser extends BaseOpDispenser<DataApiBaseOp,
         return filter;
     }

+    protected void addOperatorFilter(List<Filter> filtersList, String operator, String fieldName, Object fieldValue) {
+        switch (operator) {
+            case "all" ->
+                filtersList.add(Filters.all(fieldName, fieldValue));
+            case "eq" ->
+                filtersList.add(Filters.eq(fieldName, fieldValue));
+            case "exists" -> {
+                if (fieldValue != null) {
+                    logger.warn(() -> "'exists' operator does not support value field");
+                }
+                filtersList.add(Filters.exists(fieldName));
+            }
+            case "gt" ->
+                filtersList.add(Filters.gt(fieldName, (long) fieldValue));
+            case "gte" ->
+                filtersList.add(Filters.gte(fieldName, (long) fieldValue));
+            case "hasSize" ->
+                filtersList.add(Filters.hasSize(fieldName, (int) fieldValue));
+            case "in" ->
+                filtersList.add(Filters.in(fieldName, fieldValue));
+            case "lt" ->
+                filtersList.add(Filters.lt(fieldName, (long) fieldValue));
+            case "lte" ->
+                filtersList.add(Filters.lte(fieldName, (long) fieldValue));
+            case "ne" ->
+                filtersList.add(Filters.ne(fieldName, fieldValue));
+            case "nin" ->
+                filtersList.add(Filters.nin(fieldName, fieldValue));
+            default -> logger.error(() -> "Operation '" + operator + "' not supported");
+        }
+    }
+
     protected Update getUpdates(ParsedOp op, long l) {
         Update update = new Update();
         Optional<LongFunction<Map>> updatesFunction = op.getAsOptionalFunction("updates", Map.class);
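For context, the change above collapses two copies of a per-operator switch into the shared addOperatorFilter helper. Below is a minimal, self-contained sketch of the same map-driven dispatch; Filter here is a stand-in record for illustration, not the Data API client type.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class FilterSketch {
    // Stand-in for the Data API filter type; illustration only.
    record Filter(String op, String field, Object value) {}

    // One shared helper turns (operator, field, value) into a filter entry,
    // mirroring the shape of addOperatorFilter above.
    static void addOperatorFilter(List<Filter> filters, String operator, String field, Object value) {
        switch (operator) {
            case "eq", "ne", "lt", "lte", "gt", "gte", "in", "nin", "all", "hasSize" ->
                filters.add(new Filter(operator, field, value));
            case "exists" -> filters.add(new Filter(operator, field, null)); // takes no value
            default -> throw new IllegalArgumentException("Operation '" + operator + "' not supported");
        }
    }

    public static void main(String[] args) {
        List<Filter> andFilters = new ArrayList<>();
        Map<String, Object> spec = Map.of("conjunction", "and", "operator", "gt", "field", "age", "value", 21L);
        if ("and".equals(spec.get("conjunction"))) {
            addOperatorFilter(andFilters, spec.get("operator").toString(), spec.get("field").toString(), spec.get("value"));
        }
        System.out.println(andFilters); // [Filter[op=gt, field=age, value=21]]
    }
}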
@@ -18,13 +18,20 @@ package io.nosqlbench.adapter.neo4j;

 import io.nosqlbench.nb.api.config.standard.*;
+import io.nosqlbench.nb.api.errors.BasicError;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;

-import org.neo4j.driver.AuthTokens;
-import org.neo4j.driver.Driver;
-import org.neo4j.driver.GraphDatabase;
+import org.neo4j.driver.*;
+import org.neo4j.driver.async.AsyncSession;

+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.Optional;

 public class Neo4JSpace implements AutoCloseable {
@@ -32,6 +39,7 @@ public class Neo4JSpace implements AutoCloseable {
     private final static Logger logger = LogManager.getLogger(Neo4JSpace.class);
     private final String space;
     private Driver driver;
+    private SessionConfig sessionConfig;

     public Neo4JSpace(String space, NBConfiguration cfg) {
         this.space = space;
@@ -40,50 +48,74 @@ public class Neo4JSpace implements AutoCloseable {
     }

     private Driver initializeDriver(NBConfiguration cfg) {
+        SessionConfig.Builder builder = SessionConfig.builder();
+        cfg.getOptional("database").ifPresent(builder::withDatabase);
+        this.sessionConfig = builder.build();
+
         String dbURI = cfg.get("db_uri");

         Optional<String> usernameOpt = cfg.getOptional("username");
+        Optional<String> userfileOpt = cfg.getOptional("userfile");
         Optional<String> passwordOpt = cfg.getOptional("password");
-        String username;
-        String password;
-        // user has supplied both username and password
-        if (usernameOpt.isPresent() && passwordOpt.isPresent()) {
-            username = usernameOpt.get();
-            password = passwordOpt.get();
-            return GraphDatabase.driver(dbURI, AuthTokens.basic(username, password));
-        }
-        // user has only supplied username
-        else if (usernameOpt.isPresent()) {
-            String error = "username is present, but password is not defined.";
-            logger.error(error);
-            throw new RuntimeException(error);
-        }
-        // user has only supplied password
-        else if (passwordOpt.isPresent()) {
-            String error = "password is present, but username is not defined.";
-            logger.error(error);
-            throw new RuntimeException(error);
-        }
-        // user has supplied neither
-        else {
-            return GraphDatabase.driver(dbURI);
-        }
+        Optional<String> passfileOpt = cfg.getOptional("passfile");
+
+        String username = null;
+        if (usernameOpt.isPresent()) {
+            username = usernameOpt.get();
+        } else if (userfileOpt.isPresent()) {
+            Path path = Paths.get(userfileOpt.get());
+            try {
+                username = Files.readAllLines(path).get(0);
+            } catch (IOException e) {
+                String error = "Error while reading username from file:" + path;
+                logger.error(error, e);
+                throw new RuntimeException(e);
+            }
+        }
+
+        String password = null;
+        if (username != null) {
+            if (passwordOpt.isPresent()) {
+                password = passwordOpt.get();
+            } else if (passfileOpt.isPresent()) {
+                Path path = Paths.get(passfileOpt.get());
+                try {
+                    password = Files.readAllLines(path).get(0);
+                } catch (IOException e) {
+                    String error = "Error while reading password from file:" + path;
+                    logger.error(error, e);
+                    throw new RuntimeException(e);
+                }
+            } else {
+                String error = "username is present, but neither password nor passfile are defined.";
+                logger.error(error);
+                throw new RuntimeException(error);
+            }
+        }
+
+        if ((username == null) != (password == null)) {
+            throw new BasicError("You must provide both username and password, or neither, with either " +
+                "username|userfile and password|passfile options");
+        }
+        if (username != null) {
             logger.info(this.space + ": Creating new Neo4J driver with [" +
                 "dbURI = " + dbURI +
                 ", username = " + username +
                 ", password = " + Neo4JAdapterUtils.maskDigits(password) +
                 "]"
             );
+            return GraphDatabase.driver(dbURI, AuthTokens.basic(username, password));
+        } else {
             logger.info(this.space + ": Creating new Neo4J driver with [" +
                 "dbURI = " + dbURI +
                 "]"
             );
+            return GraphDatabase.driver(dbURI);
+        }
     }

     public static NBConfigModel getConfigModel() {
         return ConfigModel.of(Neo4JSpace.class)
             .add(Param.required("db_uri", String.class))
             .add(Param.optional("username", String.class))
             .add(Param.optional("password", String.class))
+            .add(Param.optional("database", String.class))
+            .add(Param.optional("userfile", String.class))
+            .add(Param.optional("passfile", String.class))
             .asReadOnly();
     }

@@ -91,6 +123,14 @@ public class Neo4JSpace implements AutoCloseable {
         return driver;
     }

+    public AsyncSession newAsyncSession() {
+        return driver.session(AsyncSession.class, sessionConfig);
+    }
+
+    public Session newSession() {
+        return driver.session(sessionConfig);
+    }
+
     @Override
     public void close() throws Exception {
         if (driver != null) {
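The credential handling above follows one rule: the username may come from the username option or the first line of a userfile, the password from password or passfile, and the two must be supplied together or not at all. A rough standalone sketch of that rule follows; the names are illustrative, not NoSQLBench API, and it assumes only the first line of a credential file matters.

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Optional;

public class CredentialRuleSketch {
    // Prefer the directly-supplied value; otherwise read the first line of the file.
    static Optional<String> resolve(Optional<String> direct, Optional<String> file) {
        if (direct.isPresent()) return direct;
        return file.map(f -> {
            try {
                return Files.readAllLines(Path.of(f)).get(0);
            } catch (Exception e) {
                throw new RuntimeException("Error while reading credential from file:" + f, e);
            }
        });
    }

    public static void main(String[] args) {
        Optional<String> user = resolve(Optional.of("neo4j"), Optional.empty());
        Optional<String> pass = resolve(Optional.of("secret"), Optional.empty());
        // Mirrors the BasicError check above: both or neither.
        if (user.isPresent() != pass.isPresent()) {
            throw new IllegalArgumentException("You must provide both username and password, or neither.");
        }
        System.out.println(user.isPresent() ? "basic-auth driver" : "anonymous driver");
    }
}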
@@ -35,7 +35,7 @@ public class Neo4JAsyncAutoCommitOpDispenser extends Neo4JBaseOpDispenser {
     @Override
     public LongFunction<Neo4JAsyncAutoCommitOp> createOpFunc() {
         return l -> new Neo4JAsyncAutoCommitOp(
-            spaceFunc.apply(l).getDriver().session(AsyncSession.class),
+            asyncSessionFunc.apply(l),
             queryFunc.apply(l)
         );
     }
@@ -34,7 +34,7 @@ public class Neo4JAsyncReadTxnOpDispenser extends Neo4JBaseOpDispenser {
     @Override
     public LongFunction<Neo4JAsyncReadTxnOp> createOpFunc() {
         return l -> new Neo4JAsyncReadTxnOp(
-            spaceFunc.apply(l).getDriver().session(AsyncSession.class),
+            asyncSessionFunc.apply(l),
             queryFunc.apply(l)
         );
     }
@@ -35,7 +35,7 @@ public class Neo4JAsyncWriteTxnOpDispenser extends Neo4JBaseOpDispenser {
     @Override
     public LongFunction<Neo4JAsyncWriteTxnOp> createOpFunc() {
         return l -> new Neo4JAsyncWriteTxnOp(
-            spaceFunc.apply(l).getDriver().session(AsyncSession.class),
+            asyncSessionFunc.apply(l),
            queryFunc.apply(l)
         );
     }
@@ -23,6 +23,9 @@ import io.nosqlbench.adapters.api.activityimpl.BaseOpDispenser;
 import io.nosqlbench.adapters.api.templating.ParsedOp;

 import org.neo4j.driver.Query;
+import org.neo4j.driver.Session;
+import org.neo4j.driver.SessionConfig;
 import org.neo4j.driver.async.AsyncSession;

 import java.util.Collections;
 import java.util.function.LongFunction;
@@ -35,6 +38,8 @@ public abstract class Neo4JBaseOpDispenser extends BaseOpDispenser<Neo4JBaseOp,
     protected final LongFunction<Query> queryFunc;
     protected final LongFunction<Map> paramFunc;
     protected final LongFunction<Neo4JBaseOp> opFunc;
+    protected final LongFunction<Session> sessionFunc;
+    protected final LongFunction<AsyncSession> asyncSessionFunc;

     public Neo4JBaseOpDispenser(Neo4JDriverAdapter adapter, ParsedOp op, LongFunction<Neo4JSpace> spaceFunc, String requiredTemplateKey) {
         super(adapter, op);
@@ -43,6 +48,8 @@ public abstract class Neo4JBaseOpDispenser extends BaseOpDispenser<Neo4JBaseOp,
         this.paramFunc = createParamFunc(op);
         this.queryFunc = createQueryFunc();
         this.opFunc = (LongFunction<Neo4JBaseOp>) createOpFunc();
+        this.sessionFunc = getSessionFunction(spaceFunc, op);
+        this.asyncSessionFunc = getAsyncSessionFunction(spaceFunc, op);
     }

     private LongFunction<Map> createParamFunc(ParsedOp op) {
@@ -50,6 +57,24 @@ public abstract class Neo4JBaseOpDispenser extends BaseOpDispenser<Neo4JBaseOp,
             .orElse(l -> Collections.emptyMap());
     }

+    private LongFunction<Session> getSessionFunction(LongFunction<Neo4JSpace> spaceFunc, ParsedOp op) {
+        LongFunction<SessionConfig.Builder> scbF = (long l) -> SessionConfig.builder();
+        scbF = op.enhanceFuncOptionally(scbF, "database", String.class, SessionConfig.Builder::withDatabase);
+        LongFunction<SessionConfig.Builder> finalScbF = scbF;
+        LongFunction<SessionConfig> scF = (long l) -> finalScbF.apply(l).build();
+        return (long l) -> spaceFunc.apply(l).getDriver().session(Session.class, scF.apply(l));
+    }
+
+    private LongFunction<AsyncSession> getAsyncSessionFunction(LongFunction<Neo4JSpace> spaceFunc, ParsedOp op) {
+        LongFunction<SessionConfig.Builder> scbF = (long l) -> SessionConfig.builder();
+        scbF = op.enhanceFuncOptionally(scbF, "database", String.class, SessionConfig.Builder::withDatabase);
+        LongFunction<SessionConfig.Builder> finalScbF = scbF;
+        LongFunction<SessionConfig> scF = (long l) -> finalScbF.apply(l).build();
+        return (long l) -> spaceFunc.apply(l).getDriver().session(AsyncSession.class, scF.apply(l));
+    }
+
     /**
      * Reference:
      * - https://neo4j.com/docs/api/java-driver/current/org.neo4j.driver/org/neo4j/driver/Query.html#withParameters(java.util.Map)
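The two session functions above share one composition pattern: start from a per-cycle SessionConfig.Builder function, let enhanceFuncOptionally layer the optional database step over it, then map to the built value. A hedged, self-contained sketch of that pattern follows, with a stand-in enhanceOptionally and a StringBuilder substituting for the Neo4J builder; none of these names are the NoSQLBench API.

import java.util.Optional;
import java.util.function.BiFunction;
import java.util.function.LongFunction;

public class ComposeSketch {
    // Stand-in for ParsedOp.enhanceFuncOptionally: if the optional value is
    // present, wrap the base per-cycle function with one more builder step.
    static <B, V> LongFunction<B> enhanceOptionally(
        LongFunction<B> base, Optional<V> value, BiFunction<B, V, B> step) {
        return value.<LongFunction<B>>map(v -> (long l) -> step.apply(base.apply(l), v)).orElse(base);
    }

    public static void main(String[] args) {
        // analogous to SessionConfig.builder() per cycle
        LongFunction<StringBuilder> builderF = l -> new StringBuilder("session[cycle=" + l + "]");
        // analogous to .withDatabase(...), applied only when 'database' is configured
        builderF = enhanceOptionally(builderF, Optional.of("mydb"),
            (b, db) -> b.append("[db=").append(db).append(']'));
        LongFunction<StringBuilder> finalF = builderF;
        // analogous to building the SessionConfig and opening the session
        LongFunction<String> sessionF = l -> finalF.apply(l).toString();
        System.out.println(sessionF.apply(42L)); // session[cycle=42][db=mydb]
    }
}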
@@ -35,7 +35,7 @@ public class Neo4JSyncAutoCommitOpDispenser extends Neo4JBaseOpDispenser {
     @Override
     public LongFunction<Neo4JSyncAutoCommitOp> createOpFunc() {
         return l -> new Neo4JSyncAutoCommitOp(
-            spaceFunc.apply(l).getDriver().session(Session.class),
+            sessionFunc.apply(l),
             queryFunc.apply(l)
         );
     }
@@ -21,8 +21,6 @@ import io.nosqlbench.adapter.neo4j.ops.Neo4JSyncReadTxnOp;
 import io.nosqlbench.adapter.neo4j.Neo4JSpace;
 import io.nosqlbench.adapters.api.templating.ParsedOp;

-import org.neo4j.driver.Session;
-
 import java.util.function.LongFunction;

@@ -35,7 +33,7 @@ public class Neo4JSyncReadTxnOpDispenser extends Neo4JBaseOpDispenser {
     @Override
     public LongFunction<Neo4JSyncReadTxnOp> createOpFunc() {
         return l -> new Neo4JSyncReadTxnOp(
-            spaceFunc.apply(l).getDriver().session(Session.class),
+            sessionFunc.apply(l),
             queryFunc.apply(l)
         );
     }
@@ -28,14 +28,19 @@ import java.util.function.LongFunction;

 public class Neo4JSyncWriteTxnOpDispenser extends Neo4JBaseOpDispenser {

-    public Neo4JSyncWriteTxnOpDispenser(Neo4JDriverAdapter adapter, ParsedOp op, LongFunction<Neo4JSpace> spaceFunc, String requiredTemplateKey) {
+    public Neo4JSyncWriteTxnOpDispenser(
+        Neo4JDriverAdapter adapter,
+        ParsedOp op,
+        LongFunction<Neo4JSpace> spaceFunc,
+        String requiredTemplateKey
+    ) {
         super(adapter, op, spaceFunc, requiredTemplateKey);
     }

     @Override
     public LongFunction<Neo4JSyncWriteTxnOp> createOpFunc() {
         return l -> new Neo4JSyncWriteTxnOp(
-            spaceFunc.apply(l).getDriver().session(Session.class),
+            sessionFunc.apply(l),
             queryFunc.apply(l)
         );
     }
@@ -0,0 +1,135 @@
min_version: 5.21.1
description: |
  Vector workload for Neo4J

  Template Variables:
  TEMPLATE(dataset)
  TEMPLATE(node_label,Node)
  TEMPLATE(k,100)
  TEMPLATE(batch_size)
  TEMPLATE(delete_batch_size,1000)

params:
  driver: neo4j
  instrument: true
  labels:
    target: TEMPLATE(targetname,neo4j)
  database: TEMPLATE(database,neo4j)

scenarios:
  default:
    # Remove any existing data
    drop: >-
      run tags='block:drop' threads===1 cycles===UNDEF
      labels='target:TEMPLATE(targetname,neo4j)'
      errors=count
    # Install the schema required to run the test
    schema: >-
      run tags='block:schema' threads===1 cycles===UNDEF
      labels='target:TEMPLATE(targetname,neo4j)'
    # Load training data, measure how long it takes to load
    rampup: >-
      run tags='block:rampup_batch' threads=TEMPLATE(rampup_threads,auto)
      cycles===TEMPLATE(rampup_cycles,TEMPLATE(trainsize))
      errors=count,warn
      labels='target:TEMPLATE(targetname,neo4j)'
    # Measure how the system responds to queries under a read-only workload
    search_and_verify: >-
      run alias=search_and_verify tags='block:search_and_verify'
      threads=TEMPLATE(search_threads,auto) cycles===TEMPLATE(search_cycles,TEMPLATE(testsize))
      errors=count,warn
      labels='target:TEMPLATE(targetname,neo4j)'
    verify_recall: >-
      run alias=verify_recall tags='block:search_and_verify'
      threads=TEMPLATE(search_threads,auto) cycles===TEMPLATE(search_cycles,TEMPLATE(testsize))
      errors=count,warn
      labels='target:TEMPLATE(targetname,neo4j)'

bindings:
  id: ToString()
  id_batch: Mul(TEMPLATE(batch_size)L); ListSizedStepped(TEMPLATE(batch_size),long->ToString());
  train_vector: HdfFileToFloatList("TEMPLATE(dataset)", "/train");
  train_vector_batch: Mul(TEMPLATE(batch_size)L); ListSizedStepped(TEMPLATE(batch_size),HdfFileToFloatList("TEMPLATE(dataset)", "/train"));
  test_vector: HdfFileToFloatList("TEMPLATE(dataset)", "/test");
  relevant_indices: HdfFileToIntArray("TEMPLATE(dataset)", "/neighbors")

blocks:
  # TODO: Node deletion times out; attempt this in future: CREATE OR REPLACE DATABASE neo4j
  drop:
    ops:
      # Reference: https://support.neo4j.com/s/article/360059882854-Deleting-large-numbers-of-nodes#h_01H95CXNJ8TN4126T3Y01BRWKS
      delete_nodes:
        sync_autocommit: |
          MATCH (n)
          CALL { WITH n
          DETACH DELETE n
          } IN TRANSACTIONS OF $delete_batch_size ROWS;
        query_params:
          delete_batch_size: TEMPLATE(delete_batch_size,1000)
      drop_index:
        sync_autocommit: DROP INDEX $index_name IF EXISTS
        query_params:
          index_name: vector_index

  schema:
    ops:
      create_vector_index:
        sync_autocommit: |
          CREATE VECTOR INDEX $index_name IF NOT EXISTS FOR (n:TEMPLATE(node_label,Node))
          ON (n.embedding) OPTIONS
          {indexConfig: {`vector.dimensions`: $dimensions, `vector.similarity_function`: $similarity_function}}
        query_params:
          index_name: vector_index
          dimensions: TEMPLATE(dimensions)
          similarity_function: TEMPLATE(similarity_function,cosine)

  rampup:
    ops:
      insert_node:
        async_write_transaction: |
          CREATE (v:TEMPLATE(node_label,Node) {id: $id, embedding: $vector})
        query_params:
          id: '{id}'
          vector: '{train_vector}'

  rampup_batch:
    ops:
      # Reference: https://community.neo4j.com/t/unwind-multiple-arrays-to-set-property/59908/5
      insert_nodes:
        async_write_transaction: |
          WITH $id_list as ids, $vector_list as vectors
          UNWIND RANGE(0, size(ids) - 1) as idx
          CREATE (v:TEMPLATE(node_label,Node) {id: ids[idx], embedding: vectors[idx]})
        query_params:
          id_list: '{id_batch}'
          vector_list: '{train_vector_batch}'

  search_and_verify:
    ops:
      search:
        async_read_transaction: |
          WITH $query_vector AS queryVector
          CALL db.index.vector.queryNodes($index_name, $k, queryVector)
          YIELD node
          RETURN node.id
        query_params:
          query_vector: '{test_vector}'
          index_name: vector_index
          k: TEMPLATE(k,100)
        verifier-init: |
          relevancy = new io.nosqlbench.nb.api.engine.metrics.wrappers.RelevancyMeasures(_parsed_op);
          for (int k in List.of(100)) {
            relevancy.addFunction(io.nosqlbench.engine.extensions.computefunctions.RelevancyFunctions.recall("recall", k));
            relevancy.addFunction(io.nosqlbench.engine.extensions.computefunctions.RelevancyFunctions.precision("precision", k));
            relevancy.addFunction(io.nosqlbench.engine.extensions.computefunctions.RelevancyFunctions.F1("F1", k));
            relevancy.addFunction(io.nosqlbench.engine.extensions.computefunctions.RelevancyFunctions.reciprocal_rank("RR", k));
            relevancy.addFunction(io.nosqlbench.engine.extensions.computefunctions.RelevancyFunctions.average_precision("AP", k));
          }
        verifier: |
          // result is a Record[]
          values = io.nosqlbench.adapter.neo4j.Neo4JAdapterUtils.getFieldForAllRecords(result, "node.id")
          ann = values.collect { it.toString().toInteger() }.toArray(new Integer[values.size()])
          knn = {relevant_indices}
          relevancy.accept(knn, ann);
          return true;
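In the search_and_verify block above, the verifier compares the returned node ids (ann) against ground-truth neighbors from the dataset (knn). As a rough illustration of what a recall-at-k function of that shape computes (the real implementations live in RelevancyFunctions; this sketch is not that code):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RecallSketch {
    // Fraction of the top-k ground-truth neighbors that appear in the top-k results.
    static double recallAtK(int[] relevant, int[] actual, int k) {
        Set<Integer> truth = new HashSet<>();
        for (int i = 0; i < Math.min(k, relevant.length); i++) truth.add(relevant[i]);
        long hits = Arrays.stream(actual).limit(k).filter(truth::contains).count();
        return truth.isEmpty() ? 0.0 : (double) hits / truth.size();
    }

    public static void main(String[] args) {
        // 3 of the 4 ground-truth neighbors were returned, so recall@4 is 0.75
        System.out.println(recallAtK(new int[]{1, 2, 3, 4}, new int[]{2, 3, 9, 1}, 4));
    }
}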
@@ -72,7 +72,7 @@ public class QdrantSpace implements AutoCloseable {
         boolean useTls = cfg.getOptional("use_tls").map(Boolean::parseBoolean).orElse(true);

         var builder = QdrantGrpcClient.newBuilder(uri, grpcPort, useTls);
-        var Optional<requiredToken> = cfg.getOptional("token_file")
+        String requiredToken = cfg.getOptional("token_file")
             .map(Paths::get)
             .map(
                 tokenFilePath -> {
@@ -53,7 +53,7 @@ public class FieldDestructuringMapper implements Function<Map<String, Object>, M
                     return stringObjectMap;
                 }
             } else {
-                throw new RuntimeException("During op mapping, can't parse something that is not a CharSequence: '" + fieldname + "' (type is " + o.getClass().getCanonicalName() + ")");
+                throw new RuntimeException("During op mapping, can't parse something that is not a CharSequence: '" + fieldname + "' (type is " + o.getClass().getCanonicalName() + ") (value is:" + o.toString());
             }
         } else {
             return stringObjectMap;
@@ -99,6 +99,31 @@ public class NBCreators {
     }

+    public WindowSummaryGauge windowSummaryGauge(
+        String name,
+        int window,
+        List<String> statspecs,
+        MetricCategory category,
+        String description
+    ) {
+        List<WindowSummaryGauge.Stat> stats =
+            statspecs.stream().map(WindowSummaryGauge.Stat::valueOf).toList();
+        DoubleSummaryStatistics reservoir = new DoubleSummaryStatistics();
+        WindowSummaryGauge anyGauge = null;
+        for (WindowSummaryGauge.Stat stat : stats) {
+            anyGauge = new WindowSummaryGauge(
+                window,
+                base.getLabels().and(NBLabels.forKV("name", name + "_w" + window, "stat", stat)),
+                stat,
+                description,
+                category
+            );
+            base.addComponentMetric(anyGauge, category, description);
+        }
+        return anyGauge;
+    }
+
     public DoubleSummaryGauge summaryGauge(String name, List<String> statspecs, MetricCategory category, String description) {
         List<DoubleSummaryGauge.Stat> stats = statspecs.stream().map(DoubleSummaryGauge.Stat::valueOf).toList();
         DoubleSummaryStatistics reservoir = new DoubleSummaryStatistics();
@@ -113,6 +138,7 @@ public class NBCreators {
     public NBMetricHistogram histogram(String metricFamilyName, MetricCategory category, String description) {
         return histogram(metricFamilyName, 4, category, description);
     }
+
     public NBMetricHistogram histogram(String metricFamilyName, int hdrdigits, MetricCategory category, String description) {
         NBLabels labels = base.getLabels().and("name", metricFamilyName);
         NBMetricHistogram histogram = new NBMetricHistogram(labels, new DeltaHdrHistogramReservoir(labels, hdrdigits), description, category);
@@ -206,34 +232,42 @@ public class NBCreators {
         public Log4jReporterBuilder(NBComponent component) {
             this.component = component;
         }

         public Log4jReporterBuilder oneLastTime(final boolean oneLastTime) {
             this.oneLastTime = oneLastTime;
             return this;
         }

         public Log4jReporterBuilder interval(final int interval) {
             this.millis = interval;
             return this;
         }

         public Log4jReporterBuilder outputTo(final Logger logger) {
             this.logger = logger;
             return this;
         }

         public Log4jReporterBuilder markWith(final Marker marker) {
             this.marker = marker;
             return this;
         }

         public Log4jReporterBuilder labels(final NBLabels labels) {
             this.labels = labels;
             return this;
         }

         public Log4jReporterBuilder filter(final MetricFilter filter) {
             this.filter = filter;
             return this;
         }

         public Log4jReporterBuilder withLoggingLevel(final Log4JMetricsReporter.LoggingLevel loggingLevel) {
             this.loggingLevel = loggingLevel;
             return this;
         }

         public Log4JMetricsReporter build() {
             final LoggerProxy loggerProxy = switch (this.loggingLevel) {
                 case TRACE -> new TraceLoggerProxy(this.logger);
@@ -245,6 +279,7 @@ public class NBCreators {
             return new Log4JMetricsReporter(this.component, loggerProxy, this.marker, this.filter, this.labels, this.millis, this.oneLastTime);
         }
     }

     /* private class to allow logger configuration */
     public abstract static class LoggerProxy {
         protected final Logger logger;
@@ -356,22 +391,27 @@ public class NBCreators {
             this.component = component;
             this.output = output;
         }

         public ConsoleReporterBuilder labels(NBLabels labels) {
             this.labels = labels;
             return this;
         }

         public ConsoleReporterBuilder interval(int interval) {
             this.interval = interval;
             return this;
         }

         public ConsoleReporterBuilder oneLastTime(boolean oneLastTime) {
             this.oneLastTime = oneLastTime;
             return this;
         }

         public ConsoleReporterBuilder disabledMetricAttributes(Set<MetricAttribute> disabledMetricAttributes) {
             this.disabledMetricAttributes = disabledMetricAttributes;
             return this;
         }

         public ConsoleReporter build() {
             return new ConsoleReporter(component, labels, interval, oneLastTime, output, disabledMetricAttributes);
         }
@@ -387,10 +427,12 @@ public class NBCreators {
             this.component = component;
             this.filename = filename;
         }

         public CsvOutputWriterBuilder headers(String... headers) {
             this.headers = headers;
             return this;
         }

         public CsvOutputPluginWriter build() {
             return new CsvOutputPluginWriter(component, filename, headers);
         }
@@ -406,26 +448,32 @@ public class NBCreators {
         public CsvReporterBuilder(NBComponent component) {
             this.component = component;
         }

         public CsvReporterBuilder labels(NBLabels labels) {
             this.labels = labels;
             return this;
         }

         public CsvReporterBuilder path(Path reportTo) {
             this.reportTo = reportTo;
             return this;
         }

         public CsvReporterBuilder path(String reportTo) {
             this.reportTo = Path.of(reportTo);
             return this;
         }

         public CsvReporterBuilder interval(int interval) {
             this.interval = interval;
             return this;
         }

         public CsvReporterBuilder filter(MetricInstanceFilter filter) {
             this.filter = filter;
             return this;
         }

         public CsvReporter build() {
             return new CsvReporter(component, reportTo, interval, filter, labels);
         }
@@ -0,0 +1,97 @@
/*
 * Copyright (c) 2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.nosqlbench.nb.api.engine.metrics;

import io.nosqlbench.nb.api.engine.metrics.instruments.MetricCategory;
import io.nosqlbench.nb.api.engine.metrics.instruments.NBMetricGauge;
import io.nosqlbench.nb.api.labels.NBLabels;
import io.nosqlbench.nb.api.stats.StatBucket;

import java.util.DoubleSummaryStatistics;
import java.util.function.DoubleConsumer;

/**
 * Create a discrete stat reservoir as a gauge.
 */
public class WindowSummaryGauge implements NBMetricGauge, DoubleConsumer {
    private final NBLabels labels;
    private final Stat stat;
    private final StatBucket stats;
    private final String description;
    private final MetricCategory[] categories;
    private final int window;

    @Override
    public String typeName() {
        return "gauge";
    }

    @Override
    public String getDescription() {
        return this.description;
    }

    @Override
    public MetricCategory[] getCategories() {
        return this.categories;
    }

    public enum Stat {
        Min,
        Max,
        Average,
        Count,
        Sum
    }

    public WindowSummaryGauge(int window, NBLabels labels, Stat stat, String description,
                              MetricCategory... categories) {
        this.labels = labels;
        this.stat = stat;
        this.description = description;
        this.categories = categories;
        this.window = window;
        this.stats = new StatBucket(window);
    }

    public synchronized void accept(double value) {
        stats.apply(value);
    }

    @Override
    public synchronized Double getValue() {
        return switch (stat) {
            case Min -> stats.getMin();
            case Max -> stats.getMax();
            case Average -> stats.getAverage();
            case Count -> (double) stats.getCount();
            case Sum -> stats.getSum();
        };
    }

    @Override
    public NBLabels getLabels() {
        return labels;
    }

    @Override
    public String toString() {
        return this.labels.toString() + ":" + this.stats.toString();
    }
}
@@ -0,0 +1,94 @@
/*
 * Copyright (c) 2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.nosqlbench.nb.api.engine.metrics.wrappers;

import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.engine.metrics.WindowSummaryGauge;
import io.nosqlbench.nb.api.engine.metrics.instruments.MetricCategory;
import io.nosqlbench.nb.api.labels.NBLabeledElement;
import io.nosqlbench.nb.api.labels.NBLabels;
import io.nosqlbench.nb.api.stats.StatBucket;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class WindowedRelevancyMeasures implements NBLabeledElement {

    private final NBComponent parent;
    private final NBLabels labels;
    private final List<RelevancyFunction> functions = new ArrayList<>();
    private final List<WindowSummaryGauge> gauges = new ArrayList<>();
    private final int window;
    private int offset = 0;

    public WindowedRelevancyMeasures(NBComponent parent, int window) {
        this(parent, NBLabels.forKV(), window);
    }

    public WindowedRelevancyMeasures(NBComponent parent, NBLabels labels, int window) {
        this.parent = parent;
        this.labels = labels;
        this.window = window;
    }

    public WindowedRelevancyMeasures(NBComponent parent, Map<String, String> labels, int window) {
        this(parent, NBLabels.forMap(labels), window);
    }

    @Override
    public NBLabels getLabels() {
        return parent.getLabels().and(labels);
    }

    public WindowedRelevancyMeasures addFunction(RelevancyFunction... f) {
        for (RelevancyFunction function : f) {
            this.functions.add(function);
            function.prependLabels(this);
            WindowSummaryGauge gauge = parent.create().windowSummaryGauge(
                function.getUniqueName(),
                window,
                List.of("Average"),
                MetricCategory.Accuracy,
                WindowSummaryGauge.Stat.Average.toString()
            );
            this.gauges.add(gauge);
        }
        return this;
    }

    public void accept(int[] relevant, int[] actual) {
        offset++;
        if (offset >= window) {
            offset = 0;
        }

        for (int i = 0; i < functions.size(); i++) {
            double metricValue = functions.get(i).apply(relevant, actual);
            gauges.get(i).accept(metricValue);
        }
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (WindowSummaryGauge gauge : gauges) {
            sb.append(gauge.toString()).append("\n");
        }
        return sb.toString();
    }
}
@@ -14,7 +14,7 @@
  * limitations under the License.
  */

-package io.nosqlbench.scenarios.simframe.stabilization;
+package io.nosqlbench.nb.api.stats;

 public class DoubleRing {
     private final double[] dbuf;
@@ -47,4 +47,21 @@ public class DoubleRing {
     public int count() {
         return count;
     }

+    public double min() {
+        double min = Double.MAX_VALUE;
+        for (int i = 0; i < count; i++) {
+            min = Math.min(min, dbuf[i]);
+        }
+        return min;
+    }
+
+    public double max() {
+        double max = Double.MIN_VALUE;
+        for (int i = 0; i < count; i++) {
+            max = Math.max(max, dbuf[i]);
+        }
+        return max;
+    }
+
 }
@@ -14,10 +14,16 @@
  * limitations under the License.
  */

-package io.nosqlbench.scenarios.simframe.stabilization;
+package io.nosqlbench.nb.api.stats;

 import java.util.Objects;

+/**
+ * This is a relatively efficient statistics bucket which can maintain moving
+ * aggregates over a window of samples for count, mean, variance, stddev, sum.
+ * This is particularly useful when you know that each update to the data
+ * will likely be used in a query.
+ */
 public final class StatBucket {
     DoubleRing ringbuf;
     private double mean;
@@ -98,10 +104,31 @@ public final class StatBucket {
         return "StatBucket[" +
             "count=" + ringbuf.count() + ", " +
             "mean=" + mean + ", " +
-            "stddev=" + stddev() + ']';
+            "stddev=" + stddev() + ", " +
+            "variance=" + variance() + ']';
     }

+    public boolean primed() {
+        return this.count() == ringbuf.size();
+    }
+
+    public double getMin() {
+        return ringbuf.min();
+    }
+
+    public double getMax() {
+        return ringbuf.max();
+    }
+
+    public double getAverage() {
+        return this.mean();
+    }
+
+    public double getCount() {
+        return count();
+    }
+
+    public double getSum() {
+        return this.mean() * this.count();
+    }
 }
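The getters added above expose windowed aggregates backed by DoubleRing, with the sum derived as mean times count. A simplified standalone sketch of the same windowed behavior follows, using a deque instead of the ring buffer; it is illustrative only, and the real StatBucket also maintains streaming mean, variance, and stddev.

import java.util.ArrayDeque;

public class WindowStats {
    private final ArrayDeque<Double> ring = new ArrayDeque<>();
    private final int window;

    public WindowStats(int window) { this.window = window; }

    // Add a sample, evicting the oldest once the window is full.
    public void apply(double v) {
        ring.addLast(v);
        if (ring.size() > window) ring.removeFirst();
    }

    public double mean() { return ring.stream().mapToDouble(Double::doubleValue).average().orElse(0.0); }
    public double min()  { return ring.stream().mapToDouble(Double::doubleValue).min().orElse(Double.NaN); }
    public double max()  { return ring.stream().mapToDouble(Double::doubleValue).max().orElse(Double.NaN); }
    // Same derivation as StatBucket.getSum(): mean * count.
    public double sum()  { return mean() * ring.size(); }

    public static void main(String[] args) {
        WindowStats w = new WindowStats(3);
        for (double v : new double[]{2, 4, 6, 8}) w.apply(v);
        // window now holds 4, 6, 8
        System.out.printf("mean=%.1f min=%.1f max=%.1f sum=%.1f%n", w.mean(), w.min(), w.max(), w.sum());
    }
}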
@@ -0,0 +1,70 @@
/*
 * Copyright (c) 2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.nosqlbench.nb.api.stats;

import org.assertj.core.data.Offset;
import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;

class StatBucketTest {
    @Test
    public void testStreamingMean() {
        var bucket = new StatBucket();
        bucket.apply(5.0d);
        assertThat(bucket.mean()).isCloseTo(5.0d, Offset.offset(0.001d));
        bucket.apply(10.0d);
        assertThat(bucket.mean()).isCloseTo(7.5d, Offset.offset(0.001d));
        bucket.apply(15.0d);
        assertThat(bucket.mean()).isCloseTo(10.0d, Offset.offset(0.001d));
        bucket.apply(20.0d);
        assertThat(bucket.mean()).isCloseTo(12.5d, Offset.offset(0.001d));
    }

    @Test
    public void testStreamingComputations() {
        double[] samples = new double[]{2, 4, 4, 4, 5, 5, 7, 9};

        var bucket = new StatBucket(8);
        for (int i = 0; i < samples.length * 10; i++) {
            bucket.apply(samples[i % samples.length]);
            if (i > 0 && (i % samples.length) == 0) {
                assertThat(bucket.mean()).isCloseTo(5, Offset.offset(0.001d));
                assertThat(bucket.stddev()).isCloseTo(2.0, Offset.offset(0.001d));
            }
        }
    }

    @Test
    public void testErrorAccumulation1() {
        var bucket = new StatBucket(11);
        for (long base = 1; base < 10000000000000000L; base *= 10) {
            for (int i = 0; i < 10; i++) {
                long value = base + i;
                bucket.apply(value);
            }
            for (int i = 10; i < 20; i++) {
                long value = base + i;
                bucket.apply(value);
                double streamingMean = bucket.mean();
                assertThat(streamingMean).isCloseTo((double) (value - 5), Offset.offset(0.03d));
            }
        }
    }
}
@@ -21,6 +21,7 @@ import io.nosqlbench.nb.api.nbio.NBIO;
 import io.nosqlbench.nb.api.nbio.NBPathsAPI;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
+import org.jetbrains.annotations.NotNull;
 import org.snakeyaml.engine.v2.api.Load;
 import org.snakeyaml.engine.v2.api.LoadSettings;

@@ -49,6 +50,12 @@ public class NBAtFile {
      * <LI>{@code >-- } asserts each value starts with global option syntax (--)</LI>
      * </UL>
      *
+     * <P>Files can be included recursively using a format like <PRE>{@code
+     * - include:${DIR}/somefile.yaml
+     * }</PRE></P>
+     *
+     * Standard formatting specifiers above should work in this mode as well.
+     *
      * @param processInPlace The linked list which is statefully modified. If you need
      *                       an unmodified copy, then this is the responsibility of the caller.
      * @return An updated list with all values expanded and injected
@@ -59,10 +66,10 @@ public class NBAtFile {
         ListIterator<String> iter = processInPlace.listIterator();
         while (iter.hasNext()) {
             String spec = iter.next();
-            if (spec.startsWith("@")) {
+            if (spec.startsWith("@") || spec.startsWith("include=") || spec.startsWith("include:")) {
                 iter.previous();
                 iter.remove();
-                LinkedList<String> spliceIn = includeAt(spec);
+                LinkedList<String> spliceIn = includeAt(spec.replaceFirst("include=", "@").replaceFirst("include:", "@"));
                 for (String s : spliceIn) {
                     iter.add(s);
                 }
@@ -89,6 +96,22 @@ public class NBAtFile {
      * @return The linked list of arguments which is to be spliced into the caller's command list
      */
     public static LinkedList<String> includeAt(String spec) {
+        LinkedList<String> toInclude = doInclude(spec);
+        boolean recurse = false;
+        for (String s : toInclude) {
+            if (s.startsWith("include=") || s.startsWith("include:")) {
+                recurse = true;
+                break;
+            }
+        }
+        if (recurse) {
+            toInclude = includeAt(toInclude);
+        }
+        return toInclude;
+    }
+
+    private static @NotNull LinkedList<String> doInclude(String spec) {
         Matcher matcher = includePattern.matcher(spec);
         if (matcher.matches()) {
             String filepathSpec = matcher.group("filepath");
@@ -96,12 +119,11 @@ public class NBAtFile {
             String formatSpec = matcher.group("formatter");
             String[] datapath = (dataPathSpec != null && !dataPathSpec.isBlank()) ? dataPathSpec.split("(/|\\.)") : new String[]{};

-            String[] parts = filepathSpec.split("\\.", 2);
-            if (parts.length == 2 && !parts[1].toLowerCase().matches("yaml")) {
+            String filename = Path.of(filepathSpec).getFileName().toString();
+            if (filename.contains(".") && !(filename.toLowerCase().endsWith("yaml"))) {
                 throw new RuntimeException("Only the yaml format and extension is supported for at-files." +
-                    " You specified " + parts[1]);
+                    " You specified " + filepathSpec);
             }

             filepathSpec = (filepathSpec.endsWith(".yaml") ? filepathSpec : filepathSpec + ".yaml");
             Path atPath = Path.of(filepathSpec);
@@ -135,7 +157,6 @@ public class NBAtFile {
         } else {
             throw new RuntimeException("Unable to match at-file specifier: " + spec + " to pattern '" + includePattern.pattern() + "'");
         }
-
     }

     private static LinkedList<String> interposePath(LinkedList<String> formatted, Path atPath) {
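The recursion added to includeAt above expands include directives and, if the expansion produced more includes, expands again until none remain. A minimal sketch of that fixed-point loop follows; the load() function is a hypothetical stand-in for the yaml reading and argument formatting that NBAtFile actually performs.

import java.util.LinkedList;
import java.util.List;

public class IncludeSketch {
    // Expand include directives; if expansion produced further includes,
    // expand again until a fixed point is reached.
    static LinkedList<String> expand(LinkedList<String> args) {
        LinkedList<String> out = new LinkedList<>();
        for (String a : args) {
            if (a.startsWith("include:")) {
                out.addAll(load(a.substring("include:".length())));
            } else {
                out.add(a);
            }
        }
        boolean recurse = out.stream().anyMatch(s -> s.startsWith("include:"));
        return recurse ? expand(out) : out;
    }

    // Hypothetical loader; the real code reads and formats the referenced yaml file.
    static List<String> load(String path) {
        return path.equals("inner.yaml") ? List.of("arg2") : List.of("include:inner.yaml");
    }

    public static void main(String[] args) {
        System.out.println(expand(new LinkedList<>(List.of("arg1", "include:outer.yaml", "arg3"))));
        // -> [arg1, arg2, arg3]
    }
}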
@@ -79,4 +79,16 @@ class NBAtFileTest {
         assertThat(strings).containsExactly("--option1", "--option2=value2", "--option3=value3", "--option4=value4");
     }

+    @Test
+    public void testAtfileSimpleRecursion() {
+        LinkedList<String> strings = NBAtFile.includeAt("@src/test/resources/atfiles/simple_recursion.yaml");
+        assertThat(strings).containsExactly("arg1", "arg1", "arg2", "arg3", "arg3");
+    }
+
+    @Test
+    public void testAtfileDoubleRecursion() {
+        LinkedList<String> strings = NBAtFile.includeAt("@src/test/resources/atfiles/double_recursion.yaml");
+        assertThat(strings).containsExactly("arg1", "arg1", "arg1", "arg2", "arg3", "arg3", "arg3", "deepval");
+    }
+
 }
@@ -0,0 +1 @@
- deepval
@@ -0,0 +1,4 @@
- arg1
- include:${DIR}/simple_recursion.yaml
- arg3
- include:${DIR}/deeper/deeper_recursion.yaml
@@ -0,0 +1,3 @@
- arg1
- include:${DIR}/simple_list.yaml
- arg3
@@ -75,6 +75,7 @@ printf "getting appimage tool and building image...\n";
   chmod +x appimagetool-x86_64.AppImage
 fi

+# note if your linux has errors with the following then see https://docs.appimage.org/user-guide/troubleshooting/fuse.html
 ARCH=x86_64 ./appimagetool-x86_64.AppImage NB.AppDir ${BIN_NAME}
 # && chmod +x ${BIN_NAME}
 )

@@ -75,6 +75,7 @@ printf "getting appimage tool and building image...\n";
   chmod +x appimagetool-x86_64.AppImage
 fi

+# note if your linux has errors with the following then see https://docs.appimage.org/user-guide/troubleshooting/fuse.html
 ARCH=x86_64 ./appimagetool-x86_64.AppImage NB.AppDir ${BIN_NAME}
 # && chmod +x ${BIN_NAME}
 )
@@ -16,12 +16,12 @@

 package io.nosqlbench.scenarios.simframe.stabilization;

+import io.nosqlbench.nb.api.stats.StatBucket;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;

 import java.util.Arrays;
 import java.util.List;
 import java.util.OptionalDouble;
 import java.util.function.DoubleSupplier;
 import java.util.function.Supplier;
@@ -1,53 +0,0 @@
/*
 * Copyright (c) 2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.nosqlbench.scenarios.simframe.stats;

import io.nosqlbench.scenarios.simframe.stabilization.StatBucket;
import org.assertj.core.data.Offset;
import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;

class StatBucketTest {
    @Test
    public void testStreamingMean() {
        var bucket = new StatBucket();
        bucket.apply(5.0d);
        assertThat(bucket.mean()).isCloseTo(5.0d,Offset.offset(0.001d));
        bucket.apply(10.0d);
        assertThat(bucket.mean()).isCloseTo(7.5d,Offset.offset(0.001d));
        bucket.apply(15.0d);
        assertThat(bucket.mean()).isCloseTo(10.0d,Offset.offset(0.001d));
        bucket.apply(20.0d);
        assertThat(bucket.mean()).isCloseTo(12.5d,Offset.offset(0.001d));
    }

    @Test
    public void testStreamingComputations() {
        double[] samples = new double[]{2,4,4,4,5,5,7,9};

        var bucket = new StatBucket(8);
        for (int i = 0; i < samples.length * 10; i++) {
            bucket.apply(samples[i%samples.length]);
            if (i>0&&(i%samples.length)==0) {
                assertThat(bucket.mean()).isCloseTo(5,Offset.offset(0.001d));
                assertThat(bucket.stddev()).isCloseTo(2.0,Offset.offset(0.001d));
            }
        }
    }
}