diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b8c1b1c53..88b624b00 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,6 +2,8 @@ name: release on: push: + branches: + - main paths: - RELEASENOTES.** branches: diff --git a/RELEASENOTES.md b/RELEASENOTES.md index 09cc2a274..424fd0c7c 100644 --- a/RELEASENOTES.md +++ b/RELEASENOTES.md @@ -1,3 +1,8 @@ +- fa78ca54 (HEAD -> main, origin/main) Merge pull request #372 from lhotari/lh-detect-duplicates-after-gap +- 71c3b190 Detect delayed out-of-order delivery +- e694eaec Merge pull request #373 from lhotari/lh-upgrade-pulsar-2.8.1 +- 85e1f8a6 Upgrade Pulsar client to 2.8.1 version +- 6b50060a Detect duplicates after a gap - 3b674983 (HEAD -> main, origin/main) Merge pull request #362 from yabinmeng/main - bf98d644 Merge branch 'nosqlbench:main' into main - 793af965 Ignore abnormal message processing error for Shared and Key_Shared subscription type. diff --git a/adapter-cqld4/pom.xml b/adapter-cqld4/pom.xml index cfd47119d..8b8de5128 100644 --- a/adapter-cqld4/pom.xml +++ b/adapter-cqld4/pom.xml @@ -1,10 +1,12 @@ - + 4.0.0 io.nosqlbench mvn-defaults - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -23,13 +25,13 @@ io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench virtdata-lib-basics - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT compile diff --git a/adapters-api/pom.xml b/adapters-api/pom.xml index 8ff209de4..c1c9c2558 100644 --- a/adapters-api/pom.xml +++ b/adapters-api/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -25,20 +25,14 @@ io.nosqlbench nb-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench virtdata-userlibs - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT - - io.nosqlbench - nb-api - 4.15.58-SNAPSHOT - compile - diff --git a/category.txt b/category.txt deleted file mode 100644 index ae9304576..000000000 --- a/category.txt +++ /dev/null @@ -1 +0,0 @@ -c1 diff --git a/docsys/pom.xml b/docsys/pom.xml index 00542e0ef..5a78c4285 100644 --- a/docsys/pom.xml +++ b/docsys/pom.xml @@ -12,7 +12,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -21,7 +21,7 @@ io.nosqlbench nb-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT @@ -117,7 +117,7 @@ io.nosqlbench virtdata-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/driver-cockroachdb/pom.xml b/driver-cockroachdb/pom.xml index 1a7f2dac6..a198c0100 100644 --- a/driver-cockroachdb/pom.xml +++ b/driver-cockroachdb/pom.xml @@ -5,7 +5,7 @@ io.nosqlbench mvn-defaults - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -21,7 +21,7 @@ io.nosqlbench driver-jdbc - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT org.postgresql diff --git a/driver-cql-shaded/pom.xml b/driver-cql-shaded/pom.xml index 2e94a577c..fe4f74fb0 100644 --- a/driver-cql-shaded/pom.xml +++ b/driver-cql-shaded/pom.xml @@ -4,7 +4,7 @@ io.nosqlbench mvn-defaults - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -23,13 +23,13 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT @@ -81,15 +81,13 @@ io.netty netty-codec-haproxy - 4.1.54.Final - - - - - - + + io.netty + netty-transport-native-epoll + linux-x86_64 + @@ -185,21 +183,11 @@ false true true - - com.google.common com.datastax.internal.com_google_common - - - - - - - - @@ -212,7 +200,6 @@ - *:* diff --git a/driver-cql-shaded/src/main/resources/activities/baselinesv2/cql-tabular2.yaml 
b/driver-cql-shaded/src/main/resources/activities/baselinesv2/cql-tabular2.yaml index 643582cc3..03615b461 100644 --- a/driver-cql-shaded/src/main/resources/activities/baselinesv2/cql-tabular2.yaml +++ b/driver-cql-shaded/src/main/resources/activities/baselinesv2/cql-tabular2.yaml @@ -94,7 +94,8 @@ blocks: create table if not exists <>.<> ( part text, clust text, - data text, + data0 text, data1 text, data2 text, data3 text, + data4 text, data5 text, data6 text, data7 text, PRIMARY KEY (part,clust) ); tags: diff --git a/driver-cqld3-shaded/pom.xml b/driver-cqld3-shaded/pom.xml index fd7b12cfc..087b04a47 100644 --- a/driver-cqld3-shaded/pom.xml +++ b/driver-cqld3-shaded/pom.xml @@ -4,7 +4,7 @@ io.nosqlbench mvn-defaults - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -24,13 +24,13 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT @@ -71,15 +71,14 @@ io.netty netty-codec-haproxy - 4.1.54.Final - - - - - - + + io.netty + netty-transport-native-epoll + linux-x86_64 + + @@ -198,10 +197,10 @@ io.nosqlbench.generators.cql io.nosqlbench.generators.cql3.shaded - - - - + + io.netty + dse19.io.netty + diff --git a/driver-cqlverify/pom.xml b/driver-cqlverify/pom.xml index 57b3702c6..feab7d1f8 100644 --- a/driver-cqlverify/pom.xml +++ b/driver-cqlverify/pom.xml @@ -1,38 +1,38 @@ - 4.0.0 + 4.0.0 - - io.nosqlbench - mvn-defaults - 4.15.58-SNAPSHOT - ../mvn-defaults - + + io.nosqlbench + mvn-defaults + 4.15.64-SNAPSHOT + ../mvn-defaults + - driver-cqlverify - jar - ${project.artifactId} + driver-cqlverify + jar + ${project.artifactId} - - A CQL content verifier ActivityType, based on the CQL ActivityType - built on http://nosqlbench.io/ - + + A CQL content verifier ActivityType, based on the CQL ActivityType + built on http://nosqlbench.io/ + - + - + - - io.nosqlbench - driver-cql-shaded - 4.15.58-SNAPSHOT - + + io.nosqlbench + driver-cql-shaded + 4.15.64-SNAPSHOT + - - io.nosqlbench - drivers-api - 4.15.58-SNAPSHOT - + + io.nosqlbench + drivers-api + 4.15.64-SNAPSHOT + - + diff --git a/driver-diag/pom.xml b/driver-diag/pom.xml index 33626470f..80530bda6 100644 --- a/driver-diag/pom.xml +++ b/driver-diag/pom.xml @@ -5,7 +5,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -21,14 +21,14 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT compile io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT compile diff --git a/driver-dsegraph-shaded/pom.xml b/driver-dsegraph-shaded/pom.xml index 0c0630bfe..f3fee40f4 100644 --- a/driver-dsegraph-shaded/pom.xml +++ b/driver-dsegraph-shaded/pom.xml @@ -1,263 +1,263 @@ - 4.0.0 + 4.0.0 - - io.nosqlbench - mvn-defaults - 4.15.58-SNAPSHOT - ../mvn-defaults - + + io.nosqlbench + mvn-defaults + 4.15.64-SNAPSHOT + ../mvn-defaults + - driver-dsegraph-shaded - jar - ${project.artifactId} + driver-dsegraph-shaded + jar + ${project.artifactId} - - A DSE Graph ActivityType driver for nosqlbench, based on http://nosqlbench.io/ - + + A DSE Graph ActivityType driver for nosqlbench, based on http://nosqlbench.io/ + - + - + - - io.nosqlbench - engine-api - 4.15.58-SNAPSHOT - + + io.nosqlbench + engine-api + 4.15.64-SNAPSHOT + - - io.nosqlbench - drivers-api - 4.15.58-SNAPSHOT - + + io.nosqlbench + drivers-api + 4.15.64-SNAPSHOT + - - com.datastax.dse - dse-java-driver-graph - 1.9.0 - - - org.slf4j - jcl-over-slf4j - - - + + com.datastax.dse + dse-java-driver-graph + 1.9.0 + + + org.slf4j + jcl-over-slf4j + + + - - com.datastax.dse - 
dse-java-driver-core - 1.9.0 - + + com.datastax.dse + dse-java-driver-core + 1.9.0 + - - com.datastax.dse - dse-java-driver-extras - 1.9.0 - - - com.datastax.dse - dse-java-driver-mapping - 1.9.0 - + + com.datastax.dse + dse-java-driver-extras + 1.9.0 + + + com.datastax.dse + dse-java-driver-mapping + 1.9.0 + - - - org.lz4 - lz4-java - + + + org.lz4 + lz4-java + - - - org.xerial.snappy - snappy-java - + + + org.xerial.snappy + snappy-java + - - org.antlr - antlr4-runtime - + + org.antlr + antlr4-runtime + - - - - - - + + + + + + - + - + - - io.dropwizard.metrics - metrics-core - 3.2.2 - - - - - - + + io.dropwizard.metrics + metrics-core + 3.2.2 + + + + + + - + - - org.testng - testng - 6.13.1 - test - + + org.testng + testng + 6.13.1 + test + - - org.assertj - assertj-core-java8 - 1.0.0m1 - test - + + org.assertj + assertj-core-java8 + 1.0.0m1 + test + - + - - org.yaml - snakeyaml - 1.23 - + + org.yaml + snakeyaml + 1.23 + - + - - + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + (lack of composite key syntax, nested type syntax, etc) + --> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - org.antlr - antlr4-maven-plugin - - src/main/grammars/cql3 - - - -package - io.nosqlbench.generators.cql.generated - - - - src/main/java/io/nosqlbench/generators/cql/generated - - - - - antlr - - antlr4 - - generate-sources - - - + + org.antlr + antlr4-maven-plugin + + src/main/grammars/cql3 + + + -package + io.nosqlbench.generators.cql.generated + + + + src/main/java/io/nosqlbench/generators/cql/generated + + + + + antlr + + antlr4 + + generate-sources + + + - - maven-shade-plugin - 3.2.3 - - - package - - shade - - - - - false - true - true - - - - - com.google.common - com.datastax.internal.com_google_common - - - - - - - - - - - - - *:* - - - - - io.nosqlbench.engine.cli.NBCLI - - - - - - *:* - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - - - - + + maven-shade-plugin + 3.2.3 + + + package + + shade + + + + + false + true + true + + + + + com.google.common + com.datastax.internal.com_google_common + + + + + + + + + + + + + *:* + + + + + io.nosqlbench.engine.cli.NBCLI + + + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + diff --git a/driver-grpc/pom.xml b/driver-grpc/pom.xml index 14ab9226e..73396c9a7 100644 --- a/driver-grpc/pom.xml +++ b/driver-grpc/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults/pom.xml @@ -21,7 +21,7 @@ io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/driver-http/pom.xml b/driver-http/pom.xml index 97ab03de2..85bb4cfee 100644 --- a/driver-http/pom.xml +++ b/driver-http/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -22,14 +22,14 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT compile io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT compile diff --git a/driver-jdbc/pom.xml b/driver-jdbc/pom.xml index 9703ccd21..77128bc85 100644 --- a/driver-jdbc/pom.xml +++ b/driver-jdbc/pom.xml @@ -3,7 +3,7 @@ nosqlbench io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT 4.0.0 @@ -19,7 +19,7 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT compile diff --git a/driver-jms/pom.xml b/driver-jms/pom.xml index d3e7406c1..e0ab5eeb0 100644 --- a/driver-jms/pom.xml +++ b/driver-jms/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -17,36 +17,36 @@ into a 
pulsar system via JMS 2.0 compatibile APIs. NOTE: this is JMS compatible driver from DataStax that allows using a Pulsar cluster - as the potential JMS Destination + as the potential JMS Destination - - - - - - - - - - - - - - + + + + + + + + + + + + + + io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-stdout - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/driver-jmx/pom.xml b/driver-jmx/pom.xml index 23b4708e8..7e9192de0 100644 --- a/driver-jmx/pom.xml +++ b/driver-jmx/pom.xml @@ -5,7 +5,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -22,13 +22,13 @@ io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT compile io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT compile diff --git a/driver-kafka/pom.xml b/driver-kafka/pom.xml index e5eb23fee..a044914ba 100644 --- a/driver-kafka/pom.xml +++ b/driver-kafka/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -37,27 +37,27 @@ io.confluent - kafka-avro-serializer - 5.5.1 + kafka-avro-serializer + 5.5.1 - - io.nosqlbench - engine-api - 4.15.58-SNAPSHOT - + + io.nosqlbench + engine-api + 4.15.64-SNAPSHOT + - - io.nosqlbench - driver-stdout - 4.15.58-SNAPSHOT - + + io.nosqlbench + driver-stdout + 4.15.64-SNAPSHOT + - - - - - + + + + + @@ -71,23 +71,23 @@ - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + diff --git a/driver-mongodb/pom.xml b/driver-mongodb/pom.xml index 312a71876..8c25ad80d 100644 --- a/driver-mongodb/pom.xml +++ b/driver-mongodb/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -21,13 +21,13 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/driver-pulsar/pom.xml b/driver-pulsar/pom.xml index 07b9f1519..9bef01304 100644 --- a/driver-pulsar/pom.xml +++ b/driver-pulsar/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -18,7 +18,7 @@ - 2.8.0 + 2.8.1 @@ -40,13 +40,13 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-stdout - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/PulsarActivity.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/PulsarActivity.java index 6d24f312a..af7f64744 100644 --- a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/PulsarActivity.java +++ b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/PulsarActivity.java @@ -10,6 +10,8 @@ import io.nosqlbench.driver.pulsar.util.PulsarNBClientConf; import io.nosqlbench.engine.api.activityapi.core.ActivityDefObserver; import io.nosqlbench.engine.api.activityapi.errorhandling.modular.NBErrorHandler; import io.nosqlbench.engine.api.activityapi.planning.OpSequence; +import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiter; +import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiters; import io.nosqlbench.engine.api.activityimpl.ActivityDef; import io.nosqlbench.engine.api.activityimpl.OpDispenser; import io.nosqlbench.engine.api.activityimpl.SimpleActivity; @@ -38,6 +40,12 @@ public class PulsarActivity extends SimpleActivity implements ActivityDefObserve // Metrics for NB Pulsar driver milestone: https://github.com/nosqlbench/nosqlbench/milestone/11 // - end-to-end latency private Histogram e2eMsgProcLatencyHistogram; + // - message out of sequence error 
counter + private Counter msgErrOutOfSeqCounter; + // - message loss counter + private Counter msgErrLossCounter; + // - message duplicate (when dedup is enabled) error counter + private Counter msgErrDuplicateCounter; private PulsarSpaceCache pulsarCache; @@ -51,6 +59,7 @@ public class PulsarActivity extends SimpleActivity implements ActivityDefObserve private NBErrorHandler errorHandler; private OpSequence> sequencer; private volatile Throwable asyncOperationFailure; + private boolean cycleratePerThread; public PulsarActivity(ActivityDef activityDef) { super(activityDef); @@ -76,6 +85,9 @@ public class PulsarActivity extends SimpleActivity implements ActivityDefObserve commitTransactionTimer = ActivityMetrics.timer(activityDef, "commit_transaction"); e2eMsgProcLatencyHistogram = ActivityMetrics.histogram(activityDef, "e2e_msg_latency"); + msgErrOutOfSeqCounter = ActivityMetrics.counter(activityDef, "err_msg_oos"); + msgErrLossCounter = ActivityMetrics.counter(activityDef, "err_msg_loss"); + msgErrDuplicateCounter = ActivityMetrics.counter(activityDef, "err_msg_dup"); String pulsarClntConfFile = activityDef.getParams().getOptionalString("config").orElse("config.properties"); @@ -99,11 +111,26 @@ public class PulsarActivity extends SimpleActivity implements ActivityDefObserve () -> activityDef.getParams().getOptionalString("errors").orElse("stop"), this::getExceptionMetrics ); + + cycleratePerThread = activityDef.getParams().takeBoolOrDefault("cyclerate_per_thread", false); } + private final ThreadLocal cycleLimiterThreadLocal = ThreadLocal.withInitial(() -> { + if (super.getCycleLimiter() != null) { + return RateLimiters.createOrUpdate(this.getActivityDef(), "cycles", null, + super.getCycleLimiter().getRateSpec()); + } else { + return null; + } + }); + @Override - public synchronized void onActivityDefUpdate(ActivityDef activityDef) { - super.onActivityDefUpdate(activityDef); + public RateLimiter getCycleLimiter() { + if (cycleratePerThread) { + return cycleLimiterThreadLocal.get(); + } else { + return super.getCycleLimiter(); + } } public NBErrorHandler getErrorHandler() { return errorHandler; } @@ -231,4 +258,7 @@ public class PulsarActivity extends SimpleActivity implements ActivityDefObserve public Timer getCommitTransactionTimer() { return commitTransactionTimer; } public Histogram getE2eMsgProcLatencyHistogram() { return e2eMsgProcLatencyHistogram; } + public Counter getMsgErrOutOfSeqCounter() { return msgErrOutOfSeqCounter; } + public Counter getMsgErrLossCounter() { return msgErrLossCounter; } + public Counter getMsgErrDuplicateCounter() { return msgErrDuplicateCounter; } } diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/exception/PulsarMsgDuplicateException.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/exception/PulsarMsgDuplicateException.java deleted file mode 100644 index 8847ee6a4..000000000 --- a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/exception/PulsarMsgDuplicateException.java +++ /dev/null @@ -1,10 +0,0 @@ -package io.nosqlbench.driver.pulsar.exception; - -public class PulsarMsgDuplicateException extends RuntimeException { - - public PulsarMsgDuplicateException(boolean asyncPulsarOp, long nbCycleNum, long curMsgSeqId, long prevMsgSeqId) { - super("" + (asyncPulsarOp ? 
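
For context on the `cyclerate_per_thread` option introduced in the PulsarActivity hunk above: when it is enabled, each worker thread lazily builds its own cycle rate limiter from the shared rate spec instead of contending on one global limiter. Below is a minimal standalone sketch of that selection pattern under stated assumptions: `CycleRateLimiter` and the naive fixed-delay limiter are illustrative stand-ins, not the nosqlbench `RateLimiter`/`RateLimiters` API used in the actual change.

```java
import java.util.concurrent.locks.LockSupport;

// Illustrative stand-in for a cycle rate limiter (not the nosqlbench API).
interface CycleRateLimiter {
    void acquire();
}

class CycleLimiterSelector {
    private final boolean cycleratePerThread;            // maps to cyclerate_per_thread
    private final CycleRateLimiter sharedLimiter;        // one limiter shared by all threads
    private final ThreadLocal<CycleRateLimiter> perThreadLimiter;

    CycleLimiterSelector(boolean cycleratePerThread, double opsPerSecond) {
        this.cycleratePerThread = cycleratePerThread;
        this.sharedLimiter = newLimiter(opsPerSecond);
        // Each worker thread lazily creates its own limiter from the same rate spec,
        // mirroring the ThreadLocal.withInitial(...) used in the PulsarActivity change above.
        this.perThreadLimiter = ThreadLocal.withInitial(() -> newLimiter(opsPerSecond));
    }

    CycleRateLimiter getCycleLimiter() {
        return cycleratePerThread ? perThreadLimiter.get() : sharedLimiter;
    }

    // Naive fixed-delay limiter, used only to keep the sketch self-contained.
    private static CycleRateLimiter newLimiter(double opsPerSecond) {
        long nanosPerOp = (long) (1_000_000_000d / opsPerSecond);
        return () -> LockSupport.parkNanos(nanosPerOp);
    }
}
```

With `cyclerate_per_thread: true`, the configured rate therefore applies per thread rather than to the activity as a whole.
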
"[AsyncAPI]" : "[SyncAPI]") + - " Detected duplicate message when message deduplication is enabled (curCycleNum=" + nbCycleNum + - ", curMsgSeqId=" + curMsgSeqId + ", prevMsgSeqId=" + prevMsgSeqId + ")."); - } -} diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/exception/PulsarMsgLossException.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/exception/PulsarMsgLossException.java deleted file mode 100644 index 268d0651f..000000000 --- a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/exception/PulsarMsgLossException.java +++ /dev/null @@ -1,11 +0,0 @@ -package io.nosqlbench.driver.pulsar.exception; - -public class PulsarMsgLossException extends RuntimeException { - - public PulsarMsgLossException(boolean asyncPulsarOp, long nbCycleNum, long curMsgSeqId, long prevMsgSeqId) { - super("" + (asyncPulsarOp ? "[AsyncAPI]" : "[SyncAPI]") + - " Detected message sequence id gap (curCycleNum=" + nbCycleNum + - ", curMsgSeqId=" + curMsgSeqId + ", prevMsgSeqId=" + prevMsgSeqId + "). " + - "Some published messages are not received!"); - } -} diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/exception/PulsarMsgOutOfOrderException.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/exception/PulsarMsgOutOfOrderException.java deleted file mode 100644 index 88812b493..000000000 --- a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/exception/PulsarMsgOutOfOrderException.java +++ /dev/null @@ -1,11 +0,0 @@ -package io.nosqlbench.driver.pulsar.exception; - -public class PulsarMsgOutOfOrderException extends RuntimeException { - - public PulsarMsgOutOfOrderException(boolean asyncPulsarOp, long nbCycleNum, long curMsgSeqId, long prevMsgSeqId) { - super("" + (asyncPulsarOp ? "[AsyncAPI]" : "[SyncAPI]" ) + - " Detected message ordering is not guaranteed (curCycleNum=" + nbCycleNum + - ", curMsgSeqId=" + curMsgSeqId + ", prevMsgSeqId=" + prevMsgSeqId + "). 
" + - "Older messages are received earlier!"); - } -} diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/MessageSequenceNumberSendingHandler.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/MessageSequenceNumberSendingHandler.java new file mode 100644 index 000000000..a7d813de2 --- /dev/null +++ b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/MessageSequenceNumberSendingHandler.java @@ -0,0 +1,87 @@ +package io.nosqlbench.driver.pulsar.ops; + +import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil; +import java.util.*; +import org.apache.commons.lang3.RandomUtils; + +/** + * Handles adding a monotonic sequence number to message properties of sent messages + */ +class MessageSequenceNumberSendingHandler { + static final int SIMULATED_ERROR_PROBABILITY_PERCENTAGE = 10; + long number = 1; + Queue outOfOrderNumbers; + + public long getNextSequenceNumber(Set simulatedErrorTypes) { + return getNextSequenceNumber(simulatedErrorTypes, SIMULATED_ERROR_PROBABILITY_PERCENTAGE); + } + + long getNextSequenceNumber(Set simulatedErrorTypes, int errorProbabilityPercentage) { + simulateError(simulatedErrorTypes, errorProbabilityPercentage); + return nextNumber(); + } + + private void simulateError(Set simulatedErrorTypes, int errorProbabilityPercentage) { + if (!simulatedErrorTypes.isEmpty() && shouldSimulateError(errorProbabilityPercentage)) { + int selectIndex = 0; + int numberOfErrorTypes = simulatedErrorTypes.size(); + if (numberOfErrorTypes > 1) { + // pick one of the simulated error type randomly + selectIndex = RandomUtils.nextInt(0, numberOfErrorTypes); + } + PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE errorType = simulatedErrorTypes.stream() + .skip(selectIndex) + .findFirst() + .get(); + switch (errorType) { + case OutOfOrder: + // simulate message out of order + injectMessagesOutOfOrder(); + break; + case MsgDup: + // simulate message duplication + injectMessageDuplication(); + break; + case MsgLoss: + // simulate message loss + injectMessageLoss(); + break; + } + } + } + + private boolean shouldSimulateError(int errorProbabilityPercentage) { + // Simulate error with the specified probability + return RandomUtils.nextInt(0, 100) < errorProbabilityPercentage; + } + + long nextNumber() { + if (outOfOrderNumbers != null) { + long nextNumber = outOfOrderNumbers.poll(); + if (outOfOrderNumbers.isEmpty()) { + outOfOrderNumbers = null; + } + return nextNumber; + } + return number++; + } + + void injectMessagesOutOfOrder() { + if (outOfOrderNumbers == null) { + outOfOrderNumbers = new ArrayDeque<>(Arrays.asList(number + 2, number, number + 1)); + number += 3; + } + } + + void injectMessageDuplication() { + if (outOfOrderNumbers == null) { + number--; + } + } + + void injectMessageLoss() { + if (outOfOrderNumbers == null) { + number++; + } + } +} diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarConsumerMapper.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarConsumerMapper.java index cae3afd42..3cd58fb88 100644 --- a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarConsumerMapper.java +++ b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarConsumerMapper.java @@ -1,15 +1,13 @@ package io.nosqlbench.driver.pulsar.ops; -import com.codahale.metrics.Counter; -import com.codahale.metrics.Histogram; -import com.codahale.metrics.Timer; import io.nosqlbench.driver.pulsar.PulsarActivity; import io.nosqlbench.driver.pulsar.PulsarSpace; import 
io.nosqlbench.engine.api.templating.CommandTemplate; +import java.util.HashMap; +import java.util.Map; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.pulsar.client.api.Consumer; -import org.apache.pulsar.client.api.Schema; import org.apache.pulsar.client.api.transaction.Transaction; import java.util.function.LongFunction; @@ -54,15 +52,16 @@ public class PulsarConsumerMapper extends PulsarTransactOpMapper { @Override public PulsarOp apply(long value) { + boolean seqTracking = seqTrackingFunc.apply(value); Consumer consumer = consumerFunc.apply(value); boolean asyncApi = asyncApiFunc.apply(value); boolean useTransaction = useTransactionFunc.apply(value); - boolean seqTracking = seqTrackingFunc.apply(value); Supplier transactionSupplier = transactionSupplierFunc.apply(value); boolean topicMsgDedup = topicMsgDedupFunc.apply(value); String subscriptionType = subscriptionTypeFunc.apply(value); return new PulsarConsumerOp( + this, pulsarActivity, asyncApi, useTransaction, @@ -74,6 +73,23 @@ public class PulsarConsumerMapper extends PulsarTransactOpMapper { clientSpace.getPulsarSchema(), clientSpace.getPulsarClientConf().getConsumerTimeoutSeconds(), value, - e2eMsProc); + e2eMsProc, + this::getReceivedMessageSequenceTracker); } + + + private ReceivedMessageSequenceTracker getReceivedMessageSequenceTracker(String topicName) { + return receivedMessageSequenceTrackersForTopicThreadLocal.get() + .computeIfAbsent(topicName, k -> createReceivedMessageSequenceTracker()); + } + + private ReceivedMessageSequenceTracker createReceivedMessageSequenceTracker() { + return new ReceivedMessageSequenceTracker(pulsarActivity.getMsgErrOutOfSeqCounter(), + pulsarActivity.getMsgErrDuplicateCounter(), + pulsarActivity.getMsgErrLossCounter()); + } + + private final ThreadLocal> receivedMessageSequenceTrackersForTopicThreadLocal = + ThreadLocal.withInitial(HashMap::new); + } diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarConsumerOp.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarConsumerOp.java index 24f61acac..cf5155c51 100644 --- a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarConsumerOp.java +++ b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarConsumerOp.java @@ -7,6 +7,7 @@ import io.nosqlbench.driver.pulsar.PulsarActivity; import io.nosqlbench.driver.pulsar.exception.*; import io.nosqlbench.driver.pulsar.util.AvroUtil; import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil; +import java.util.function.Function; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -22,6 +23,7 @@ public class PulsarConsumerOp implements PulsarOp { private final static Logger logger = LogManager.getLogger(PulsarConsumerOp.class); + private final PulsarConsumerMapper consumerMapper; private final PulsarActivity pulsarActivity; private final boolean asyncPulsarOp; @@ -37,17 +39,16 @@ public class PulsarConsumerOp implements PulsarOp { private final boolean e2eMsgProc; private final long curCycleNum; - private long curMsgSeqId; - private long prevMsgSeqId; - private final Counter bytesCounter; private final Histogram messageSizeHistogram; private final Timer transactionCommitTimer; // keep track of end-to-end message latency private final Histogram e2eMsgProcLatencyHistogram; + private final Function receivedMessageSequenceTrackerForTopic; public PulsarConsumerOp( + PulsarConsumerMapper consumerMapper, 
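
A hedged usage sketch of the `MessageSequenceNumberSendingHandler` class introduced just above. The demo class name is hypothetical and it sits in the same package because the handler is package-private; the error probability is forced to 100 so each injected pattern is deterministic, and the printed values mirror the expectations in `MessageSequenceNumberSendingHandlerTest` further down.

```java
package io.nosqlbench.driver.pulsar.ops;

import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil;
import java.util.Collections;

// Hypothetical same-package demo of the producer-side sequence number handler.
class SequenceNumberHandlerDemo {
    public static void main(String[] args) {
        MessageSequenceNumberSendingHandler handler = new MessageSequenceNumberSendingHandler();

        System.out.println(handler.getNextSequenceNumber(Collections.emptySet()));            // 1
        // MsgLoss skips a number, leaving a gap the consumer side counts as loss.
        System.out.println(handler.getNextSequenceNumber(
            Collections.singleton(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.MsgLoss), 100));     // 3
        // MsgDup re-emits the previous number, which the consumer counts as a duplicate.
        System.out.println(handler.getNextSequenceNumber(
            Collections.singleton(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.MsgDup), 100));      // 3
        // OutOfOrder emits n+2, n, n+1 over the next three calls.
        System.out.println(handler.getNextSequenceNumber(
            Collections.singleton(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.OutOfOrder), 100));  // 6
        System.out.println(handler.getNextSequenceNumber(Collections.emptySet()));            // 4
        System.out.println(handler.getNextSequenceNumber(Collections.emptySet()));            // 5
    }
}
```
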
PulsarActivity pulsarActivity, boolean asyncPulsarOp, boolean useTransaction, @@ -59,8 +60,10 @@ public class PulsarConsumerOp implements PulsarOp { Schema schema, int timeoutSeconds, long curCycleNum, - boolean e2eMsgProc) + boolean e2eMsgProc, + Function receivedMessageSequenceTrackerForTopic) { + this.consumerMapper = consumerMapper; this.pulsarActivity = pulsarActivity; this.asyncPulsarOp = asyncPulsarOp; @@ -76,14 +79,22 @@ public class PulsarConsumerOp implements PulsarOp { this.curCycleNum = curCycleNum; this.e2eMsgProc = e2eMsgProc; - this.curMsgSeqId = 0; - this.prevMsgSeqId = (curCycleNum - 1); - this.bytesCounter = pulsarActivity.getBytesCounter(); this.messageSizeHistogram = pulsarActivity.getMessageSizeHistogram(); this.transactionCommitTimer = pulsarActivity.getCommitTransactionTimer(); this.e2eMsgProcLatencyHistogram = pulsarActivity.getE2eMsgProcLatencyHistogram(); + this.receivedMessageSequenceTrackerForTopic = receivedMessageSequenceTrackerForTopic; + } + + private void checkAndUpdateMessageErrorCounter(Message message) { + String msgSeqIdStr = message.getProperty(PulsarActivityUtil.MSG_SEQUENCE_NUMBER); + + if ( !StringUtils.isBlank(msgSeqIdStr) ) { + long sequenceNumber = Long.parseLong(msgSeqIdStr); + ReceivedMessageSequenceTracker receivedMessageSequenceTracker = receivedMessageSequenceTrackerForTopic.apply(message.getTopicName()); + receivedMessageSequenceTracker.sequenceNumberReceived(sequenceNumber); + } } @Override @@ -124,13 +135,15 @@ public class PulsarConsumerOp implements PulsarOp { org.apache.avro.generic.GenericRecord avroGenericRecord = AvroUtil.GetGenericRecord_ApacheAvro(avroSchema, message.getData()); - logger.debug("Sync message received: msg-key={}; msg-properties={}; msg-payload={}", + logger.debug("({}) Sync message received: msg-key={}; msg-properties={}; msg-payload={}", + consumer.getConsumerName(), message.getKey(), message.getProperties(), avroGenericRecord.toString()); } else { - logger.debug("Sync message received: msg-key={}; msg-properties={}; msg-payload={}", + logger.debug("({}) Sync message received: msg-key={}; msg-properties={}; msg-payload={}", + consumer.getConsumerName(), message.getKey(), message.getProperties(), new String(message.getData())); @@ -143,47 +156,17 @@ public class PulsarConsumerOp implements PulsarOp { e2eMsgProcLatencyHistogram.update(e2eMsgLatency); } - // keep track of message ordering and message loss - String msgSeqIdStr = message.getProperties().get(PulsarActivityUtil.MSG_SEQUENCE_ID); - if ( (seqTracking) && !StringUtils.isBlank(msgSeqIdStr) ) { - curMsgSeqId = Long.parseLong(msgSeqIdStr); - - if ( prevMsgSeqId > -1) { - // normal case: message sequence id is monotonically increasing by 1 - if ((curMsgSeqId - prevMsgSeqId) != 1) { - // abnormal case: out of ordering - // - for any subscription type, this check should always hold - if (curMsgSeqId < prevMsgSeqId) { - throw new PulsarMsgOutOfOrderException( - false, curCycleNum, curMsgSeqId, prevMsgSeqId); - } - // - this sequence based message loss and message duplicate check can't be used for - // "Shared" subscription (ignore this check) - // - TODO: for Key_Shared subscription type, this logic needs to be improved on - // per-key basis - else { - if ( !StringUtils.equalsAnyIgnoreCase(subscriptionType, - PulsarActivityUtil.SUBSCRIPTION_TYPE.Shared.label, - PulsarActivityUtil.SUBSCRIPTION_TYPE.Key_Shared.label)) { - // abnormal case: message loss - if ((curMsgSeqId - prevMsgSeqId) > 1) { - throw new PulsarMsgLossException( - false, curCycleNum, curMsgSeqId, 
prevMsgSeqId); - } else if (topicMsgDedup && (curMsgSeqId == prevMsgSeqId)) { - throw new PulsarMsgDuplicateException( - false, curCycleNum, curMsgSeqId, prevMsgSeqId); - } - } - } - } - } - } + // keep track of message errors and update error counters + if (seqTracking) checkAndUpdateMessageErrorCounter(message); int messageSize = message.getData().length; bytesCounter.inc(messageSize); messageSizeHistogram.update(messageSize); - if (useTransaction) { + if (!useTransaction) { + consumer.acknowledge(message.getMessageId()); + } + else { consumer.acknowledgeAsync(message.getMessageId(), transaction).get(); // little problem: here we are counting the "commit" time @@ -194,14 +177,12 @@ public class PulsarConsumerOp implements PulsarOp { transaction.commit().get(); } } - else { - consumer.acknowledge(message.getMessageId()); - } } catch (Exception e) { logger.error( "Sync message receiving failed - timeout value: {} seconds ", timeoutSeconds); + e.printStackTrace(); throw new PulsarDriverUnexpectedException("" + "Sync message receiving failed - timeout value: " + timeoutSeconds + " seconds "); } @@ -236,13 +217,15 @@ public class PulsarConsumerOp implements PulsarOp { org.apache.avro.generic.GenericRecord avroGenericRecord = AvroUtil.GetGenericRecord_ApacheAvro(avroSchema, message.getData()); - logger.debug("Async message received: msg-key={}; msg-properties={}; msg-payload={})", + logger.debug("({}) Async message received: msg-key={}; msg-properties={}; msg-payload={})", + consumer.getConsumerName(), message.getKey(), message.getProperties(), avroGenericRecord.toString()); } else { - logger.debug("Async message received: msg-key={}; msg-properties={}; msg-payload={})", + logger.debug("({}) Async message received: msg-key={}; msg-properties={}; msg-payload={})", + consumer.getConsumerName(), message.getKey(), message.getProperties(), new String(message.getData())); @@ -254,47 +237,14 @@ public class PulsarConsumerOp implements PulsarOp { e2eMsgProcLatencyHistogram.update(e2eMsgLatency); } - // keep track of message ordering, message loss, and message duplication - String msgSeqIdStr = message.getProperties().get(PulsarActivityUtil.MSG_SEQUENCE_ID); - if ( (seqTracking) && !StringUtils.isBlank(msgSeqIdStr) ) { - curMsgSeqId = Long.parseLong(msgSeqIdStr); + // keep track of message errors and update error counters + if (seqTracking) checkAndUpdateMessageErrorCounter(message); - if (prevMsgSeqId > -1) { - // normal case: message sequence id is monotonically increasing by 1 - if ((curMsgSeqId - prevMsgSeqId) != 1) { - // abnormal case: out of ordering - // - for any subscription type, this check should always hold - if (curMsgSeqId < prevMsgSeqId) { - throw new PulsarMsgOutOfOrderException( - false, curCycleNum, curMsgSeqId, prevMsgSeqId); - } - // - this sequence based message loss and message duplicate check can't be used for - // "Shared" subscription (ignore this check) - // - TODO: for Key_Shared subscription type, this logic needs to be improved on - // per-key basis - else { - if ( !StringUtils.equalsAnyIgnoreCase(subscriptionType, - PulsarActivityUtil.SUBSCRIPTION_TYPE.Shared.label, - PulsarActivityUtil.SUBSCRIPTION_TYPE.Key_Shared.label)) { - // abnormal case: message loss - if ((curMsgSeqId - prevMsgSeqId) > 1) { - throw new PulsarMsgLossException( - false, curCycleNum, curMsgSeqId, prevMsgSeqId); - } else if (topicMsgDedup && (curMsgSeqId == prevMsgSeqId)) { - throw new PulsarMsgDuplicateException( - false, curCycleNum, curMsgSeqId, prevMsgSeqId); - } - } - } - } - } - } - - if 
(useTransaction) { - consumer.acknowledgeAsync(message.getMessageId(), transaction); + if (!useTransaction) { + consumer.acknowledgeAsync(message); } else { - consumer.acknowledgeAsync(message); + consumer.acknowledgeAsync(message.getMessageId(), transaction); } timeTracker.run(); @@ -304,8 +254,9 @@ public class PulsarConsumerOp implements PulsarOp { }); } catch (Exception e) { - throw new PulsarDriverUnexpectedException("Async message receiving failed"); + throw new PulsarDriverUnexpectedException(e); } } } + } diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarProducerMapper.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarProducerMapper.java index 9bdf63881..db3abd062 100644 --- a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarProducerMapper.java +++ b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarProducerMapper.java @@ -4,18 +4,17 @@ import io.nosqlbench.driver.pulsar.PulsarActivity; import io.nosqlbench.driver.pulsar.PulsarSpace; import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil; import io.nosqlbench.engine.api.templating.CommandTemplate; -import org.apache.commons.lang3.RandomUtils; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.LongFunction; +import java.util.function.Supplier; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.pulsar.client.api.Producer; import org.apache.pulsar.client.api.transaction.Transaction; -import java.util.HashMap; -import java.util.Map; -import java.util.function.LongFunction; -import java.util.function.Supplier; - /** * This maps a set of specifier functions to a pulsar operation. 
The pulsar operation contains * enough state to define a pulsar operation such that it can be executed, measured, and possibly @@ -31,7 +30,7 @@ public class PulsarProducerMapper extends PulsarTransactOpMapper { private final static Logger logger = LogManager.getLogger(PulsarProducerMapper.class); private final LongFunction> producerFunc; - private final LongFunction seqErrSimuTypeFunc; + private final Set seqErrSimuTypes; private final LongFunction keyFunc; private final LongFunction propFunc; private final LongFunction payloadFunc; @@ -44,14 +43,14 @@ public class PulsarProducerMapper extends PulsarTransactOpMapper { LongFunction seqTrackingFunc, LongFunction> transactionSupplierFunc, LongFunction> producerFunc, - LongFunction seqErrSimuTypeFunc, + Set seqErrSimuTypes, LongFunction keyFunc, LongFunction propFunc, LongFunction payloadFunc) { super(cmdTpl, clientSpace, pulsarActivity, asyncApiFunc, useTransactionFunc, seqTrackingFunc, transactionSupplierFunc); this.producerFunc = producerFunc; - this.seqErrSimuTypeFunc = seqErrSimuTypeFunc; + this.seqErrSimuTypes = seqErrSimuTypes; this.keyFunc = keyFunc; this.propFunc = propFunc; this.payloadFunc = payloadFunc; @@ -61,25 +60,10 @@ public class PulsarProducerMapper extends PulsarTransactOpMapper { public PulsarOp apply(long value) { boolean asyncApi = asyncApiFunc.apply(value); boolean useTransaction = useTransactionFunc.apply(value); - boolean seqTracking = seqTrackingFunc.apply(value); Supplier transactionSupplier = transactionSupplierFunc.apply(value); Producer producer = producerFunc.apply(value); - // Simulate error 10% of the time - float rndVal = RandomUtils.nextFloat(0, 1.0f); - boolean simulationError = (rndVal >= 0) && (rndVal < 0.1f); - String seqErrSimuType = seqErrSimuTypeFunc.apply(value); - boolean simulateMsgOutofOrder = simulationError && - !StringUtils.isBlank(seqErrSimuType) && - StringUtils.equalsIgnoreCase(seqErrSimuType, PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.OutOfOrder.label); - boolean simulateMsgLoss = simulationError && - !StringUtils.isBlank(seqErrSimuType) && - StringUtils.equalsIgnoreCase(seqErrSimuType, PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.MsgLoss.label); - boolean simulateMsgDup = simulationError && - !StringUtils.isBlank(seqErrSimuType) && - StringUtils.equalsIgnoreCase(seqErrSimuType, PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.MsgDup.label); - String msgKey = keyFunc.apply(value); String msgPayload = payloadFunc.apply(value); @@ -99,24 +83,11 @@ public class PulsarProducerMapper extends PulsarTransactOpMapper { } } - // Set message sequence tracking property - if (seqTracking) { - // normal case - if (!simulateMsgOutofOrder && !simulateMsgDup) { - msgProperties.put(PulsarActivityUtil.MSG_SEQUENCE_ID, String.valueOf(value)); - } - // simulate message out of order - else if ( simulateMsgOutofOrder ) { - int rndmOffset = 2; - msgProperties.put(PulsarActivityUtil.MSG_SEQUENCE_ID, - String.valueOf((value > rndmOffset) ? 
(value-rndmOffset) : value)); - } - // simulate message duplication - else { - msgProperties.put(PulsarActivityUtil.MSG_SEQUENCE_ID, String.valueOf(value-1)); - } - // message loss simulation is not done by message property - // we simply skip sending message in the current NB cycle + boolean sequenceTrackingEnabled = seqTrackingFunc.apply(value); + if (sequenceTrackingEnabled) { + long nextSequenceNumber = getMessageSequenceNumberSendingHandler(producer.getTopic()) + .getNextSequenceNumber(seqErrSimuTypes); + msgProperties.put(PulsarActivityUtil.MSG_SEQUENCE_NUMBER, String.valueOf(nextSequenceNumber)); } return new PulsarProducerOp( @@ -128,7 +99,15 @@ public class PulsarProducerMapper extends PulsarTransactOpMapper { clientSpace.getPulsarSchema(), msgKey, msgProperties, - msgPayload, - simulateMsgLoss); + msgPayload); } + + private MessageSequenceNumberSendingHandler getMessageSequenceNumberSendingHandler(String topicName) { + return MessageSequenceNumberSendingHandlersThreadLocal.get() + .computeIfAbsent(topicName, k -> new MessageSequenceNumberSendingHandler()); + } + + private final ThreadLocal> MessageSequenceNumberSendingHandlersThreadLocal = + ThreadLocal.withInitial(HashMap::new); + } diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarProducerOp.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarProducerOp.java index 51c1336b2..9875d2586 100644 --- a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarProducerOp.java +++ b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/PulsarProducerOp.java @@ -38,7 +38,6 @@ public class PulsarProducerOp implements PulsarOp { private final String msgKey; private final Map msgProperties; private final String msgPayload; - private final boolean simulateMsgLoss; private final Counter bytesCounter; private final Histogram messageSizeHistogram; @@ -52,8 +51,7 @@ public class PulsarProducerOp implements PulsarOp { Schema schema, String key, Map msgProperties, - String payload, - boolean simulateMsgLoss) { + String payload) { this.pulsarActivity = pulsarActivity; this.asyncPulsarOp = asyncPulsarOp; @@ -65,7 +63,6 @@ public class PulsarProducerOp implements PulsarOp { this.msgKey = key; this.msgProperties = msgProperties; this.msgPayload = payload; - this.simulateMsgLoss = simulateMsgLoss; this.bytesCounter = pulsarActivity.getBytesCounter(); this.messageSizeHistogram = pulsarActivity.getMessageSizeHistogram(); @@ -74,11 +71,6 @@ public class PulsarProducerOp implements PulsarOp { @Override public void run(Runnable timeTracker) { - // Skip this cycle (without sending messages) if we're doing message loss simulation - if (simulateMsgLoss) { - return; - } - if ( StringUtils.isBlank(msgPayload)) { throw new PulsarDriverParamException("Message payload (\"msg-value\") can't be empty!"); } @@ -146,13 +138,15 @@ public class PulsarProducerOp implements PulsarOp { org.apache.avro.generic.GenericRecord avroGenericRecord = AvroUtil.GetGenericRecord_ApacheAvro(avroSchema, msgPayload); - logger.debug("Sync message sent: msg-key={}; msg-properties={}; msg-payload={})", + logger.debug("({}) Sync message sent: msg-key={}; msg-properties={}; msg-payload={})", + producer.getProducerName(), msgKey, msgProperties, avroGenericRecord.toString()); } else { - logger.debug("Sync message sent: msg-key={}; msg-properties={}; msg-payload={}", + logger.debug("({}) Sync message sent; msg-key={}; msg-properties={}; msg-payload={}", + producer.getProducerName(), msgKey, msgProperties, msgPayload); @@ -199,13 
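
The PulsarProducerMapper change above replaces the old per-cycle random error flags with one `MessageSequenceNumberSendingHandler` per (thread, topic) and stamps the emitted number into the outgoing message properties under `MSG_SEQUENCE_NUMBER`. A condensed sketch of that caching-plus-stamping pattern follows; the class and method names are hypothetical and the handler is simplified to a plain counter to keep the example self-contained.

```java
import java.util.HashMap;
import java.util.Map;

// Condensed illustration of the per-thread, per-topic sequence handler cache.
// Each worker thread keeps its own map (no synchronization needed), and each
// topic gets its own monotonic sequence.
class PerTopicSequenceStamper {
    // Simplified stand-in for MessageSequenceNumberSendingHandler.
    static final class SequenceHandler {
        private long next = 1;
        long nextSequenceNumber() { return next++; }
    }

    private static final String MSG_SEQUENCE_NUMBER = "sequence_number"; // property key, as in PulsarActivityUtil

    private final ThreadLocal<Map<String, SequenceHandler>> handlersByTopic =
        ThreadLocal.withInitial(HashMap::new);

    /** Adds the next per-topic sequence number to the outgoing message properties. */
    void stampSequenceNumber(String topicName, Map<String, String> msgProperties) {
        SequenceHandler handler = handlersByTopic.get()
            .computeIfAbsent(topicName, k -> new SequenceHandler());
        msgProperties.put(MSG_SEQUENCE_NUMBER, String.valueOf(handler.nextSequenceNumber()));
    }
}
```

The consumer side applies the same ThreadLocal-map pattern in PulsarConsumerMapper to cache one `ReceivedMessageSequenceTracker` per topic.
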
+193,15 @@ public class PulsarProducerOp implements PulsarOp { org.apache.avro.generic.GenericRecord avroGenericRecord = AvroUtil.GetGenericRecord_ApacheAvro(avroSchema, msgPayload); - logger.debug("Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={})", + logger.debug("({}) Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={})", + producer.getProducerName(), msgKey, msgProperties, avroGenericRecord.toString()); } else { - logger.debug("Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={}", + logger.debug("({}) Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={}", + producer.getProducerName(), msgKey, msgProperties, msgPayload); diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/ReadyPulsarOp.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/ReadyPulsarOp.java index 57fd56c07..69c29c470 100644 --- a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/ReadyPulsarOp.java +++ b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/ReadyPulsarOp.java @@ -1,6 +1,8 @@ package io.nosqlbench.driver.pulsar.ops; -import io.nosqlbench.driver.pulsar.*; +import io.nosqlbench.driver.pulsar.PulsarActivity; +import io.nosqlbench.driver.pulsar.PulsarSpace; +import io.nosqlbench.driver.pulsar.PulsarSpaceCache; import io.nosqlbench.driver.pulsar.exception.PulsarDriverParamException; import io.nosqlbench.driver.pulsar.exception.PulsarDriverUnsupportedOpException; import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil; @@ -11,18 +13,15 @@ import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.pulsar.client.admin.PulsarAdmin; -import org.apache.pulsar.client.admin.PulsarAdminException; -import org.apache.pulsar.client.api.Producer; import org.apache.pulsar.client.api.Consumer; +import org.apache.pulsar.client.api.Producer; import org.apache.pulsar.client.api.Reader; import org.apache.pulsar.client.api.transaction.Transaction; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; +import java.util.*; import java.util.function.LongFunction; import java.util.function.Supplier; +import java.util.stream.Collectors; public class ReadyPulsarOp implements OpDispenser { @@ -354,10 +353,10 @@ public class ReadyPulsarOp implements OpDispenser { // check if we're going to simulate producer message out-of-sequence error // - message ordering // - message loss - LongFunction seqErrSimuTypeFunc = (l) -> null; + Set seqErrSimuTypes = Collections.emptySet(); if (cmdTpl.containsKey("seqerr_simu")) { if (cmdTpl.isStatic("seqerr_simu")) { - seqErrSimuTypeFunc = (l) -> cmdTpl.getStatic("seqerr_simu"); + seqErrSimuTypes = parseSimulatedErrorTypes(cmdTpl.getStatic("seqerr_simu")); } else { throw new PulsarDriverParamException("[resolveMsgSend()] \"seqerr_simu\" parameter cannot be dynamic!"); } @@ -405,12 +404,23 @@ public class ReadyPulsarOp implements OpDispenser { seqTrackingFunc, transactionSupplierFunc, producerFunc, - seqErrSimuTypeFunc, + seqErrSimuTypes, keyFunc, propFunc, valueFunc); } + private Set parseSimulatedErrorTypes(String sequenceErrorSimulatedTypeString) { + if (StringUtils.isBlank(sequenceErrorSimulatedTypeString)) { + return Collections.emptySet(); + } + return Arrays.stream(StringUtils.split(sequenceErrorSimulatedTypeString, ',')) + .map(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE::parseSimuType) + .filter(Optional::isPresent) + .map(Optional::get) + 
.collect(Collectors.toSet()); + } + private LongFunction resolveMsgConsume( PulsarSpace clientSpace, LongFunction topic_uri_func, @@ -450,34 +460,11 @@ public class ReadyPulsarOp implements OpDispenser { LongFunction> transactionSupplierFunc = (l) -> clientSpace.getTransactionSupplier(); //TODO make it dependant on current cycle? - LongFunction topicMsgDedupFunc = (l) -> { - String topic = topic_uri_func.apply(l); - String namespace = PulsarActivityUtil.getFullNamespaceName(topic); - PulsarAdmin pulsarAdmin = pulsarActivity.getPulsarAdmin(); - - // Check namespace-level deduplication setting - // - default to broker level deduplication setting - boolean nsMsgDedup = brokerMsgDupFunc.apply(l); - try { - nsMsgDedup = pulsarAdmin.namespaces().getDeduplicationStatus(namespace); - } - catch (PulsarAdminException pae) { - // it is fine if we're unable to check namespace level setting; use default - } - - // Check topic-level deduplication setting - // - default to namespace level deduplication setting - boolean topicMsgDedup = nsMsgDedup; - try { - topicMsgDedup = pulsarAdmin.topics().getDeduplicationStatus(topic); - } - catch (PulsarAdminException pae) { - // it is fine if we're unable to check topic level setting; use default - } - - return topicMsgDedup; - }; - + // TODO: Ignore namespace and topic level dedup check on the fly + // this will impact the consumer performance significantly + // Consider using caching or Memoizer in the future? + // (https://www.baeldung.com/guava-memoizer) + LongFunction topicMsgDedupFunc = brokerMsgDupFunc; LongFunction> consumerFunc = (l) -> clientSpace.getConsumer( diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/ReceivedMessageSequenceTracker.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/ReceivedMessageSequenceTracker.java new file mode 100644 index 000000000..fe3a27d6d --- /dev/null +++ b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/ops/ReceivedMessageSequenceTracker.java @@ -0,0 +1,150 @@ +package io.nosqlbench.driver.pulsar.ops; + +import com.codahale.metrics.Counter; +import java.util.Iterator; +import java.util.SortedSet; +import java.util.TreeSet; + +/** + * Detects message loss, message duplication and out-of-order message delivery + * based on a monotonic sequence number that each received message contains. + *

+ * Out-of-order messages are detected with a maximum look behind of 1000 sequence number entries. + * This is currently defined as a constant, {@link ReceivedMessageSequenceTracker#DEFAULT_MAX_TRACK_OUT_OF_ORDER_SEQUENCE_NUMBERS}. + */ +class ReceivedMessageSequenceTracker implements AutoCloseable { + private static final int DEFAULT_MAX_TRACK_OUT_OF_ORDER_SEQUENCE_NUMBERS = 1000; + private static final int DEFAULT_MAX_TRACK_SKIPPED_SEQUENCE_NUMBERS = 1000; + // message out-of-sequence error counter + private final Counter msgErrOutOfSeqCounter; + // duplicate message error counter + private final Counter msgErrDuplicateCounter; + // message loss error counter + private final Counter msgErrLossCounter; + private final SortedSet pendingOutOfSeqNumbers; + private final int maxTrackOutOfOrderSequenceNumbers; + private final SortedSet skippedSeqNumbers; + private final int maxTrackSkippedSequenceNumbers; + private long expectedNumber = -1; + + public ReceivedMessageSequenceTracker(Counter msgErrOutOfSeqCounter, Counter msgErrDuplicateCounter, Counter msgErrLossCounter) { + this(msgErrOutOfSeqCounter, msgErrDuplicateCounter, msgErrLossCounter, + DEFAULT_MAX_TRACK_OUT_OF_ORDER_SEQUENCE_NUMBERS, DEFAULT_MAX_TRACK_SKIPPED_SEQUENCE_NUMBERS); + } + + public ReceivedMessageSequenceTracker(Counter msgErrOutOfSeqCounter, Counter msgErrDuplicateCounter, Counter msgErrLossCounter, + int maxTrackOutOfOrderSequenceNumbers, int maxTrackSkippedSequenceNumbers) { + this.msgErrOutOfSeqCounter = msgErrOutOfSeqCounter; + this.msgErrDuplicateCounter = msgErrDuplicateCounter; + this.msgErrLossCounter = msgErrLossCounter; + this.maxTrackOutOfOrderSequenceNumbers = maxTrackOutOfOrderSequenceNumbers; + this.maxTrackSkippedSequenceNumbers = maxTrackSkippedSequenceNumbers; + this.pendingOutOfSeqNumbers = new TreeSet<>(); + this.skippedSeqNumbers = new TreeSet<>(); + } + + /** + * Notifies the tracker about a received sequence number + * + * @param sequenceNumber the sequence number of the received message + */ + public void sequenceNumberReceived(long sequenceNumber) { + if (expectedNumber == -1) { + expectedNumber = sequenceNumber + 1; + return; + } + + if (sequenceNumber < expectedNumber) { + if (skippedSeqNumbers.remove(sequenceNumber)) { + // late out-of-order delivery was detected + // decrease the loss counter + msgErrLossCounter.dec(); + // increment the out-of-order counter + msgErrOutOfSeqCounter.inc(); + } else { + msgErrDuplicateCounter.inc(); + } + return; + } + + boolean messagesSkipped = false; + if (sequenceNumber > expectedNumber) { + if (pendingOutOfSeqNumbers.size() == maxTrackOutOfOrderSequenceNumbers) { + messagesSkipped = processLowestPendingOutOfSequenceNumber(); + } + if (!pendingOutOfSeqNumbers.add(sequenceNumber)) { + msgErrDuplicateCounter.inc(); + } + } else { + // sequenceNumber == expectedNumber + expectedNumber++; + } + processPendingOutOfSequenceNumbers(messagesSkipped); + cleanUpTooFarBehindOutOfSequenceNumbers(); + } + + private boolean processLowestPendingOutOfSequenceNumber() { + // remove the lowest pending out of sequence number + Long lowestOutOfSeqNumber = pendingOutOfSeqNumbers.first(); + pendingOutOfSeqNumbers.remove(lowestOutOfSeqNumber); + if (lowestOutOfSeqNumber > expectedNumber) { + // skip the expected number ahead to the number after the lowest sequence number + // increment the counter with the amount of sequence numbers that got skipped + // keep track of the skipped sequence numbers to detect late out-of-order message delivery + for (long l = expectedNumber; l < 
lowestOutOfSeqNumber; l++) { + msgErrLossCounter.inc(); + skippedSeqNumbers.add(l); + if (skippedSeqNumbers.size() > maxTrackSkippedSequenceNumbers) { + skippedSeqNumbers.remove(skippedSeqNumbers.first()); + } + } + expectedNumber = lowestOutOfSeqNumber + 1; + return true; + } else { + msgErrLossCounter.inc(); + } + return false; + } + + private void processPendingOutOfSequenceNumbers(boolean messagesSkipped) { + // check if there are previously received out-of-order sequence number that have been received + while (pendingOutOfSeqNumbers.remove(expectedNumber)) { + expectedNumber++; + if (!messagesSkipped) { + msgErrOutOfSeqCounter.inc(); + } + } + } + + private void cleanUpTooFarBehindOutOfSequenceNumbers() { + // remove sequence numbers that are too far behind + for (Iterator iterator = pendingOutOfSeqNumbers.iterator(); iterator.hasNext(); ) { + Long number = iterator.next(); + if (number < expectedNumber - maxTrackOutOfOrderSequenceNumbers) { + msgErrLossCounter.inc(); + iterator.remove(); + } else { + break; + } + } + } + + /** + * Handles the possible pending out of sequence numbers. Mainly needed in unit tests to assert the + * counter values. + */ + @Override + public void close() { + while (!pendingOutOfSeqNumbers.isEmpty()) { + processPendingOutOfSequenceNumbers(processLowestPendingOutOfSequenceNumber()); + } + } + + public int getMaxTrackOutOfOrderSequenceNumbers() { + return maxTrackOutOfOrderSequenceNumbers; + } + + public int getMaxTrackSkippedSequenceNumbers() { + return maxTrackSkippedSequenceNumbers; + } +} diff --git a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/util/PulsarActivityUtil.java b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/util/PulsarActivityUtil.java index 56b0e5f64..d47a856f6 100644 --- a/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/util/PulsarActivityUtil.java +++ b/driver-pulsar/src/main/java/io/nosqlbench/driver/pulsar/util/PulsarActivityUtil.java @@ -1,6 +1,7 @@ package io.nosqlbench.driver.pulsar.util; import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.*; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -12,9 +13,6 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.Arrays; -import java.util.Base64; -import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -48,8 +46,7 @@ public class PulsarActivityUtil { return Arrays.stream(OP_TYPES.values()).anyMatch(t -> t.label.equals(type)); } - public static final String MSG_SEQUENCE_ID = "sequence_id"; - public static final String MSG_SEQUENCE_TGTMAX = "sequence_tgtmax"; + public static final String MSG_SEQUENCE_NUMBER = "sequence_number"; /////// // Valid document level parameters for Pulsar NB yaml file @@ -314,6 +311,23 @@ public class PulsarActivityUtil { SEQ_ERROR_SIMU_TYPE(String label) { this.label = label; } + + private static final Map MAPPING = new HashMap<>(); + + static { + for (SEQ_ERROR_SIMU_TYPE simuType : values()) { + MAPPING.put(simuType.label, simuType); + MAPPING.put(simuType.label.toLowerCase(), simuType); + MAPPING.put(simuType.label.toUpperCase(), simuType); + MAPPING.put(simuType.name(), simuType); + MAPPING.put(simuType.name().toLowerCase(), simuType); + MAPPING.put(simuType.name().toUpperCase(), simuType); + } + } + + public static Optional parseSimuType(String simuTypeString) { + return 
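
To make the counter semantics of `ReceivedMessageSequenceTracker` (above) concrete, here is a hedged walkthrough of the "delayed out-of-order delivery" case this change set adds: a gap is first counted as message loss, and when the missing sequence number later arrives the loss count is decremented and the out-of-order count incremented instead. The demo class name is hypothetical and lives in the same package because the tracker is package-private; a small look-behind window (2) is used so the gap is processed quickly.

```java
package io.nosqlbench.driver.pulsar.ops;

import com.codahale.metrics.Counter;

// Hypothetical same-package demo of gap detection and late out-of-order reclassification.
class ReceivedMessageSequenceTrackerDemo {
    public static void main(String[] args) {
        Counter outOfSeq = new Counter();
        Counter duplicate = new Counter();
        Counter loss = new Counter();
        ReceivedMessageSequenceTracker tracker =
            new ReceivedMessageSequenceTracker(outOfSeq, duplicate, loss, 2, 20);

        tracker.sequenceNumberReceived(1);
        tracker.sequenceNumberReceived(2);
        tracker.sequenceNumberReceived(5);   // 3 and 4 missing so far
        tracker.sequenceNumberReceived(6);
        tracker.sequenceNumberReceived(7);   // look-behind window full: 3 and 4 counted as lost
        tracker.sequenceNumberReceived(3);   // delayed delivery of 3: loss-1, out-of-order+1
        tracker.sequenceNumberReceived(8);
        tracker.close();

        // Expected: loss=1 (4 never arrived), outOfSeq=1 (3 arrived late), duplicate=0
        System.out.printf("loss=%d outOfSeq=%d duplicate=%d%n",
            loss.getCount(), outOfSeq.getCount(), duplicate.getCount());
    }
}
```
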
Optional.ofNullable(MAPPING.get(simuTypeString.trim())); + } } public static boolean isValidSeqErrSimuType(String item) { return Arrays.stream(SEQ_ERROR_SIMU_TYPE.values()).anyMatch(t -> t.label.equals(item)); diff --git a/driver-pulsar/src/main/resources/activities/pulsar_client_sanity_seqloss.yaml b/driver-pulsar/src/main/resources/activities/pulsar_client_sanity_seqloss.yaml index 49a139f05..024bccd0b 100644 --- a/driver-pulsar/src/main/resources/activities/pulsar_client_sanity_seqloss.yaml +++ b/driver-pulsar/src/main/resources/activities/pulsar_client_sanity_seqloss.yaml @@ -5,7 +5,7 @@ bindings: # document level parameters that apply to all Pulsar client types: params: - topic_uri: "persistent://public/default/sanity_seqloss2" + topic_uri: "persistent://tnt0/ns0/sanity_seqloss12" # Only applicable to producer and consumer # - used for message ordering and message loss check async_api: "true" @@ -23,6 +23,7 @@ blocks: #seqerr_simu: "out_of_order" #seqerr_simu: "msg_loss" #seqerr_simu: "msg_dup" + #seqerr_simu: "out_of_order, msg_loss" msg_key: msg_property: msg_value: "{myvalue}" @@ -35,5 +36,5 @@ blocks: - name: s1 optype: msg-consume subscription_name: "mysub" - subscription_type: + subscription_type: "Shared" consumer_name: diff --git a/driver-pulsar/src/test/java/io/nosqlbench/driver/pulsar/ops/MessageSequenceNumberSendingHandlerTest.java b/driver-pulsar/src/test/java/io/nosqlbench/driver/pulsar/ops/MessageSequenceNumberSendingHandlerTest.java new file mode 100644 index 000000000..ded7971d8 --- /dev/null +++ b/driver-pulsar/src/test/java/io/nosqlbench/driver/pulsar/ops/MessageSequenceNumberSendingHandlerTest.java @@ -0,0 +1,74 @@ +package io.nosqlbench.driver.pulsar.ops; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import org.junit.jupiter.api.Test; + +class MessageSequenceNumberSendingHandlerTest { + MessageSequenceNumberSendingHandler sequenceNumberSendingHandler = new MessageSequenceNumberSendingHandler(); + + @Test + void shouldAddMonotonicSequence() { + for (long l = 1; l <= 100; l++) { + assertEquals(l, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + } + } + + @Test + void shouldInjectMessageLoss() { + assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(3L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.MsgLoss), 100)); + } + + @Test + void shouldInjectMessageDuplication() { + assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.MsgDup), 100)); + } + + @Test + void shouldInjectMessageOutOfOrder() { + assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(4L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.OutOfOrder), 100)); + assertEquals(2L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(3L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(5L, 
sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(6, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + } + + @Test + void shouldInjectOneOfTheSimulatedErrorsRandomly() { + Set allErrorTypes = new HashSet<>(Arrays.asList(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.values())); + + assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + long previousSequenceNumber = 1L; + int outOfSequenceInjectionCounter = 0; + int messageDupCounter = 0; + int messageLossCounter = 0; + int successCounter = 0; + for (int i = 0; i < 1000; i++) { + long nextSequenceNumber = sequenceNumberSendingHandler.getNextSequenceNumber(allErrorTypes); + if (nextSequenceNumber >= previousSequenceNumber + 3) { + outOfSequenceInjectionCounter++; + } else if (nextSequenceNumber <= previousSequenceNumber) { + messageDupCounter++; + } else if (nextSequenceNumber >= previousSequenceNumber + 2) { + messageLossCounter++; + } else if (nextSequenceNumber == previousSequenceNumber + 1) { + successCounter++; + } + previousSequenceNumber = nextSequenceNumber; + } + assertTrue(outOfSequenceInjectionCounter > 0); + assertTrue(messageDupCounter > 0); + assertTrue(messageLossCounter > 0); + assertEquals(1000, outOfSequenceInjectionCounter + messageDupCounter + messageLossCounter + successCounter); + } + +} diff --git a/driver-pulsar/src/test/java/io/nosqlbench/driver/pulsar/ops/ReceivedMessageSequenceTrackerTest.java b/driver-pulsar/src/test/java/io/nosqlbench/driver/pulsar/ops/ReceivedMessageSequenceTrackerTest.java new file mode 100644 index 000000000..d2391d67a --- /dev/null +++ b/driver-pulsar/src/test/java/io/nosqlbench/driver/pulsar/ops/ReceivedMessageSequenceTrackerTest.java @@ -0,0 +1,230 @@ +package io.nosqlbench.driver.pulsar.ops; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.codahale.metrics.Counter; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +class ReceivedMessageSequenceTrackerTest { + Counter msgErrOutOfSeqCounter = new Counter(); + Counter msgErrDuplicateCounter = new Counter(); + Counter msgErrLossCounter = new Counter(); + ReceivedMessageSequenceTracker messageSequenceTracker = new ReceivedMessageSequenceTracker(msgErrOutOfSeqCounter, msgErrDuplicateCounter, msgErrLossCounter, 20, 20); + + @Test + void shouldCountersBeZeroWhenSequenceDoesntContainGaps() { + // when + for (long l = 0; l < 100L; l++) { + messageSequenceTracker.sequenceNumberReceived(l); + } + messageSequenceTracker.close(); + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } + + @ParameterizedTest + @ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L}) + void shouldDetectMsgLossWhenEverySecondMessageIsLost(long totalMessages) { + doShouldDetectMsgLoss(totalMessages, 2); + } + + @ParameterizedTest + @ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L}) + void shouldDetectMsgLossWhenEveryThirdMessageIsLost(long totalMessages) { + doShouldDetectMsgLoss(totalMessages, 3); + } + + @ParameterizedTest + @ValueSource(longs = {20L, 21L, 40L, 41L, 42L, 43L, 100L}) + void shouldDetectMsgLossWhenEvery21stMessageIsLost(long totalMessages) { + doShouldDetectMsgLoss(totalMessages, 21); + } + + private void doShouldDetectMsgLoss(long totalMessages, int looseEveryNthMessage) { + int messagesLost = 0; + // when + boolean 
lastMessageWasLost = false; + for (long l = 0; l < totalMessages; l++) { + if (l % looseEveryNthMessage == 1) { + messagesLost++; + lastMessageWasLost = true; + continue; + } else { + lastMessageWasLost = false; + } + messageSequenceTracker.sequenceNumberReceived(l); + } + if (lastMessageWasLost) { + messageSequenceTracker.sequenceNumberReceived(totalMessages); + } + messageSequenceTracker.close(); + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(messagesLost, msgErrLossCounter.getCount()); + } + + @ParameterizedTest + @ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L}) + void shouldDetectMsgDuplication(long totalMessages) { + int messagesDuplicated = 0; + // when + for (long l = 0; l < totalMessages; l++) { + if (l % 2 == 1) { + messagesDuplicated++; + messageSequenceTracker.sequenceNumberReceived(l); + } + messageSequenceTracker.sequenceNumberReceived(l); + } + if (totalMessages % 2 == 0) { + messageSequenceTracker.sequenceNumberReceived(totalMessages); + } + if (totalMessages < 2 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers()) { + messageSequenceTracker.close(); + } + + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(messagesDuplicated, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectSingleMessageOutOfSequence() { + // when + for (long l = 0; l < 10L; l++) { + messageSequenceTracker.sequenceNumberReceived(l); + } + messageSequenceTracker.sequenceNumberReceived(10L); + messageSequenceTracker.sequenceNumberReceived(12L); + messageSequenceTracker.sequenceNumberReceived(11L); + for (long l = 13L; l < 100L; l++) { + messageSequenceTracker.sequenceNumberReceived(l); + } + + // then + assertEquals(1, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectMultipleMessagesOutOfSequence() { + // when + for (long l = 0; l < 10L; l++) { + messageSequenceTracker.sequenceNumberReceived(l); + } + messageSequenceTracker.sequenceNumberReceived(10L); + messageSequenceTracker.sequenceNumberReceived(14L); + messageSequenceTracker.sequenceNumberReceived(13L); + messageSequenceTracker.sequenceNumberReceived(11L); + messageSequenceTracker.sequenceNumberReceived(12L); + for (long l = 15L; l < 100L; l++) { + messageSequenceTracker.sequenceNumberReceived(l); + } + + // then + assertEquals(2, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectIndividualMessageLoss() { + // when + for (long l = 0; l < 100L; l++) { + if (l != 11L) { + messageSequenceTracker.sequenceNumberReceived(l); + } + } + messageSequenceTracker.close(); + + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(1, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectGapAndMessageDuplication() { + // when + for (long l = 0; l < 100L; l++) { + if (l != 11L) { + messageSequenceTracker.sequenceNumberReceived(l); + } + if (l == 12L) { + messageSequenceTracker.sequenceNumberReceived(l); + } + } + messageSequenceTracker.close(); + + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(1, msgErrDuplicateCounter.getCount()); + assertEquals(1, msgErrLossCounter.getCount()); + } + + @Test + void 
shouldDetectGapAndMessageDuplicationTimes2() { + // when + for (long l = 0; l < 100L; l++) { + if (l != 11L) { + messageSequenceTracker.sequenceNumberReceived(l); + } + if (l == 12L) { + messageSequenceTracker.sequenceNumberReceived(l); + messageSequenceTracker.sequenceNumberReceived(l); + } + } + messageSequenceTracker.close(); + + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(2, msgErrDuplicateCounter.getCount()); + assertEquals(1, msgErrLossCounter.getCount()); + } + + + @Test + void shouldDetectDelayedOutOfOrderDelivery() { + // when + for (long l = 0; l < 5 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers(); l++) { + if (l != 10) { + messageSequenceTracker.sequenceNumberReceived(l); + } + if (l == messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers() * 2) { + messageSequenceTracker.sequenceNumberReceived(10); + } + } + messageSequenceTracker.close(); + + // then + assertEquals(1, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectDelayedOutOfOrderDeliveryOf2ConsecutiveSequenceNumbers() { + // when + for (long l = 0; l < 5 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers(); l++) { + if (l != 10 && l != 11) { + messageSequenceTracker.sequenceNumberReceived(l); + } + if (l == messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers() * 2) { + messageSequenceTracker.sequenceNumberReceived(10); + messageSequenceTracker.sequenceNumberReceived(11); + } + } + messageSequenceTracker.close(); + + // then + assertEquals(2, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } +} diff --git a/driver-stdout/pom.xml b/driver-stdout/pom.xml index 52c4feca7..2876bcc1c 100644 --- a/driver-stdout/pom.xml +++ b/driver-stdout/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -22,14 +22,14 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT compile io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT compile diff --git a/driver-tcp/pom.xml b/driver-tcp/pom.xml index c69534b67..94fe0e554 100644 --- a/driver-tcp/pom.xml +++ b/driver-tcp/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -24,19 +24,19 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-stdout - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/driver-web/pom.xml b/driver-web/pom.xml index 659b7fdb3..086324a02 100644 --- a/driver-web/pom.xml +++ b/driver-web/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -22,13 +22,13 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/drivers-api/pom.xml b/drivers-api/pom.xml index 6e38aed12..f17d08603 100644 --- a/drivers-api/pom.xml +++ b/drivers-api/pom.xml @@ -5,7 +5,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -23,19 +23,19 @@ io.nosqlbench nb-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench virtdata-userlibs - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench adapters-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/engine-api/pom.xml b/engine-api/pom.xml index 6dfa2b260..c344dc1c4 100644 --- 
a/engine-api/pom.xml +++ b/engine-api/pom.xml @@ -5,7 +5,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -23,25 +23,25 @@ io.nosqlbench nb-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench nb-annotations - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench virtdata-userlibs - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/engine-api/src/main/java/io/nosqlbench/engine/api/scenarios/NBCLIScenarioParser.java b/engine-api/src/main/java/io/nosqlbench/engine/api/scenarios/NBCLIScenarioParser.java index be519dcc2..0a275ce78 100644 --- a/engine-api/src/main/java/io/nosqlbench/engine/api/scenarios/NBCLIScenarioParser.java +++ b/engine-api/src/main/java/io/nosqlbench/engine/api/scenarios/NBCLIScenarioParser.java @@ -145,10 +145,12 @@ public class NBCLIScenarioParser { undefKeys.forEach(buildingCmd::remove); if (!buildingCmd.containsKey("workload")) { - String relativeWorkloadPathFromRoot = yamlWithNamedScenarios.asPath().toString(); - relativeWorkloadPathFromRoot = relativeWorkloadPathFromRoot.startsWith("/") ? - relativeWorkloadPathFromRoot.substring(1) : relativeWorkloadPathFromRoot; - buildingCmd.put("workload", "workload=" + relativeWorkloadPathFromRoot); +// The logic to remove the leading slash was likely used to fix a nuisance bug before, +// although it is clearly not correct as-is. Leaving temporarily for context. +// String relativeWorkloadPathFromRoot = yamlWithNamedScenarios.asPath().toString(); +// relativeWorkloadPathFromRoot = relativeWorkloadPathFromRoot.startsWith("/") ? +// relativeWorkloadPathFromRoot.substring(1) : relativeWorkloadPathFromRoot; + buildingCmd.put("workload", "workload=" + workloadName); } if (!buildingCmd.containsKey("alias")) { diff --git a/engine-cli/pom.xml b/engine-cli/pom.xml index b02078f9b..d72013baf 100644 --- a/engine-cli/pom.xml +++ b/engine-cli/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -23,13 +23,13 @@ io.nosqlbench engine-core - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench engine-docker - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLI.java b/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLI.java index 1c00ae202..efe2317aa 100644 --- a/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLI.java +++ b/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLI.java @@ -1,6 +1,7 @@ package io.nosqlbench.engine.cli; import io.nosqlbench.docsys.core.NBWebServerApp; +import io.nosqlbench.nb.api.metadata.SessionNamer; import io.nosqlbench.engine.api.activityapi.core.ActivityType; import io.nosqlbench.engine.api.activityapi.cyclelog.outputs.cyclelog.CycleLogDumperUtility; import io.nosqlbench.engine.api.activityapi.cyclelog.outputs.cyclelog.CycleLogImporterUtility; @@ -18,7 +19,7 @@ import io.nosqlbench.engine.core.script.Scenario; import io.nosqlbench.engine.core.script.ScenariosExecutor; import io.nosqlbench.engine.core.script.ScriptParams; import io.nosqlbench.engine.docker.DockerMetricsManager; -import io.nosqlbench.nb.api.SystemId; +import io.nosqlbench.nb.api.metadata.SystemId; import io.nosqlbench.nb.api.annotations.Annotation; import io.nosqlbench.nb.api.annotations.Layer; import io.nosqlbench.nb.api.content.Content; @@ -98,7 +99,7 @@ public class NBCLI { loggerConfig.setConsoleLevel(NBLogLevel.ERROR); NBCLIOptions globalOptions = new NBCLIOptions(args, NBCLIOptions.Mode.ParseGlobalsOnly); - String sessionName = 
new SessionNamer().format(globalOptions.getSessionName()); + String sessionName = SessionNamer.format(globalOptions.getSessionName()); loggerConfig .setSessionName(sessionName) diff --git a/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLIOptions.java b/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLIOptions.java index ddb17e727..f04a1b6e2 100644 --- a/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLIOptions.java +++ b/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLIOptions.java @@ -371,7 +371,7 @@ public class NBCLIOptions { private Path setStatePath() { if (statePathAccesses.size() > 0) { - throw new BasicError("The statedir must be set before it is used by other\n" + + throw new BasicError("The state dir must be set before it is used by other\n" + " options. If you want to change the statedir, be sure you do it before\n" + " dependent options. These parameters were called before this --statedir:\n" + statePathAccesses.stream().map(s -> "> " + s).collect(Collectors.joining("\n"))); @@ -380,7 +380,7 @@ public class NBCLIOptions { return this.statepath; } - List paths = NBEnvironment.INSTANCE.interpolate(":", statedirs); + List paths = NBEnvironment.INSTANCE.interpolateEach(":", statedirs); Path selected = null; for (String pathName : paths) { @@ -395,7 +395,7 @@ public class NBCLIOptions { } } if (selected == null) { - selected = Path.of(paths.get(0)); + selected = Path.of(paths.get(paths.size()-1)); } if (!Files.exists(selected)) { diff --git a/engine-cli/src/main/resources/docker-metrics.md b/engine-cli/src/main/resources/docker-metrics.md index b4039b678..45392aa20 100644 --- a/engine-cli/src/main/resources/docker-metrics.md +++ b/engine-cli/src/main/resources/docker-metrics.md @@ -1,19 +1,19 @@ # docker-metrics -Enlist nosqlbench to stand up your metrics infrastructure using a local +Enlist nosqlbench to stand up your metrics infrastructure using a local docker runtime: --docker-metrics -When this option is set, nosqlbench will start graphite, prometheus, +When this option is set, nosqlbench will start graphite, prometheus, and grafana dockers (if-needed) automatically on your local system , configure them to work together, and point nosqlbench to send metrics and annotations to the system automatically. -The inclued NoSQLBench dashboard uses the default grafana credentials of +The included NoSQLBench dashboard uses the default grafana credentials of admin:admin. You can find this dashboard by browsing to the "manage dashboards" section of grafana. - + # remote docker-metrics It is possible to use `--docker-metrics` to set up a metrics collector @@ -30,7 +30,7 @@ and other nodes, you can use this pattern: # on the collector node ... --pin --docker-metrics - + # on other nodes ... --pin --docker-metrics-at @@ -46,7 +46,7 @@ of running the following by hand: # pull and run the graphite-exporter container docker run -d -p 9108:9108 -p 9109:9109 -p 9109:9109/udp prom/graphite-exporter -Configuration files which are used by the docker containers are stored in: +Configuration files which are used by the docker containers are stored in: $HOME/.nosqlbench @@ -55,14 +55,14 @@ Configuration files which are used by the docker containers are stored in: If you need to clear the state for a local docker metrics stack, you can remove these directories. 
- # DASHBOARDS AND METRICS WILL BE LOST IF YOU DO THIS + # DASHBOARDS AND METRICS WILL BE LOST IF YOU DO THIS rm ~/.nosqlbench/{grafana,prometheus,prometheus-conf,graphite-exporter} ## Manually installing dockers # pull and run the prometheus container docker run -d -p 9090:9090 -v '/.prometheus:/etc/prometheus' prom/prometheus --config.file=/etc/prometheus/prometheus.yml" --storage.tsdb.path=/prometheus" --storage.tsdb.retention=183d --web.enable-lifecycle - + # pull and run the grafana container docker run -d -p 3000:3000 -v grafana/grafana @@ -70,7 +70,7 @@ If you need to clear the state for a local docker metrics stack, you These may allow you to send snapshot data to a specially configured remote grafana instance. - + GF_SECURITY_ADMIN_PASSWORD=admin GF_AUTH_ANONYMOUS_ENABLED="true" GF_SNAPSHOTS_EXTERNAL_SNAPSHOT_URL=http://54.165.144.56:3001 @@ -82,11 +82,11 @@ You can use the grafana api to set up the datasource and dashboard if you have other tools which integrate with grafana: # These are not commands, they are only provides API parameters - + POST http://localhost:3000/api/dashboards/db analysis.json # (found in resources/docker/dashboards/analysis.json) - + POST http://localhost:3000/api/datasources prometheus-datasource.yaml # (found in resources/docker/datasources/prometheus-datasource.yaml) diff --git a/engine-cli/src/main/resources/statedir.md b/engine-cli/src/main/resources/statedir.md new file mode 100644 index 000000000..b1a364aef --- /dev/null +++ b/engine-cli/src/main/resources/statedir.md @@ -0,0 +1,38 @@ +# State Directory + +In order to maintain state for a NoSQLBench client instance, +a directory is used. The default directory will be auto-created +for you if you do not specify one. + +You can always override the state directory location by providing +an option like `--statedir=/tmp/testdir`, or `--statedir=$HOME/.nosqlbench`. + +Within the --statedir parameter, the following values will be expanded +automatically: + +- $HOME: the current user's home directory +- $USER: the current user's name +- $varname: Any other environment variable + +`$NBSTATEDIR` is a mechanism for setting and finding the local state +directory for NoSQLBench. It is a search path, delimited by +the ':' character. It allows both Java system properties +and shell environment variables. + +Multiple values may be specified, like with the PATH environment variable, +separated by colons. When none of the directories is found, +the last one in the list will be created. This is based on the convention +that more specific "override" directories are searched first, whereas more +global state is allowed as the fall-through case. Generally users will +want to keep their state in a single and uniform location, like +`$HOME/.nosqlbench`, but they will want the option of localizing configs for +directory-based test management. Thus, the default value for +--statedir is '$NBSTATEDIR:$PWD/.nosqlbench:$HOME/.nosqlbench'. + +Once NoSQLBench is past the CLI processing stage, the NBSTATEDIR becomes +a valid system property, and any internal access to environment variables +can also use this property. + +NoSQLBench developers should take care to use the +NBEnvironment class as the method to access environment variables. 
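+
+As a rough illustration of the selection rule described above, the following
+sketch (simplified and hypothetical, not the actual NBCLIOptions code; the
+`resolveStateDir` and `StateDirExample` names are made up for this example)
+picks the first existing directory from the expanded list and otherwise
+creates the last one:
+
+```java
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+
+public class StateDirExample {
+    // Pick the first existing path from the expanded --statedir list;
+    // if none of them exists yet, fall back to the last entry and create it.
+    static Path resolveStateDir(List<String> expandedPaths) throws IOException {
+        Path selected = null;
+        for (String candidate : expandedPaths) {
+            Path p = Path.of(candidate);
+            if (Files.exists(p)) {
+                selected = p;
+                break;
+            }
+        }
+        if (selected == null) {
+            selected = Path.of(expandedPaths.get(expandedPaths.size() - 1));
+            Files.createDirectories(selected);
+        }
+        return selected;
+    }
+
+    public static void main(String[] args) throws IOException {
+        // e.g. the default '$NBSTATEDIR:$PWD/.nosqlbench:$HOME/.nosqlbench', already expanded
+        List<String> paths = List.of(
+            System.getProperty("user.dir") + "/.nosqlbench",
+            System.getProperty("user.home") + "/.nosqlbench");
+        System.out.println(resolveStateDir(paths));
+    }
+}
+```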
+(TODO: Add this to the developer guide) diff --git a/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioParserTest.java b/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioParserTest.java index 587cb4733..8a0527bfd 100644 --- a/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioParserTest.java +++ b/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioParserTest.java @@ -4,6 +4,7 @@ import io.nosqlbench.engine.api.scenarios.NBCLIScenarioParser; import io.nosqlbench.nb.api.errors.BasicError; import org.junit.jupiter.api.Test; +import java.nio.file.Path; import java.util.List; import java.util.Map; @@ -77,7 +78,7 @@ public class NBCLIScenarioParserTest { assertThat(cmds.size()).isEqualTo(1); assertThat(cmds.get(0).getArg("driver")).isEqualTo("stdout"); assertThat(cmds.get(0).getArg("cycles")).isEqualTo("10"); - assertThat(cmds.get(0).getArg("workload")).isEqualTo("target/test-classes/activities/scenario-test.yaml"); + assertThat(cmds.get(0).getArg("workload")).isEqualTo("scenario-test"); } @Test @@ -88,7 +89,7 @@ public class NBCLIScenarioParserTest { assertThat(cmds.get(0).getArg("driver")).isEqualTo("stdout"); assertThat(cmds.get(0).getArg("cycles")).isEqualTo("20"); assertThat(cmds.get(0).getArg("cycles-test")).isEqualTo("20"); - assertThat(cmds.get(0).getArg("workload")).isEqualTo("target/test-classes/activities/scenario-test.yaml"); + assertThat(cmds.get(0).getArg("workload")).isEqualTo("scenario-test"); } @Test @@ -106,9 +107,23 @@ public class NBCLIScenarioParserTest { List cmds1 = opts1.getCommands(); assertThat(cmds1.size()).isEqualTo(1); assertThat(cmds1.get(0).getArg("cycles-test")).isNull(); - } + @Test + public void testThatFullyQualifiedScenarioFilesAreSupported() { + Path cwd = Path.of(".").toAbsolutePath(); + System.out.println("cwd: '" + cwd + "'"); + + Path rel = Path.of("src/test/resources/activities/scenario-test.yaml"); + + assertThat(rel).exists(); + Path absolute = rel.toAbsolutePath(); + assertThat(absolute).exists(); + + NBCLIOptions opts = new NBCLIOptions(new String[]{ absolute.toString(), "schema-only", "cycles-test=20"}); + List cmds = opts.getCommands(); + assertThat(cmds.size()).isGreaterThan(0); + } @Test public void testSanitizer() { diff --git a/engine-cli/src/test/java/io/nosqlbench/engine/cli/SessionNamerTest.java b/engine-cli/src/test/java/io/nosqlbench/engine/cli/SessionNamerTest.java index 326822339..318ea02c0 100644 --- a/engine-cli/src/test/java/io/nosqlbench/engine/cli/SessionNamerTest.java +++ b/engine-cli/src/test/java/io/nosqlbench/engine/cli/SessionNamerTest.java @@ -17,6 +17,7 @@ package io.nosqlbench.engine.cli; +import io.nosqlbench.nb.api.metadata.SessionNamer; import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; @@ -26,18 +27,18 @@ public class SessionNamerTest { @Test public void testDefaultFormat() { SessionNamer namer = new SessionNamer(); - String name1 = namer.format(null); + String name1 = SessionNamer.format(null); assertThat(name1).matches("scenario_\\d{8}_\\d{6}_\\d{3}"); - String name2 = namer.format(""); + String name2 = SessionNamer.format(""); assertThat(name2).matches("scenario_\\d{8}_\\d{6}_\\d{3}"); } @Test public void testCustomFormat() { SessionNamer namer = new SessionNamer(); - String name1 = namer.format("Custom_session_name"); + String name1 = SessionNamer.format("Custom_session_name"); assertThat(name1).matches("Custom_session_name"); - String name2 = namer.format("TEST--%tQ"); + String name2 = SessionNamer.format("TEST--%tQ"); 
assertThat(name2).matches("TEST--\\d{13}"); } diff --git a/engine-clients/pom.xml b/engine-clients/pom.xml index ad032d4e8..da52fe229 100644 --- a/engine-clients/pom.xml +++ b/engine-clients/pom.xml @@ -1,42 +1,42 @@ - 4.0.0 + 4.0.0 - - mvn-defaults - io.nosqlbench - 4.15.58-SNAPSHOT - ../mvn-defaults - + + mvn-defaults + io.nosqlbench + 4.15.64-SNAPSHOT + ../mvn-defaults + - engine-clients - jar - ${project.artifactId} - - A set of clients for calling nosqlbench and related services. - + engine-clients + jar + ${project.artifactId} + + A set of clients for calling nosqlbench and related services. + - + - - io.nosqlbench - engine-api - 4.15.58-SNAPSHOT - + + io.nosqlbench + engine-api + 4.15.64-SNAPSHOT + - + - - - - src/test/resources - - examples/** - - - - - + + + + src/test/resources + + examples/** + + + + + diff --git a/engine-clients/src/main/java/io/nosqlbench/engine/clients/grafana/analyzer/GrafanaRegionAnalyzer.java b/engine-clients/src/main/java/io/nosqlbench/engine/clients/grafana/analyzer/GrafanaRegionAnalyzer.java index 071499e2f..177318364 100644 --- a/engine-clients/src/main/java/io/nosqlbench/engine/clients/grafana/analyzer/GrafanaRegionAnalyzer.java +++ b/engine-clients/src/main/java/io/nosqlbench/engine/clients/grafana/analyzer/GrafanaRegionAnalyzer.java @@ -5,7 +5,7 @@ import io.nosqlbench.engine.clients.grafana.GStitcher; import io.nosqlbench.engine.clients.grafana.GrafanaClient; import io.nosqlbench.engine.clients.grafana.GrafanaClientConfig; import io.nosqlbench.engine.clients.grafana.transfer.*; -import io.nosqlbench.nb.api.SystemId; +import io.nosqlbench.nb.api.metadata.SystemId; import java.nio.file.Path; import java.time.Instant; diff --git a/engine-clients/src/main/java/io/nosqlbench/engine/clients/grafana/annotator/GrafanaMetricsAnnotator.java b/engine-clients/src/main/java/io/nosqlbench/engine/clients/grafana/annotator/GrafanaMetricsAnnotator.java index 38e909801..f9ad517ad 100644 --- a/engine-clients/src/main/java/io/nosqlbench/engine/clients/grafana/annotator/GrafanaMetricsAnnotator.java +++ b/engine-clients/src/main/java/io/nosqlbench/engine/clients/grafana/annotator/GrafanaMetricsAnnotator.java @@ -5,7 +5,7 @@ import io.nosqlbench.engine.clients.grafana.GrafanaClientConfig; import io.nosqlbench.engine.clients.grafana.transfer.GAnnotation; import io.nosqlbench.nb.annotations.Service; import io.nosqlbench.nb.api.OnError; -import io.nosqlbench.nb.api.SystemId; +import io.nosqlbench.nb.api.metadata.SystemId; import io.nosqlbench.nb.api.annotations.Annotation; import io.nosqlbench.nb.api.annotations.Annotator; import io.nosqlbench.nb.api.config.params.ParamsParser; diff --git a/engine-core/pom.xml b/engine-core/pom.xml index 9e53400c3..8ad2e61f3 100644 --- a/engine-core/pom.xml +++ b/engine-core/pom.xml @@ -5,7 +5,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -20,7 +20,7 @@ - + @@ -28,13 +28,13 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench drivers-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT @@ -72,25 +72,25 @@ org.graalvm.js js-scriptengine - - org.graalvm.tools - profiler - runtime - - - org.graalvm.tools - chromeinspector - runtime - - - io.nosqlbench - engine-clients - 4.15.58-SNAPSHOT - compile - + + org.graalvm.tools + profiler + runtime + + + org.graalvm.tools + chromeinspector + runtime + + + io.nosqlbench + engine-clients + 4.15.64-SNAPSHOT + compile + - + diff --git a/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/ActivityTypeLoader.java 
b/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/ActivityTypeLoader.java index 558978c7c..8f2c151fb 100644 --- a/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/ActivityTypeLoader.java +++ b/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/ActivityTypeLoader.java @@ -31,7 +31,7 @@ public class ActivityTypeLoader { public ActivityTypeLoader() { - List libpaths = NBEnvironment.INSTANCE.interpolate(":", "$" + NBEnvironment.NBLIBS); + List libpaths = NBEnvironment.INSTANCE.interpolateEach(":", "$" + NBEnvironment.NBLIBS); Set urlsToAdd = new HashSet<>(); for (String libpaths_entry : libpaths) { diff --git a/engine-core/src/main/java/io/nosqlbench/engine/core/script/Scenario.java b/engine-core/src/main/java/io/nosqlbench/engine/core/script/Scenario.java index 6b82d7917..6f8e98a5a 100644 --- a/engine-core/src/main/java/io/nosqlbench/engine/core/script/Scenario.java +++ b/engine-core/src/main/java/io/nosqlbench/engine/core/script/Scenario.java @@ -25,6 +25,9 @@ import io.nosqlbench.engine.core.lifecycle.ScenarioController; import io.nosqlbench.engine.core.lifecycle.ScenarioResult; import io.nosqlbench.engine.core.annotation.Annotators; import io.nosqlbench.engine.core.metrics.PolyglotMetricRegistryBindings; +import io.nosqlbench.nb.api.metadata.ScenarioMetadata; +import io.nosqlbench.nb.api.metadata.ScenarioMetadataAware; +import io.nosqlbench.nb.api.metadata.SystemId; import io.nosqlbench.nb.api.annotations.Layer; import io.nosqlbench.nb.api.annotations.Annotation; import org.apache.logging.log4j.LogManager; @@ -46,10 +49,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; +import java.util.*; import java.util.concurrent.Callable; import java.util.stream.Collectors; @@ -63,6 +63,7 @@ public class Scenario implements Callable { private State state = State.Scheduled; private volatile ScenarioShutdownHook scenarioShutdownHook; private Exception error; + private ScenarioMetadata scenarioMetadata; public enum State { @@ -236,12 +237,23 @@ public class Scenario implements Callable { metricRegistry, scriptEnv ); + ScenarioMetadataAware.apply(extensionObject,getScenarioMetadata()); logger.trace("Adding extension object: name=" + extensionDescriptor.getBaseVariableName() + " class=" + extensionObject.getClass().getSimpleName()); scriptEngine.put(extensionDescriptor.getBaseVariableName(), extensionObject); } + } - + private synchronized ScenarioMetadata getScenarioMetadata() { + if (this.scenarioMetadata==null) { + this.scenarioMetadata = new ScenarioMetadata( + this.startedAtMillis, + this.scenarioName, + SystemId.getNodeId(), + SystemId.getNodeFingerprint() + ); + } + return scenarioMetadata; } public void runScenario() { diff --git a/engine-docker/pom.xml b/engine-docker/pom.xml index 4dda1728b..4ff591661 100644 --- a/engine-docker/pom.xml +++ b/engine-docker/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -56,7 +56,7 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/engine-docs/pom.xml b/engine-docs/pom.xml index 0b70b8080..8ba06de56 100644 --- a/engine-docs/pom.xml +++ b/engine-docs/pom.xml @@ -1,46 +1,46 @@ - 4.0.0 + 4.0.0 - - mvn-defaults - io.nosqlbench - 4.15.58-SNAPSHOT - ../mvn-defaults - + + mvn-defaults + io.nosqlbench + 4.15.64-SNAPSHOT + ../mvn-defaults + - engine-docs - jar - 
${project.artifactId} - CLI for nosqlbench. + engine-docs + jar + ${project.artifactId} + CLI for nosqlbench. - - nosqlbench Docs - + + nosqlbench Docs + - + - - - - - + + + + + - - io.nosqlbench - docsys - 4.15.58-SNAPSHOT - + + io.nosqlbench + docsys + 4.15.64-SNAPSHOT + - + - - - - src/main/resources - true - - - + + + + src/main/resources + true + + + diff --git a/engine-extensions/pom.xml b/engine-extensions/pom.xml index 5905de195..d893d4464 100644 --- a/engine-extensions/pom.xml +++ b/engine-extensions/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -22,7 +22,7 @@ io.nosqlbench engine-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT @@ -40,23 +40,23 @@ - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + diff --git a/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/globalvars/globalvars.md b/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/globalvars/globalvars.md new file mode 100644 index 000000000..f33539f5b --- /dev/null +++ b/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/globalvars/globalvars.md @@ -0,0 +1,9 @@ +globalvars extension +=================== + +Allows access to the global object map from SharedState.gl_ObjectMap, which allows +for cross-binding and cross-thread data sharing. + +``` +var result = globalvars.get("result"); +``` diff --git a/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/http/http.md b/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/http/http.md new file mode 100644 index 000000000..742d90596 --- /dev/null +++ b/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/http/http.md @@ -0,0 +1,26 @@ +http extension +============== + +Allow access to HTTP URLs from within scripts, supporting both basic +get and post methods. In all cases, the returned type is the full +response object, from which the body content can be accessed. 
+ +## Examples + +Get content from a URL into a string variable: + +``` +var response= http.get("http://google.com/") +``` + +Post an empty body to a URL, useful for state-changing calls where +all of the control data is in the URL: + +``` +var response= http.post("http://some.server/path/to/resource?query=foobarbaz") +``` + +Post content to a URL, specifying the URL, content value, and content type: +``` +var response= http.post("http://some.server/path/to/resource", "this is the data", "text/plain"); +``` diff --git a/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/s3uploader/S3Uploader.java b/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/s3uploader/S3Uploader.java new file mode 100644 index 000000000..c236cbf87 --- /dev/null +++ b/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/s3uploader/S3Uploader.java @@ -0,0 +1,96 @@ +package io.nosqlbench.engine.extensions.s3uploader; + +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.transfer.MultipleFileUpload; +import com.amazonaws.services.s3.transfer.TransferManager; +import com.amazonaws.services.s3.transfer.TransferManagerBuilder; +import com.codahale.metrics.MetricRegistry; +import io.nosqlbench.nb.addins.s3.s3urlhandler.S3ClientCache; +import io.nosqlbench.nb.addins.s3.s3urlhandler.S3UrlFields; +import io.nosqlbench.nb.api.NBEnvironment; +import io.nosqlbench.nb.api.metadata.ScenarioMetadata; +import io.nosqlbench.nb.api.metadata.ScenarioMetadataAware; +import org.apache.logging.log4j.Logger; + +import javax.script.ScriptContext; +import java.io.File; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.LinkOption; +import java.nio.file.Path; +import java.util.LinkedHashMap; +import java.util.Map; + +public class S3Uploader implements ScenarioMetadataAware { + private final Logger logger; + private final MetricRegistry metricRegistry; + private final ScriptContext scriptContext; + private ScenarioMetadata scenarioMetadata; + + public S3Uploader(Logger logger, MetricRegistry metricRegistry, ScriptContext scriptContext) { + this.logger = logger; + this.metricRegistry = metricRegistry; + this.scriptContext = scriptContext; + } + + /** + * Upload the local file path to the specified S3 URL, then return the URL of the bucket + * in its fully expanded form. See the details on token expansions in the s3.md help docs. + * @param localFilePath The path to the local directory + * @param urlTemplate A template that is expanded to a valid S3 URL + * @return The fully expanded name of the URL used for upload + */ + public String uploadDirToUrl(String localFilePath, String urlTemplate) { + return uploadDirToUrlTokenized(localFilePath, urlTemplate, Map.of()); + } + + /** + * Upload the local file path to the specified S3 URL, then return the URL of the bucket + * in its fully expanded form. See the details on token expansions in the s3.md help docs. + * Any params which are provided supersede the normally provided values from the system. + * @param localFilePath The path to the local directory + * @param urlTemplate A template that is expanded to a valid S3 URL + * @param params Additional token expansions which will take precedence over other available values. 
+ * @return The fully expanded name of the URL used for upload + */ + public String uploadDirToUrlTokenized(String localFilePath, String urlTemplate, Map params) { + + + Path sourcePath = Path.of(localFilePath); + if (!FileSystems.getDefault().equals(sourcePath.getFileSystem())) { + throw new RuntimeException("The file must reside on the default filesystem to be uploaded by S3."); + } + if (!Files.isDirectory(sourcePath, LinkOption.NOFOLLOW_LINKS)) { + throw new RuntimeException("path '" + sourcePath + "' is not a directory."); + } + File sourceDir = sourcePath.toFile(); + + Map combined = new LinkedHashMap<>(params); + combined.putAll(scenarioMetadata.asMap()); + String url = NBEnvironment.INSTANCE.interpolateWithTimestamp( + urlTemplate, + scenarioMetadata.getStartedAt(), + combined + ) + .orElseThrow(); + logger.debug("S3 composite URL is '" + url + "'"); + + S3UrlFields fields = S3UrlFields.fromURLString(url); + S3ClientCache s3ClientCache = new S3ClientCache(); + AmazonS3 s3 = s3ClientCache.get(fields); + TransferManager xfers = TransferManagerBuilder.standard().withS3Client(s3).build(); + String prefix = fields.key; + MultipleFileUpload mfu = xfers.uploadDirectory(fields.bucket, prefix, sourceDir, true); + try { + mfu.waitForCompletion(); + } catch (InterruptedException e) { + throw new RuntimeException("Multi-file upload was interrupted."); + } + return url; + } + + @Override + public void setScenarioMetadata(ScenarioMetadata metadata) { + this.scenarioMetadata = metadata; + } +} diff --git a/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/s3uploader/S3UploaderPluginData.java b/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/s3uploader/S3UploaderPluginData.java new file mode 100644 index 000000000..9f59f1acd --- /dev/null +++ b/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/s3uploader/S3UploaderPluginData.java @@ -0,0 +1,32 @@ +package io.nosqlbench.engine.extensions.s3uploader; + +import com.codahale.metrics.MetricRegistry; +import io.nosqlbench.engine.api.extensions.ScriptingPluginInfo; +import io.nosqlbench.nb.annotations.Service; +import io.nosqlbench.nb.api.metadata.ScenarioMetadata; +import io.nosqlbench.nb.api.metadata.ScenarioMetadataAware; +import org.apache.logging.log4j.Logger; + +import javax.script.ScriptContext; + +@Service(value = ScriptingPluginInfo.class, selector = "s3") +public class S3UploaderPluginData implements ScriptingPluginInfo, ScenarioMetadataAware { + private ScenarioMetadata scenarioMetadata; + + @Override + public String getDescription() { + return "Allow for uploading or downloading a directory from S3"; + } + + @Override + public S3Uploader getExtensionObject(Logger logger, MetricRegistry metricRegistry, ScriptContext scriptContext) { + S3Uploader uploader = new S3Uploader(logger, metricRegistry, scriptContext); + ScenarioMetadataAware.apply(uploader,scenarioMetadata); + return uploader; + } + + @Override + public void setScenarioMetadata(ScenarioMetadata metadata) { + this.scenarioMetadata = metadata; + } +} diff --git a/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/s3uploader/s3.md b/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/s3uploader/s3.md new file mode 100644 index 000000000..b7742a88d --- /dev/null +++ b/engine-extensions/src/main/java/io/nosqlbench/engine/extensions/s3uploader/s3.md @@ -0,0 +1,68 @@ +S3 extension +============== + +Allow uploading of a local directory on the default filesystem +to an S3 bucket, using an S3 URI to specify the bucket, 
location, and so on. + +The URL is specified in the standard S3 format, such as: + +1. `s3://mybucket/mypath-as-a-key/with-any-level-of-depth` +2. `s3://myuser:mypass@mybucket/mypath-as-a-key/with-any-level-of-depth` + +In addition, any tokens which are supported by the standard NoSQLBench +token substitution mechanism will be used to construct a URL at the time +of usage. These forms include the following: + +- Scenario Metadata - There are several key fields initialized for a scenario which can be used as common + reference points. These occlude the environment variables of the same name. These are: + - SESSION_NAME - The name auto-generated for a session, used in the logfile names, and so on. + - SYSTEM_ID - The string form of the most canonically identifying IP address, excluding + known symbolic interface names (docker*, for example) and all localhost addresses. + - SYSTEM_FINGERPRINT - a stable and anonymized identifier for a given system. This will be + stable as long as the networking configuration does not change. +- System Properties + - Any parameter in `$word1.word2...` form -- any multi-part variable name with separating dots + is taken as a system property to the JVM. These are expanded in place. Both `$word1.word2` + and `${word1.word2}` patterns are supported, whereas the latter is more strict and thus safer. +- Environment Variables + - As with System Properties, environment variable form the shell are also supported, as long + as they do not include a dot. +- Temporal Fields from the Scenario start time + - Any field specifier that you can use with the temporal types in Java's standard String. + format can be used. The reference time for these is always the scenario start time. + - Example: The default session name template looks like `scenario_%tY%tm%td_%tH%tM%tS_%tL` + +## Examples +``` +// If you have local logical identifiers in your scenario script which you want +// to templatize into your upload paths, you can provide your own js object +// as the third parameter +s3.uploadDirToUrlTokenized( + 'metrics', + 's3://test-results/${HOSTNAME}/${testid}-${testversion}/metrics', + { + 'testid':'20210343', + 'testversion':'v2' + } +); + +// Otherwise, use the two-parameter version: +s3.uploadDirToUrl('metrics','s3://test-results/${HOSTNAME}/metrics'); +``` + +## Post-Hoc upload + +Scripting extensions only run if the scenario is not halted before they are invoked +in the main scenario script. If you want to ensure that this one runs after a test, +regardless of when or why the test stopped, it is possible to wrap it within +a shutdown hook which will run after scenario completion. 
+ +This is an example of how to do so: + +``` +shutdown.addShutdownHook('upload_metrics', function f() { + s3.uploadDirToUrl('metrics','s3://test-results/${HOSTNAME}/metrics'); +}); + +``` + diff --git a/engine-extensions/src/main/java/io/nosqlbench/engine/shutdown/ShutdownHookPlugin.java b/engine-extensions/src/main/java/io/nosqlbench/engine/shutdown/ShutdownHookPlugin.java new file mode 100644 index 000000000..9177fee5d --- /dev/null +++ b/engine-extensions/src/main/java/io/nosqlbench/engine/shutdown/ShutdownHookPlugin.java @@ -0,0 +1,32 @@ +package io.nosqlbench.engine.shutdown; + +import com.codahale.metrics.MetricRegistry; +import org.apache.logging.log4j.Logger; + +import javax.script.ScriptContext; +import java.util.function.Function; + +public class ShutdownHookPlugin { + private final Logger logger; + private final MetricRegistry metricRegistry; + private final ScriptContext scriptContext; + + public ShutdownHookPlugin(Logger logger, MetricRegistry metricRegistry, ScriptContext scriptContext) { + + this.logger = logger; + this.metricRegistry = metricRegistry; + this.scriptContext = scriptContext; + } + + public void addShutdownHook(String name, Object f) { + if (!(f instanceof Function)) { + throw new RuntimeException("The object provided to the shutdown hook plugin was not recognized as a function."); + } + String shutdownName = "shutdown-function-" + name; + Thread runnable = new ShutdownRunnableFunction(logger, name, (Function)f); + runnable.setName(shutdownName); + Runtime.getRuntime().addShutdownHook(runnable); + logger.info("Registered shutdown hook to run under name '" + shutdownName + "'"); + + } +} diff --git a/engine-extensions/src/main/java/io/nosqlbench/engine/shutdown/ShutdownHookPluginMetadata.java b/engine-extensions/src/main/java/io/nosqlbench/engine/shutdown/ShutdownHookPluginMetadata.java new file mode 100644 index 000000000..a1bfba834 --- /dev/null +++ b/engine-extensions/src/main/java/io/nosqlbench/engine/shutdown/ShutdownHookPluginMetadata.java @@ -0,0 +1,22 @@ +package io.nosqlbench.engine.shutdown; + +import com.codahale.metrics.MetricRegistry; +import io.nosqlbench.engine.api.extensions.ScriptingPluginInfo; +import io.nosqlbench.nb.annotations.Service; +import org.apache.logging.log4j.Logger; + +import javax.script.ScriptContext; + +@Service(value=ScriptingPluginInfo.class,selector = "shutdown") +public class ShutdownHookPluginMetadata implements ScriptingPluginInfo { + + @Override + public String getDescription() { + return "Register shutdown hooks in the form of javascript functions."; + } + + @Override + public ShutdownHookPlugin getExtensionObject(Logger logger, MetricRegistry metricRegistry, ScriptContext scriptContext) { + return new ShutdownHookPlugin(logger,metricRegistry,scriptContext); + } +} diff --git a/engine-extensions/src/main/java/io/nosqlbench/engine/shutdown/ShutdownRunnableFunction.java b/engine-extensions/src/main/java/io/nosqlbench/engine/shutdown/ShutdownRunnableFunction.java new file mode 100644 index 000000000..45b4a85ed --- /dev/null +++ b/engine-extensions/src/main/java/io/nosqlbench/engine/shutdown/ShutdownRunnableFunction.java @@ -0,0 +1,32 @@ +package io.nosqlbench.engine.shutdown; + +import org.apache.logging.log4j.Logger; + +import java.util.function.Function; + + +public class ShutdownRunnableFunction extends Thread { + private final String name; + private final Function function; + private final Logger logger; + + public ShutdownRunnableFunction(Logger logger, String name, Function function) { + this.logger = logger; + 
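+ // Note (explanatory comment added for clarity, not part of the original change):
+ // the script-provided callback is stored as-is; run() invokes it with an empty
+ // argument array when the JVM shutdown hook fires, and logs any CharSequence result.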
this.name = name; + this.function = (Function)function; + } + + @Override + public void run() { + logger.info("Running shutdown hook '" + name + "'..."); + try { + Object result = function.apply(new Object[0]); + if (result instanceof CharSequence) { + logger.info("shutdown hook returned output:\n" + ((CharSequence) result)); + } + logger.info("Completed shutdown hook '" + name + "'..."); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} diff --git a/engine-rest/pom.xml b/engine-rest/pom.xml index fbb87b2f8..20a9ae397 100644 --- a/engine-rest/pom.xml +++ b/engine-rest/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -35,7 +35,7 @@ io.nosqlbench engine-cli - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/jmxtest4.yaml b/jmxtest4.yaml deleted file mode 100644 index a5aedb2f5..000000000 --- a/jmxtest4.yaml +++ /dev/null @@ -1,10 +0,0 @@ -# (src/test/resources/activities/) jmx-test-1.yaml -statements: - - read1: - url: service:jmx:rmi:///jndi/rmi://10.101.33.50:7199/jmxrmi - object: org.apache.cassandra.metrics:type=Compaction,name=PendingTasks - readvar: Value - as_type: int - as_name: pending_tasks - - diff --git a/mvn-defaults/pom.xml b/mvn-defaults/pom.xml index 4466393d8..7af64f79f 100644 --- a/mvn-defaults/pom.xml +++ b/mvn-defaults/pom.xml @@ -1,680 +1,684 @@ - 4.0.0 + 4.0.0 - io.nosqlbench - mvn-defaults - 4.15.58-SNAPSHOT - pom + io.nosqlbench + mvn-defaults + 4.15.64-SNAPSHOT + pom - - UTF-8 - UTF-8 - nosqlbench + + UTF-8 + UTF-8 + nosqlbench - - 1.2.0 - 4.9.2 - 1.21 - 1.15 - 1.8 - 3.9 - 3.6.1 - 1.9 - 3.2.1 - 3.0.0-RC1 - 1.11.1 - 2.4.0-b180830.0359 - 2.3.0.1 - 2.4.0-b180830.0438 - 1.22 - 2.9.9 - 5.7.2 + + 1.2.0 + 4.9.2 + 1.21 + 1.15 + 1.8 + 3.9 + 3.6.1 + 1.9 + 3.2.1 + 3.0.0-RC1 + 1.11.1 + 2.4.0-b180830.0359 + 2.3.0.1 + 2.4.0-b180830.0438 + 1.22 + 2.9.9 + 5.7.2 - 1.4.1 - - 2.4.10.Final - - 4.1.47.Final - - 1.0.0 - - 1.26 - 1.1.2.6 + 1.4.1 + + 2.4.10.Final + 1.0.0 + + 1.26 + 1.1.2.6 - - 3.1.0 - 3.8.1 - 3.0.0-M1 - 3.0.0-M3 - 1.6.0 - 3.0.0-M4 - 1.6 - 3.0.0-M1 - 3.1.1 - 3.1.1 - 1.6.8 - 2.5.3 - 3.1.0 - 3.0.1 - 3.0.0-M4 - + + 3.1.0 + 3.8.1 + 3.0.0-M1 + 3.0.0-M3 + 1.6.0 + 3.0.0-M4 + 1.6 + 3.0.0-M1 + 3.1.1 + 3.1.1 + 1.6.8 + 2.5.3 + 3.1.0 + 3.0.1 + 3.0.0-M4 + - ${project.artifactId} - http://nosqlbench.io/ - - nosqlbench is the core of a programmable workload simulation runtime. - This module acts as the parent pom for nosqlbench Maven modules. - + ${project.artifactId} + http://nosqlbench.io/ + + nosqlbench is the core of a programmable workload simulation runtime. + This module acts as the parent pom for nosqlbench Maven modules. 
+ - - - The Apache License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - - + + + The Apache License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + + - - GitHub - https://github.com/nosqlbench/nosqlbench/issues - + + GitHub + https://github.com/nosqlbench/nosqlbench/issues + - - scm:git:https://github.com/nosqlbench/nosqlbench.git - - HEAD - + + scm:git:https://github.com/nosqlbench/nosqlbench.git + + HEAD + + + + + + + org.junit.jupiter + ${junit.jupiter.version} + junit-jupiter + test + + + + org.mpierce.metrics.reservoir + hdrhistogram-metrics-reservoir + 1.1.0 + + + + org.hdrhistogram + HdrHistogram + 2.1.11 + + + + io.dropwizard.metrics + metrics-graphite + 4.1.9 + + + + io.dropwizard.metrics + metrics-core + 4.1.9 + + + + org.apache.commons + commons-text + ${commons.text.version} + + + + org.openjdk.jmh + jmh-core + ${jmh.version} + + + org.openjdk.jmh + jmh-generator-annprocess + ${jmh.version} + + + + org.yaml + snakeyaml + ${snakeyaml.version} + + + com.mitchtalmadge + ascii-data + ${ascii.data.version} + + + + + org.lz4 + lz4-java + ${lz4.version} + + + + + org.xerial.snappy + snappy-java + ${snappy.version} + + + + io.netty + netty-handler + 4.1.51.Final + + + + io.netty + netty-transport-native-epoll + 4.1.51.Final + linux-x86_64 + + + + io.netty + netty-transport-native-kqueue + 4.1.46.Final + linux-x86_64 + + + + io.netty + netty-codec-haproxy + 4.1.54.Final + + + + com.github.docker-java + docker-java-api + ${docker.java.version} + + + org.slf4j + jcl-over-slf4j + + + + + + com.github.docker-java + docker-java-core + ${docker.java.version} + + + org.slf4j + jcl-over-slf4j + + + + + + com.github.docker-java + docker-java-transport-okhttp + ${docker.java.version} + + + org.slf4j + jcl-over-slf4j + + + + + + + com.github.docker-java + docker-java + ${docker.java.version} + + + org.slf4j + jcl-over-slf4j + + + + + + + com.github.oshi + oshi-core + 5.2.2 + + + + com.google.code.gson + gson + 2.8.5 + + + + com.elega9t + number-to-words + ${number.to.words.version} + + + + org.greenrobot + essentials + ${greenrobot.version} + + + + org.apache.commons + commons-lang3 + ${commons.lang.version} + + + + com.squareup + javapoet + ${javapoet.version} + + + + joda-time + joda-time + ${joda.time.version} + + + + org.apache.commons + commons-math3 + ${commons.math3.version} + + + + org.apache.commons + commons-csv + ${commons.csv.version} + + + commons-codec + commons-codec + ${commons.codec.version} + + + org.mvel + mvel2 + ${mvel2.version} + + + + org.antlr + antlr4-runtime + ${antlr4.version} + + + + org.apache.commons + commons-compress + ${commons.compress.version} + + + + com.fasterxml.jackson.jaxrs + jackson-jaxrs-json-provider + 2.9.8 + + + + com.sun.xml.bind + jaxb-core + ${jaxb.core.version} + + + + com.sun.xml.bind + jaxb-impl + ${jaxb.impl.version} + + + + + org.graalvm.sdk + graal-sdk + 20.3.0 + + + org.graalvm.js + js + 20.3.0 + runtime + + + org.graalvm.js + js-scriptengine + 20.3.0 + + + org.graalvm.tools + profiler + 20.3.0 + runtime + + + org.graalvm.tools + chromeinspector + 20.3.0 + runtime + + + + - - - org.junit.jupiter - ${junit.jupiter.version} - junit-jupiter - test - + + org.apache.logging.log4j + log4j-api + 2.14.0 + - - org.mpierce.metrics.reservoir - hdrhistogram-metrics-reservoir - 1.1.0 - + + org.apache.logging.log4j + log4j-core + 2.14.0 + - - org.hdrhistogram - HdrHistogram - 2.1.11 - + + org.apache.logging.log4j + log4j-slf4j-impl + 2.14.0 + - - io.dropwizard.metrics - metrics-graphite - 4.1.9 - + + org.assertj + 
assertj-core + 3.19.0 + test + - - io.dropwizard.metrics - metrics-core - 4.1.9 - - - - org.apache.commons - commons-text - ${commons.text.version} - - - - org.openjdk.jmh - jmh-core - ${jmh.version} - - - org.openjdk.jmh - jmh-generator-annprocess - ${jmh.version} - - - - org.yaml - snakeyaml - ${snakeyaml.version} - - - com.mitchtalmadge - ascii-data - ${ascii.data.version} - - - - - org.lz4 - lz4-java - ${lz4.version} - - - - - org.xerial.snappy - snappy-java - ${snappy.version} - - - - io.netty - netty-handler - ${netty.version} - - - - io.netty - netty-transport-native-epoll - ${netty.version} - linux-x86_64 - - - - io.netty - netty-transport-native-kqueue - 4.1.47.Final - linux-x86_64 - - - - com.github.docker-java - docker-java-api - ${docker.java.version} - - - org.slf4j - jcl-over-slf4j - - - - - - com.github.docker-java - docker-java-core - ${docker.java.version} - - - org.slf4j - jcl-over-slf4j - - - - - - com.github.docker-java - docker-java-transport-okhttp - ${docker.java.version} - - - org.slf4j - jcl-over-slf4j - - - - - - - com.github.docker-java - docker-java - ${docker.java.version} - - - org.slf4j - jcl-over-slf4j - - - - - - - com.github.oshi - oshi-core - 5.2.2 - - - - com.google.code.gson - gson - 2.8.5 - - - - com.elega9t - number-to-words - ${number.to.words.version} - - - - org.greenrobot - essentials - ${greenrobot.version} - - - - org.apache.commons - commons-lang3 - ${commons.lang.version} - - - - com.squareup - javapoet - ${javapoet.version} - - - - joda-time - joda-time - ${joda.time.version} - - - - org.apache.commons - commons-math3 - ${commons.math3.version} - - - - org.apache.commons - commons-csv - ${commons.csv.version} - - - commons-codec - commons-codec - ${commons.codec.version} - - - org.mvel - mvel2 - ${mvel2.version} - - - - org.antlr - antlr4-runtime - ${antlr4.version} - - - - org.apache.commons - commons-compress - ${commons.compress.version} - - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-json-provider - 2.9.8 - - - - com.sun.xml.bind - jaxb-core - ${jaxb.core.version} - - - - com.sun.xml.bind - jaxb-impl - ${jaxb.impl.version} - - - - - org.graalvm.sdk - graal-sdk - 20.3.0 - - - org.graalvm.js - js - 20.3.0 - runtime - - - org.graalvm.js - js-scriptengine - 20.3.0 - - - org.graalvm.tools - profiler - 20.3.0 - runtime - - - org.graalvm.tools - chromeinspector - 20.3.0 - runtime - + + org.junit.platform + junit-platform-launcher + 1.7.2 + test + + + org.junit.jupiter + junit-jupiter + ${junit.jupiter.version} + test + - - - - - org.apache.logging.log4j - log4j-api - 2.14.0 - - - - org.apache.logging.log4j - log4j-core - 2.14.0 - - - - org.apache.logging.log4j - log4j-slf4j-impl - 2.14.0 - - - - org.assertj - assertj-core - 3.19.0 - test - - - - org.junit.platform - junit-platform-launcher - 1.7.2 - test - - - org.junit.jupiter - junit-jupiter - ${junit.jupiter.version} - test - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - - true - - 11 - 11 - - - - - - - - - - org.apache.maven.plugins - maven-surefire-plugin - - - - **/*Integrated*Test*.java - **/*IntegrationTest.java - **/*IntegrationTests.java - - - **/*Test*.java - - false - - - - - org.apache.maven.plugins - maven-failsafe-plugin - - - run-tests - integration-test - - integration-test - verify - - - - - - 1 - false - - **/*Integrated*Test*.java - **/*IntegrationTest.java - **/*IntegrationTests.java - - - - - - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - ${javadoc.name} - ${javadoc.name} - false - false - false - false - - -Xdoclint:none - 
-Xdoclint:none - none - - - - attach-javadoc - - jar - - - - - - - - - org.apache.maven.plugins - maven-source-plugin - - - attach-sources - verify - - jar-no-fork - - - - - - - - org.sonatype.plugins - nexus-staging-maven-plugin - true - - ossrh - https://oss.sonatype.org/ - true - - - - - org.apache.maven.plugins - maven-gpg-plugin - - - - - - - org.apache.maven.plugins - maven-assembly-plugin - 3.3.0 - - - - org.apache.maven.plugins - maven-release-plugin - ${release.plugin.version} - - - org.apache.maven.plugins - maven-gpg-plugin - ${gpg.plugin.version} - - - org.apache.maven.plugins - maven-compiler-plugin - 3.8.1 - - - org.apache.maven.plugins - maven-surefire-plugin - ${surefire.plugin.version} - - - org.apache.maven.plugins - maven-failsafe-plugin - ${failsafe.plugin.version} - - - org.apache.maven.plugins - maven-javadoc-plugin - ${javadoc.plugin.version} - - - org.apache.maven.plugins - maven-source-plugin - ${source.plugin.version} - - - org.sonatype.plugins - nexus-staging-maven-plugin - ${nexus.staging.plugin.version} - - - org.antlr - antlr4-maven-plugin - ${antlr4.version} - - - org.codehaus.mojo - exec-maven-plugin - ${exec.plugin.version} - - - org.apache.maven.plugins - maven-enforcer-plugin - ${enforcer.plugin.version} - - - org.apache.maven.plugins - maven-clean-plugin - ${clean.plugin.version} - - - - org.apache.maven.plugins - maven-resources-plugin - ${resources.plugin.version} - - - org.apache.maven.plugins - maven-jar-plugin - ${jar.plugin.version} - - - org.apache.maven.plugins - maven-install-plugin - ${install.plugin.version} - - - org.apache.maven.plugins - maven-deploy-plugin - ${deploy.plugin.version} - - - - - - - - - - Jonathan Shook - jshook@gmail.com - nosqlbench.io - http://nosqlbench.io/ - - - Sebastián Estévez - estevezsebastian@gmail.com - nosqlbench.io - http://nosqlbench.io/ - - - - - - ossrh-snapshot - https://oss.sonatype.org/content/repositories/snapshots - - - ossrh-staging - https://oss.sonatype.org/service/local/staging/deploy/maven2 - - - - - - - release - + - - - org.apache.maven.plugins - maven-gpg-plugin - 1.6 - - - sign-artifacts - verify - - sign - - + + org.apache.maven.plugins + maven-compiler-plugin - - --pinentry-mode - loopback - + true + + 11 + 11 + + + + + - - - - - - + - + + org.apache.maven.plugins + maven-surefire-plugin + + + + **/*Integrated*Test*.java + **/*IntegrationTest.java + **/*IntegrationTests.java + + + **/*Test*.java + + false + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + + + run-tests + integration-test + + integration-test + verify + + + + + + 1 + false + + **/*Integrated*Test*.java + **/*IntegrationTest.java + **/*IntegrationTests.java + + + + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + ${javadoc.name} + ${javadoc.name} + false + false + false + false + + -Xdoclint:none + -Xdoclint:none + none + + + + attach-javadoc + + jar + + + + + + + + + org.apache.maven.plugins + maven-source-plugin + + + attach-sources + verify + + jar-no-fork + + + + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + true + + ossrh + https://oss.sonatype.org/ + true + + + + + org.apache.maven.plugins + maven-gpg-plugin + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 3.3.0 + + + + org.apache.maven.plugins + maven-release-plugin + ${release.plugin.version} + + + org.apache.maven.plugins + maven-gpg-plugin + ${gpg.plugin.version} + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.1 + + + org.apache.maven.plugins + maven-surefire-plugin + ${surefire.plugin.version} + + 
+ org.apache.maven.plugins + maven-failsafe-plugin + ${failsafe.plugin.version} + + + org.apache.maven.plugins + maven-javadoc-plugin + ${javadoc.plugin.version} + + + org.apache.maven.plugins + maven-source-plugin + ${source.plugin.version} + + + org.sonatype.plugins + nexus-staging-maven-plugin + ${nexus.staging.plugin.version} + + + org.antlr + antlr4-maven-plugin + ${antlr4.version} + + + org.codehaus.mojo + exec-maven-plugin + ${exec.plugin.version} + + + org.apache.maven.plugins + maven-enforcer-plugin + ${enforcer.plugin.version} + + + org.apache.maven.plugins + maven-clean-plugin + ${clean.plugin.version} + + + + org.apache.maven.plugins + maven-resources-plugin + ${resources.plugin.version} + + + org.apache.maven.plugins + maven-jar-plugin + ${jar.plugin.version} + + + org.apache.maven.plugins + maven-install-plugin + ${install.plugin.version} + + + org.apache.maven.plugins + maven-deploy-plugin + ${deploy.plugin.version} + + + + + + + + + + Jonathan Shook + jshook@gmail.com + nosqlbench.io + http://nosqlbench.io/ + + + Sebastián Estévez + estevezsebastian@gmail.com + nosqlbench.io + http://nosqlbench.io/ + + + + + + ossrh-snapshot + https://oss.sonatype.org/content/repositories/snapshots + + + ossrh-staging + https://oss.sonatype.org/service/local/staging/deploy/maven2 + + + + + + + release + + + + + org.apache.maven.plugins + maven-gpg-plugin + 1.6 + + + sign-artifacts + verify + + sign + + + + + --pinentry-mode + loopback + + + + + + + + + + diff --git a/nb-annotations/pom.xml b/nb-annotations/pom.xml index fc33a8ce4..85e443b62 100644 --- a/nb-annotations/pom.xml +++ b/nb-annotations/pom.xml @@ -5,7 +5,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults diff --git a/nb-api/pom.xml b/nb-api/pom.xml index a1e2ae8f3..89b6f187a 100644 --- a/nb-api/pom.xml +++ b/nb-api/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -32,7 +32,7 @@ io.nosqlbench nb-annotations - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT @@ -63,6 +63,18 @@ oshi-core + + com.amazonaws + aws-java-sdk-s3 + 1.12.12 + + + javax.xml.bind + jaxb-api + 2.4.0-b180830.0359 + + + org.openjdk.jmh jmh-core diff --git a/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3ClientCache.java b/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3ClientCache.java new file mode 100644 index 000000000..a49d49665 --- /dev/null +++ b/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3ClientCache.java @@ -0,0 +1,41 @@ +package io.nosqlbench.nb.addins.s3.s3urlhandler; + +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; + +import java.util.WeakHashMap; + +/** + * This client cache uses the credentials provided in a URL to create + * a fingerprint, and then creates a customized S3 client for each unique + * instance. If these clients are not used, they are allowed to be expired + * from the map and collected. 
+ */ +public class S3ClientCache { + + private final WeakHashMap cache = new WeakHashMap<>(); + + public S3ClientCache() { + } + + public AmazonS3 get(S3UrlFields fields) { + AmazonS3 s3 = cache.computeIfAbsent(fields.getCredentialsFingerprint(), + cfp -> createAuthorizedClient(fields)); + return s3; + } + + private AmazonS3 createAuthorizedClient(S3UrlFields fields) { + if (fields.accessKey!=null && fields.secretKey!=null) { + AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); + AWSCredentials specialcreds = new BasicAWSCredentials(fields.accessKey, fields.secretKey); + builder = builder.withCredentials(new AWSStaticCredentialsProvider(specialcreds)); + return builder.build(); + } else { + return AmazonS3ClientBuilder.defaultClient(); + } + } + +} diff --git a/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlConnection.java b/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlConnection.java new file mode 100644 index 000000000..e9175e973 --- /dev/null +++ b/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlConnection.java @@ -0,0 +1,31 @@ +package io.nosqlbench.nb.addins.s3.s3urlhandler; + +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.S3Object; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.net.URLConnection; + +public class S3UrlConnection extends URLConnection { + + private final S3ClientCache clientCache; + + protected S3UrlConnection(S3ClientCache clientCache, URL url) { + super(url); + this.clientCache = clientCache; + } + + @Override + public InputStream getInputStream() throws IOException { + S3UrlFields fields = new S3UrlFields(url); + AmazonS3 s3 = clientCache.get(fields); + S3Object object = s3.getObject(fields.bucket, fields.key); + return object.getObjectContent(); + } + + @Override + public void connect() throws IOException { + } +} diff --git a/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlFields.java b/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlFields.java new file mode 100644 index 000000000..1b56922ad --- /dev/null +++ b/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlFields.java @@ -0,0 +1,99 @@ +package io.nosqlbench.nb.addins.s3.s3urlhandler; + +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; +import java.util.Objects; + +public class S3UrlFields { + + public final String bucket; + public final String key; + public final String secretKey; + public final String accessKey; + private final String endpoint; + + public static S3UrlFields fromURLString(String urlString) { + URL url = null; + try { + url = new URL(urlString); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } + return new S3UrlFields(url); + } + public S3UrlFields(URL url) { + + String accessKey = null; + String secretKey = null; + + String userinfo = url.getUserInfo(); + if (userinfo != null) { + String[] userfields = userinfo.split(":", 2); + accessKey = URLDecoder.decode(userfields[0], StandardCharsets.UTF_8); + secretKey = URLDecoder.decode(userfields[1], StandardCharsets.UTF_8); + } else { + String query = url.getQuery(); + if (query != null) { + for (String qs : query.split("&")) { + String[] words = qs.split(":", 2); + if (words[0].equals("accessKey")) { + accessKey = URLDecoder.decode(words[1], StandardCharsets.UTF_8); + } else if (words[0].equals("secretKey")) { + secretKey = 
URLDecoder.decode(words[1], StandardCharsets.UTF_8); + } + } + } + } + + // https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html + + this.accessKey = accessKey; + this.secretKey = secretKey; + + String[] bucketAndEndpoint = url.getHost().split("\\.", 2); + this.bucket = bucketAndEndpoint[0]; + this.endpoint = (bucketAndEndpoint.length==2) ? bucketAndEndpoint[1] : ""; + this.key = url.getPath().substring(1); + } + + public CredentialsFingerprint credentialsFingerprint() { + return new CredentialsFingerprint(this); + } + + public CredentialsFingerprint getCredentialsFingerprint() { + return null; + } + + public static class CredentialsFingerprint { + private final S3UrlFields fields; + + public CredentialsFingerprint(S3UrlFields fields) { + this.fields = fields; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + S3UrlFields that = (S3UrlFields) o; + + if (!Objects.equals(fields.secretKey, that.secretKey)) return false; + if (!Objects.equals(fields.accessKey, that.accessKey)) return false; + return Objects.equals(fields.endpoint, that.endpoint); + } + + @Override + public int hashCode() { + int result = (fields.secretKey != null ? fields.secretKey.hashCode() : 0); + result = 31 * result + (fields.accessKey != null ? fields.accessKey.hashCode() : 0); + result = 31 * result + (fields.endpoint != null ? fields.endpoint.hashCode() : 0); + return result; + } + + } + + +} diff --git a/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlStreamHandler.java b/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlStreamHandler.java new file mode 100644 index 000000000..f77ff0aaa --- /dev/null +++ b/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlStreamHandler.java @@ -0,0 +1,21 @@ +package io.nosqlbench.nb.addins.s3.s3urlhandler; + +import java.io.IOException; +import java.net.URL; +import java.net.URLStreamHandler; + +public class S3UrlStreamHandler extends URLStreamHandler { + + private final S3ClientCache clientCache; + private final String protocol; + + public S3UrlStreamHandler(S3ClientCache clientCache, String protocol) { + this.clientCache = clientCache; + this.protocol = protocol; + } + + @Override + protected S3UrlConnection openConnection(URL url) throws IOException { + return new S3UrlConnection(clientCache, url); + } +} diff --git a/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlStreamHandlerProvider.java b/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlStreamHandlerProvider.java new file mode 100644 index 000000000..389903c40 --- /dev/null +++ b/nb-api/src/main/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlStreamHandlerProvider.java @@ -0,0 +1,21 @@ +package io.nosqlbench.nb.addins.s3.s3urlhandler; + +import io.nosqlbench.nb.annotations.Service; + +import java.net.URLStreamHandler; +import java.net.spi.URLStreamHandlerProvider; + +@Service(value = URLStreamHandlerProvider.class, selector = "s3") +public class S3UrlStreamHandlerProvider extends URLStreamHandlerProvider { + + private final S3ClientCache clientCache = new S3ClientCache(); + + @Override + public URLStreamHandler createURLStreamHandler(String protocol) { + if ("s3".equals(protocol)) { + return new S3UrlStreamHandler(clientCache, protocol); + } + return null; + } + +} diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/NBEnvironment.java b/nb-api/src/main/java/io/nosqlbench/nb/api/NBEnvironment.java index 
6591ec1e5..4e363fa80 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/NBEnvironment.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/NBEnvironment.java @@ -1,6 +1,7 @@ package io.nosqlbench.nb.api; import io.nosqlbench.nb.api.errors.BasicError; +import io.nosqlbench.nb.api.metadata.SessionNamer; import org.apache.logging.log4j.Logger; import java.util.*; @@ -107,14 +108,19 @@ public class NBEnvironment { * @param defaultValue The value to return if the name is not found * @return the system property or environment variable's value, or the default value */ - public String getOr(String name, String defaultValue) { - String value = peek(name); + public String getOr(String name, String defaultValue, Map supplemental) { + String value = peek(name, supplemental); if (value == null) { value = defaultValue; } return reference(name, value); } + + public String getOr(String name, String defaultValue) { + return getOr(name, defaultValue, Map.of()); + } + /** * This is a non-referencing get of a value, and the canonical way to * access a value. This method codifies the semantics of whether something is @@ -122,8 +128,14 @@ public class NBEnvironment { * @param name The parameter name * @return A value, or null if none was found */ - private String peek(String name) { + private String peek(String name, Map supplemental) { String value = null; + if (supplemental.containsKey(name)) { + value = supplemental.get(name); + if (value!=null) { + return value; + } + } if (name.contains(".")) { value = System.getProperty(name.toLowerCase()); if (value != null) { @@ -167,7 +179,11 @@ public class NBEnvironment { } public boolean containsKey(String name) { - String value = peek(name); + return containsKey(name, Map.of()); + } + + public boolean containsKey(String name, Map supplemental) { + String value = peek(name, supplemental); return (value != null); } @@ -184,7 +200,7 @@ public class NBEnvironment { * @param word The word to interpolate the environment values into * @return The interpolated value, after substitutions, or null if any lookup failed */ - public Optional interpolate(String word) { + public Optional interpolate(String word, Map supplemental) { Pattern envpattern = Pattern.compile("(\\$(?[a-zA-Z_][A-Za-z0-9_.]+)|\\$\\{(?[^}]+)\\})"); Matcher matcher = envpattern.matcher(word); StringBuilder sb = new StringBuilder(); @@ -193,7 +209,7 @@ public class NBEnvironment { if (envvar == null) { envvar = matcher.group("env2"); } - String value = peek(envvar); + String value = peek(envvar,supplemental); if (value == null) { if (logger != null) { logger.debug("no value found for '" + envvar + "', returning Optional.empty() for '" + word + "'"); @@ -208,8 +224,12 @@ public class NBEnvironment { return Optional.of(sb.toString()); } - public List interpolate(CharSequence delim, String combined) { - String[] split = combined.split(delim.toString()); + public Optional interpolate(String word) { + return interpolate(word,Map.of()); + } + + public List interpolateEach(CharSequence delim, String toBeRecombined) { + String[] split = toBeRecombined.split(delim.toString()); List mapped = new ArrayList<>(); for (String pattern : split) { Optional interpolated = interpolate(pattern); @@ -218,4 +238,40 @@ public class NBEnvironment { return mapped; } + /** + * Interpolate system properties, environment variables, time fields, and arbitrary replacement strings + * into a single result. Templates such as {@code /tmp/%d-${testrun}-$System.index-SCENARIO} are supported. + * + *
+ *
+ * The tokens found in the raw template are interpolated in the following order:
+ * <ul>
+ * <li>Any token which exactly matches one of the keys in the provided map is substituted
+ * directly as is. No token sigil like '$' is used here, so if you want to support that
+ * form, you need to provide the keys in your substitution map as such.</li>
+ * <li>Any tokens in the form {@code %f} which are supported by the time fields in
+ * {@link Formatter} are honored and used with the timestamp provided.</li>
+ * <li>System Properties: Any token in the form {@code $word.word} will be taken as the name
+ * of a system property to be substituted.</li>
+ * <li>Environment Variables: Any token in the form {@code $name} will be taken as
+ * an environment variable to be substituted.</li>
+ * </ul>
+ * + * @param rawtext The template, including any of the supported token forms + * @param millis The timestamp to use for any temporal tokens + * @param map Any additional parameters to interpolate into the template first + * @return Optionally, the interpolated string, as long as all references were qualified. Error + * handling is contextual to the caller -- If not getting a valid result would cause a downstream error, + * an error should likely be thrown. + */ + public final Optional interpolateWithTimestamp(String rawtext, long millis, Map map) { + String result = rawtext; + result = SessionNamer.format(result, millis); + return interpolate(result,map); + } + + public final Optional interpolateWithTimestamp(String rawText, long millis) { + return interpolateWithTimestamp(rawText, millis, Map.of()); + } + } diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/metadata/ScenarioMetadata.java b/nb-api/src/main/java/io/nosqlbench/nb/api/metadata/ScenarioMetadata.java new file mode 100644 index 000000000..ffbd94e8e --- /dev/null +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/metadata/ScenarioMetadata.java @@ -0,0 +1,47 @@ +package io.nosqlbench.nb.api.metadata; + +import java.util.Map; + +/** + * If an object is ScenarioMetadata, then they will be updated with a map of + * scenario metadata. Supported types are: + *
+ * <ul>
+ * <li>ScriptingPluginInfo</li>
+ * </ul>
+ */ +public class ScenarioMetadata { + private final long startedAt; + private final String sessionName; + private final String systemId; + private final String systemFingerprint; + + public ScenarioMetadata(long startedAt, String sessionName, String systemId, String systemFingerprint) { + this.startedAt = startedAt; + this.sessionName = sessionName; + this.systemId = systemId; + this.systemFingerprint = systemFingerprint; + } + + public long getStartedAt() { + return startedAt; + } + + public String getSessionName() { + return sessionName; + } + + public String getSystemId() { + return systemId; + } + + public String getSystemFingerprint() { + return systemFingerprint; + } + + public Map asMap() { + return Map.of("STARTED_AT",String.valueOf(startedAt), + "SESSION_NAME",sessionName, + "SYSTEM_ID",systemId, + "SYSTEM_FINGERPRINT", systemFingerprint); + } +} diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/metadata/ScenarioMetadataAware.java b/nb-api/src/main/java/io/nosqlbench/nb/api/metadata/ScenarioMetadataAware.java new file mode 100644 index 000000000..aee925961 --- /dev/null +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/metadata/ScenarioMetadataAware.java @@ -0,0 +1,21 @@ +package io.nosqlbench.nb.api.metadata; + +/** + * Where supported, the following named fields are injected into object which + * implement this interface: + *
+ * <ul>
+ * <li>SCENARIO_NAME - The full scenario name, used for logging, metrics, etc.</li>
+ * <li>STARTED_AT_MILLIS - The millisecond timestamp used to create the scenario name</li>
+ * <li>SYSTEM_ID - A stable identifier based on the available IP addresses</li>
+ * <li>SYSTEM_FINGERPRINT - A stable and pseudonymous identifier based on SYSTEM_ID</li>
+ * </ul>
+ */ +public interface ScenarioMetadataAware { + void setScenarioMetadata(ScenarioMetadata metadata); + + static void apply(Object target, ScenarioMetadata metadata) { + if (target instanceof ScenarioMetadataAware) { + ((ScenarioMetadataAware)target).setScenarioMetadata(metadata); + } + } +} diff --git a/engine-cli/src/main/java/io/nosqlbench/engine/cli/SessionNamer.java b/nb-api/src/main/java/io/nosqlbench/nb/api/metadata/SessionNamer.java similarity index 76% rename from engine-cli/src/main/java/io/nosqlbench/engine/cli/SessionNamer.java rename to nb-api/src/main/java/io/nosqlbench/nb/api/metadata/SessionNamer.java index af5dde87d..ad85c271d 100644 --- a/engine-cli/src/main/java/io/nosqlbench/engine/cli/SessionNamer.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/metadata/SessionNamer.java @@ -15,11 +15,13 @@ * / */ -package io.nosqlbench.engine.cli; +package io.nosqlbench.nb.api.metadata; + +import java.util.Arrays; public class SessionNamer { - public String format(String sessionName) { + public static String format(String sessionName, long sessionTimeMillis) { String nameTemplate = sessionName; if (nameTemplate==null || nameTemplate.isEmpty()) { nameTemplate = "scenario_%tY%tm%td_%tH%tM%tS_%tL"; @@ -27,11 +29,14 @@ public class SessionNamer { int splits = nameTemplate.split("%").length -1; Long[] times = new Long[splits]; - long now = System.currentTimeMillis(); - for (int i = 0; i < times.length; i++) times[i] = now; + Arrays.fill(times, sessionTimeMillis); sessionName = String.format(nameTemplate, (Object[]) times); return sessionName; } + + public static String format(String sessionName) { + return format(sessionName, System.currentTimeMillis()); + } } diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/SystemId.java b/nb-api/src/main/java/io/nosqlbench/nb/api/metadata/SystemId.java similarity index 60% rename from nb-api/src/main/java/io/nosqlbench/nb/api/SystemId.java rename to nb-api/src/main/java/io/nosqlbench/nb/api/metadata/SystemId.java index 2a6b2d41d..30b5fc4d4 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/SystemId.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/metadata/SystemId.java @@ -1,4 +1,4 @@ -package io.nosqlbench.nb.api; +package io.nosqlbench.nb.api.metadata; import com.google.gson.Gson; import com.google.gson.GsonBuilder; @@ -7,10 +7,25 @@ import oshi.hardware.CentralProcessor; import oshi.hardware.HardwareAbstractionLayer; import oshi.hardware.NetworkIF; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; import java.util.*; public class SystemId { + /** + * Return the address of a node which is likely to be unique enough to identify + * it within a given subnet, after filtering out all local addresses. This is useful + * when you are managing configuration or results for a set of systems which + * share a common IP addressing scheme. This identifier should be stable as long + * as the node's addresses do not change. + * + * If you are needing an identifier for a node but wish to expose any address data, + * you can use the {@link #getNodeFingerprint()} which takes this value and hashes + * it with SHA-1 to produce a hex string. 
+ * @return A address for the node, likely to be unique and stable for its lifetime + */ public static String getNodeId() { SystemInfo sysinfo = new SystemInfo(); HardwareAbstractionLayer hal = sysinfo.getHardware(); @@ -38,6 +53,28 @@ public class SystemId { return systemID; } + /** + * Produce a stable string identifier consisting of hexadecimal characters. + * The internal data used for this value is based on a stable ordering of non-local + * ip addresses available on the system. + * @return A stable node identifier + */ + public static String getNodeFingerprint() { + String addrId = getNodeId(); + try { + MessageDigest sha1_digest = MessageDigest.getInstance("SHA-1"); + byte[] addrBytes = sha1_digest.digest(addrId.getBytes(StandardCharsets.UTF_8)); + String fingerprint = ""; + for (int i=0; i < addrBytes.length; i++) { + fingerprint += + Integer.toString( ( addrBytes[i] & 0xff ) + 0x100, 16).substring( 1 ); + } + return fingerprint.toUpperCase(Locale.ROOT); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + } + public static String getHostSummary() { SystemInfo sysinfo = new SystemInfo(); HardwareAbstractionLayer hal = sysinfo.getHardware(); diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/spi/SimpleServiceLoader.java b/nb-api/src/main/java/io/nosqlbench/nb/api/spi/SimpleServiceLoader.java index fcfed676f..20430d3b9 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/spi/SimpleServiceLoader.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/spi/SimpleServiceLoader.java @@ -70,6 +70,7 @@ public class SimpleServiceLoader { providers = new LinkedHashMap<>(); loader.stream().forEach(provider -> { + logger.trace("loading provider: " + provider.type()); Class type = provider.type(); if (!type.isAnnotationPresent(Service.class)) { throw new RuntimeException( diff --git a/nb-api/src/test/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlStreamHandlerTest.java b/nb-api/src/test/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlStreamHandlerTest.java new file mode 100644 index 000000000..0139431ab --- /dev/null +++ b/nb-api/src/test/java/io/nosqlbench/nb/addins/s3/s3urlhandler/S3UrlStreamHandlerTest.java @@ -0,0 +1,54 @@ +package io.nosqlbench.nb.addins.s3.s3urlhandler; + +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; +import com.amazonaws.services.s3.model.Bucket; +import com.amazonaws.services.s3.model.PutObjectResult; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import java.io.BufferedReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.URL; + +import static org.assertj.core.api.Assertions.assertThat; + +public class S3UrlStreamHandlerTest { + + public static String bucketName = "nb-extension-test"; + public static String keyName = "key-name"; + public static String testValue = "test-value"; + + /** + * This test requires that you have credentials already configured on your local system + * for S3. It creates an object using the s3 client directly, then uses a generic + * URL method to access and verify the contents. 
+ */ + @Disabled + @Test + public void sanityCheckS3UrlHandler() { + AmazonS3 client = AmazonS3ClientBuilder.defaultClient(); + + + Bucket bucket = null; + + if (!client.doesBucketExistV2(bucketName)) { + bucket = client.createBucket(bucketName); + } + PutObjectResult putObjectResult = client.putObject(bucketName, keyName, testValue); + assertThat(putObjectResult).isNotNull(); + + try { + URL url = new URL("s3://"+bucketName+"/"+keyName); + InputStream is = url.openStream(); + BufferedReader br = new BufferedReader(new InputStreamReader(is)); + String line = br.readLine(); + assertThat(line).isEqualTo(testValue); + System.out.println(line); + } catch (Exception e) { + e.printStackTrace(); + } + } + +} diff --git a/nb-api/src/test/java/io/nosqlbench/nb/addins/s3/s3utils/S3UploaderDemo.java b/nb-api/src/test/java/io/nosqlbench/nb/addins/s3/s3utils/S3UploaderDemo.java new file mode 100644 index 000000000..20c5f78fb --- /dev/null +++ b/nb-api/src/test/java/io/nosqlbench/nb/addins/s3/s3utils/S3UploaderDemo.java @@ -0,0 +1,45 @@ +package io.nosqlbench.nb.addins.s3.s3utils; + +import com.amazonaws.services.s3.transfer.MultipleFileUpload; +import com.amazonaws.services.s3.transfer.TransferManager; +import com.amazonaws.services.s3.transfer.TransferManagerBuilder; +import io.nosqlbench.nb.addins.s3.s3urlhandler.S3ClientCache; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.LinkOption; +import java.nio.file.Path; + +/** + * This is a generic s3 directory uploader which is neither a scripting plugin nor a standard URL handler. + */ +public class S3UploaderDemo { + + private final S3ClientCache clientCache = new S3ClientCache(); + + private static final Logger logger = LogManager.getLogger(S3UploaderDemo.class); + + public MultipleFileUpload syncup(Path sourcePath, String bucket, String prefix) { + + if (!FileSystems.getDefault().equals(sourcePath.getFileSystem())) { + throw new RuntimeException("The file must reside on the default filesystem to be uploaded by S3."); + } + + if (!Files.isDirectory(sourcePath, LinkOption.NOFOLLOW_LINKS)) { + throw new RuntimeException("path '" + sourcePath + "' is not a directory."); + } + + TransferManager tm = TransferManagerBuilder.defaultTransferManager(); + MultipleFileUpload mfu = tm.uploadDirectory(bucket, prefix, sourcePath.toFile(), true); + try { + mfu.waitForCompletion(); + } catch (InterruptedException e) { + throw new RuntimeException("Multi-file upload was interrupted!"); + } + tm.shutdownNow(); + return mfu; + } + +} diff --git a/nb-api/src/test/java/io/nosqlbench/nb/addins/s3/s3utils/S3UploaderTest.java b/nb-api/src/test/java/io/nosqlbench/nb/addins/s3/s3utils/S3UploaderTest.java new file mode 100644 index 000000000..218587dcb --- /dev/null +++ b/nb-api/src/test/java/io/nosqlbench/nb/addins/s3/s3utils/S3UploaderTest.java @@ -0,0 +1,21 @@ +package io.nosqlbench.nb.addins.s3.s3utils; + +import com.amazonaws.services.s3.transfer.MultipleFileUpload; +import io.nosqlbench.nb.addins.s3.s3urlhandler.S3UrlStreamHandlerTest; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import java.nio.file.Path; + +public class S3UploaderTest { + + @Disabled + @Test + public void testDirUpload() { + Path path = Path.of("src/test/resources/nesteddir1"); + S3UploaderDemo ul = new S3UploaderDemo(); + MultipleFileUpload mfu = ul.syncup(path, S3UrlStreamHandlerTest.bucketName, "test-prefix"); + System.out.println(mfu); + } + 
+} diff --git a/nb-api/src/test/java/io/nosqlbench/nb/api/NBEnvironmentTest.java b/nb-api/src/test/java/io/nosqlbench/nb/api/NBEnvironmentTest.java index 9693e4d04..9f316ad61 100644 --- a/nb-api/src/test/java/io/nosqlbench/nb/api/NBEnvironmentTest.java +++ b/nb-api/src/test/java/io/nosqlbench/nb/api/NBEnvironmentTest.java @@ -2,6 +2,9 @@ package io.nosqlbench.nb.api; import org.junit.jupiter.api.Test; +import java.util.Map; +import java.util.Optional; + import static org.assertj.core.api.Assertions.assertThat; public class NBEnvironmentTest { @@ -15,4 +18,22 @@ public class NBEnvironmentTest { assertThat(home1).matches(".+"); } + @Test + public void testInterpolationWithTimestamp() { + NBEnvironment env = new NBEnvironment(); + long millis = 1633964892320L; + String time1 = env.interpolateWithTimestamp("word WOO$WOO %td %% end", millis, Map.of("WOO","WOW")).orElse(null); + assertThat(time1).isEqualTo("word WOOWOW 11 % end"); + } + + @Test + public void testInterpolationPrecedence() { + NBEnvironment env = new NBEnvironment(); + Optional superseded = env.interpolate("$TEST_KEY, $USER", Map.of("TEST_KEY", "supersedes1", "USER", "supersedes2")); + assertThat(superseded).contains("supersedes1, supersedes2"); + superseded = env.interpolate("$USER", Map.of("TEST_KEY", "supersedes1")); + assertThat(superseded).isPresent(); + assertThat(superseded.get()).isNotEqualTo("supersedes2"); + } + } diff --git a/nb-api/src/test/java/io/nosqlbench/nb/api/SystemIdTest.java b/nb-api/src/test/java/io/nosqlbench/nb/api/SystemIdTest.java index 62508270f..940282c33 100644 --- a/nb-api/src/test/java/io/nosqlbench/nb/api/SystemIdTest.java +++ b/nb-api/src/test/java/io/nosqlbench/nb/api/SystemIdTest.java @@ -1,7 +1,10 @@ package io.nosqlbench.nb.api; +import io.nosqlbench.nb.api.metadata.SystemId; import org.junit.jupiter.api.Test; +import static org.assertj.core.api.Assertions.assertThat; + public class SystemIdTest { @Test @@ -9,4 +12,17 @@ public class SystemIdTest { String info = SystemId.getHostSummary(); System.out.println(info); } + + @Test + public void testNostId() { + String info = SystemId.getNodeId(); + assertThat(info).matches("\\d+\\.\\d+\\.\\d+\\.\\d+"); + } + + @Test + public void testNodeFingerprint() { + String hash = SystemId.getNodeFingerprint(); + assertThat(hash).matches("[A-Z0-9]+"); + } + } diff --git a/nb-api/src/test/java/io/nosqlbench/nb/api/content/NBIOTest.java b/nb-api/src/test/java/io/nosqlbench/nb/api/content/NBIOTest.java index 6193e192d..d415d3fa3 100644 --- a/nb-api/src/test/java/io/nosqlbench/nb/api/content/NBIOTest.java +++ b/nb-api/src/test/java/io/nosqlbench/nb/api/content/NBIOTest.java @@ -2,6 +2,10 @@ package io.nosqlbench.nb.api.content; import org.junit.jupiter.api.Test; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.LinkedHashSet; @@ -257,4 +261,20 @@ public class NBIOTest { } + @Test + public void matchFullyQualifiedPathCorrectly() { + Path tmpdir = Paths.get("/tmp"); + if (!Files.isDirectory(tmpdir)) return; + try { + File tempFile = File.createTempFile(tmpdir.toString(), "testfile.csv"); + tempFile.deleteOnExit(); + String fullpath = tempFile.getAbsolutePath(); + Files.write(Path.of(fullpath), "COL1,COL2\n\"val1\",\"val2\"\n".getBytes(StandardCharsets.UTF_8)); + List> results = NBIO.all().name(fullpath).list(); + assertThat(results.size()).isEqualTo(1); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } 
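Reviewer note: a minimal sketch (not part of this changeset) of how the new interpolation entry point and node fingerprint might be exercised together, mirroring the tests above. It assumes nb-api is on the classpath; the template text, the SESSION map key, and the presence of a USER environment variable are illustrative assumptions only.

import io.nosqlbench.nb.api.NBEnvironment;
import io.nosqlbench.nb.api.metadata.SystemId;

import java.util.Map;
import java.util.Optional;

public class InterpolationSketch {
    public static void main(String[] args) {
        NBEnvironment env = new NBEnvironment();

        // %t... fields are expanded against the supplied timestamp (via SessionNamer),
        // $SESSION is resolved from the supplemental map, and $USER falls back to the
        // environment. An empty Optional means some reference could not be resolved.
        Optional<String> dir = env.interpolateWithTimestamp(
            "logs/%tY%tm%td-$SESSION-$USER",
            System.currentTimeMillis(),
            Map.of("SESSION", "demo_run")
        );
        System.out.println(dir.orElse("unresolved template"));

        // The fingerprint is a SHA-1 hex digest over the node id, so raw addresses
        // are not exposed when it is used in results or metadata.
        System.out.println(SystemId.getNodeFingerprint());
    }
}

The Optional return keeps error handling with the caller, as described in the interpolateWithTimestamp javadoc above.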
diff --git a/nb/pom.xml b/nb/pom.xml index 52c56e4e9..07c508b78 100644 --- a/nb/pom.xml +++ b/nb/pom.xml @@ -5,7 +5,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -24,127 +24,127 @@ io.nosqlbench engine-rest - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench engine-cli - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench engine-docs - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench engine-core - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench engine-extensions - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench nbr - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-web - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-kafka - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-stdout - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-diag - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-tcp - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-http - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-jmx - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-dsegraph-shaded - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-cql-shaded - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-cqld3-shaded - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-cqlverify - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-mongodb - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-pulsar - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-cockroachdb - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-jms - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT
@@ -231,7 +231,7 @@ io.nosqlbench driver-dsegraph-shaded - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT @@ -244,12 +244,12 @@ io.nosqlbench driver-cql-shaded - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-cqlverify - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT @@ -264,7 +264,7 @@ io.nosqlbench driver-cqld4 - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT @@ -277,7 +277,7 @@ io.nosqlbench driver-mongodb - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/nb/src/test/resources/scripts/async/extension_files.js b/nb/src/test/resources/scripts/async/extension_files.js new file mode 100644 index 000000000..5ef87ca8d --- /dev/null +++ b/nb/src/test/resources/scripts/async/extension_files.js @@ -0,0 +1,2 @@ +// Just an example +var content = files.read("somefile.txt"); diff --git a/nb/src/test/resources/scripts/async/extension_globalstate.js b/nb/src/test/resources/scripts/async/extension_globalstate.js new file mode 100644 index 000000000..4df5c24af --- /dev/null +++ b/nb/src/test/resources/scripts/async/extension_globalstate.js @@ -0,0 +1,4 @@ +// Just an example +var result= globalvars.put("result","OK") +var result = globalvars.get("result"); +print("result="+result); diff --git a/nb/src/test/resources/scripts/async/extension_http.js b/nb/src/test/resources/scripts/async/extension_http.js new file mode 100644 index 000000000..ea3d04bfa --- /dev/null +++ b/nb/src/test/resources/scripts/async/extension_http.js @@ -0,0 +1 @@ +var response = http.get("http://example.google.com") diff --git a/nb/src/test/resources/scripts/async/extension_s3.js b/nb/src/test/resources/scripts/async/extension_s3.js new file mode 100644 index 000000000..505a5ad66 --- /dev/null +++ b/nb/src/test/resources/scripts/async/extension_s3.js @@ -0,0 +1,3 @@ +// This requires active credentials, so it is disabled by default. 
+// This still serves as an example +// s3.uploadDirToUrl("testdata","s3://nb-extension-test/testdata1"); diff --git a/nb/src/test/resources/scripts/async/extension_shutdown_hook.js b/nb/src/test/resources/scripts/async/extension_shutdown_hook.js new file mode 100644 index 000000000..b7b3bc53c --- /dev/null +++ b/nb/src/test/resources/scripts/async/extension_shutdown_hook.js @@ -0,0 +1,3 @@ +shutdown.addShutdownHook('testfunc', function f() { + print("shutdown hook running"); +}); diff --git a/nbr/pom.xml b/nbr/pom.xml index 403ecbabf..df762035d 100644 --- a/nbr/pom.xml +++ b/nbr/pom.xml @@ -5,7 +5,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -24,37 +24,37 @@ io.nosqlbench engine-rest - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench engine-cli - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench engine-docs - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench engine-core - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench engine-extensions - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench driver-diag - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/nbr/src/test/java/io/nosqlbench/engine/core/script/AsyncScriptIntegrationTests.java b/nbr/src/test/java/io/nosqlbench/engine/core/script/AsyncScriptIntegrationTests.java index 1c3e7c3a8..0404a3c6c 100644 --- a/nbr/src/test/java/io/nosqlbench/engine/core/script/AsyncScriptIntegrationTests.java +++ b/nbr/src/test/java/io/nosqlbench/engine/core/script/AsyncScriptIntegrationTests.java @@ -237,6 +237,14 @@ public class AsyncScriptIntegrationTests { assertThat(scenarioResult.getIOLog()).contains("count: "); } + @Test + public void testShutdownHook() { + ScenarioResult scenarioResult = runScenario("extension_shutdown_hook"); + assertThat(scenarioResult.getIOLog()).doesNotContain("shutdown hook running").describedAs( + "shutdown hooks should not run in the same IO context as the main scenario" + ); + } + @Test public void testExceptionPropagationFromMotorThread() { ScenarioResult scenarioResult = runScenario("activityerror"); diff --git a/pom.xml b/pom.xml index 17ad4df40..fad15759a 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT mvn-defaults diff --git a/virtdata-api/pom.xml b/virtdata-api/pom.xml index ef8853ff6..85f8c4c5b 100644 --- a/virtdata-api/pom.xml +++ b/virtdata-api/pom.xml @@ -7,7 +7,7 @@ io.nosqlbench mvn-defaults - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -23,14 +23,14 @@ io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT nb-api io.nosqlbench virtdata-lang - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/virtdata-lang/pom.xml b/virtdata-lang/pom.xml index 86c6c3a08..fd11d3451 100644 --- a/virtdata-lang/pom.xml +++ b/virtdata-lang/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults diff --git a/virtdata-lib-basics/pom.xml b/virtdata-lib-basics/pom.xml index e3e895ac8..96dff44e9 100644 --- a/virtdata-lib-basics/pom.xml +++ b/virtdata-lib-basics/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -20,7 +20,7 @@ io.nosqlbench virtdata-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/virtdata-lib-curves4/pom.xml b/virtdata-lib-curves4/pom.xml index d8180aad1..472525977 100644 --- a/virtdata-lib-curves4/pom.xml +++ b/virtdata-lib-curves4/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -22,13 +22,13 @@ io.nosqlbench virtdata-api - 
4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench virtdata-lib-basics - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/virtdata-lib-random/pom.xml b/virtdata-lib-random/pom.xml index 7340ace06..9951266af 100644 --- a/virtdata-lib-random/pom.xml +++ b/virtdata-lib-random/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -20,13 +20,13 @@ io.nosqlbench virtdata-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench virtdata-lib-basics - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/virtdata-lib-realer/pom.xml b/virtdata-lib-realer/pom.xml index 50f85453b..42aa56c8a 100644 --- a/virtdata-lib-realer/pom.xml +++ b/virtdata-lib-realer/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -20,7 +20,7 @@ io.nosqlbench virtdata-lib-basics - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/virtdata-realdata/pom.xml b/virtdata-realdata/pom.xml index d25fec8ad..390e78a4e 100644 --- a/virtdata-realdata/pom.xml +++ b/virtdata-realdata/pom.xml @@ -7,7 +7,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -18,7 +18,7 @@ io.nosqlbench virtdata-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT diff --git a/virtdata-userlibs/pom.xml b/virtdata-userlibs/pom.xml index 534735b45..7707899a6 100644 --- a/virtdata-userlibs/pom.xml +++ b/virtdata-userlibs/pom.xml @@ -4,7 +4,7 @@ mvn-defaults io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT ../mvn-defaults @@ -18,36 +18,36 @@ io.nosqlbench virtdata-realdata - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench virtdata-lib-realer - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench virtdata-api - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench virtdata-lib-random - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT virtdata-lib-basics io.nosqlbench - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT virtdata-lib-curves4 @@ -55,7 +55,7 @@ io.nosqlbench docsys - 4.15.58-SNAPSHOT + 4.15.64-SNAPSHOT
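Usage note on the new s3:// scheme added in nb-api: once S3UrlStreamHandlerProvider is discoverable through the JDK's URLStreamHandlerProvider service loading (i.e. nb-api is on the classpath), S3 objects can be read through the plain java.net.URL API, as the disabled sanity test above does. The sketch below is illustrative only: the bucket and key are the test fixtures from this changeset, and AWS credentials are assumed to be configured locally.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class S3UrlReadSketch {
    public static void main(String[] args) throws Exception {
        // Reads an object through the registered s3:// handler; credentials come
        // from the default AWS provider chain when none are embedded in the URL.
        URL url = new URL("s3://nb-extension-test/key-name");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
            reader.lines().forEach(System.out::println);
        }
    }
}

Credentials may also be supplied inline as s3://accessKey:secretKey@bucket/key, which S3UrlFields splits out of the URL userinfo so the client cache can build a client for that credential pair.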