mirror of
https://github.com/nosqlbench/nosqlbench.git
synced 2024-12-28 01:31:05 -06:00
Merge remote-tracking branch 'upstream/master'
This commit is contained in:
commit
ba1082eefe
3
.github/workflows/release.yml
vendored
3
.github/workflows/release.yml
vendored
@ -77,7 +77,8 @@ jobs:
|
||||
- name: prepare release summary
|
||||
id: prepare_summary
|
||||
run: |
|
||||
summary=$(scripts/release-notes.sh)
|
||||
#summary=$(scripts/release-notes.sh)
|
||||
summary=$(cat RELEASENOTES.md)
|
||||
summary="${summary//'%'/'%25'}"
|
||||
summary="${summary//$'\n'/'%0A'}"
|
||||
summary="${summary//$'\r'/'%0D'}"
|
||||
|
@ -1 +1,12 @@
|
||||
2a1284c3 (HEAD -> master) sync up mongo version and enable
|
||||
d3b3490b formatting
|
||||
6d8443e6 simplify command builder
|
||||
42559152 support graphite exporter custom mappings
|
||||
9235a51b add prom label based dashboard
|
||||
8a37abe7 mark previous dashboard as deprecated
|
||||
d64c0840 sanitize named scenario aliasing
|
||||
b248c866 configurable docker tags for grafana
|
||||
98bb5feb support more grafana datasources and dashboards
|
||||
3aea85cb use named steps in bundled workloads
|
||||
0fd96c9c added astra-specific schema support for Astra or DSE with guardrail enabled
|
||||
eb41be7e organize cql op functions
|
||||
|
@ -9,7 +9,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -18,7 +18,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>nb-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@ -112,7 +112,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>virtdata-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
@ -4,7 +4,7 @@
|
||||
<parent>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -23,7 +23,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
@ -13,6 +13,7 @@ import io.nosqlbench.activitytype.cql.errorhandling.exceptions.MaxTriesExhausted
|
||||
import io.nosqlbench.activitytype.cql.errorhandling.exceptions.UnexpectedPagingException;
|
||||
import io.nosqlbench.activitytype.cql.statements.core.ReadyCQLStatement;
|
||||
import com.google.common.util.concurrent.ListenableFuture;
|
||||
import io.nosqlbench.activitytype.cql.statements.modifiers.StatementModifier;
|
||||
import io.nosqlbench.engine.api.activityapi.core.ActivityDefObserver;
|
||||
import io.nosqlbench.engine.api.activityapi.core.MultiPhaseAction;
|
||||
import io.nosqlbench.engine.api.activityapi.core.SyncAction;
|
||||
|
@ -14,6 +14,7 @@ import io.nosqlbench.activitytype.cql.errorhandling.NBCycleErrorHandler;
|
||||
import io.nosqlbench.activitytype.cql.errorhandling.HashedCQLErrorHandler;
|
||||
import io.nosqlbench.activitytype.cql.statements.binders.CqlBinderTypes;
|
||||
import io.nosqlbench.activitytype.cql.statements.core.*;
|
||||
import io.nosqlbench.activitytype.cql.statements.modifiers.StatementModifier;
|
||||
import io.nosqlbench.activitytype.cql.statements.rowoperators.RowCycleOperators;
|
||||
import io.nosqlbench.activitytype.cql.statements.rowoperators.Save;
|
||||
import io.nosqlbench.activitytype.cql.statements.rsoperators.ResultSetCycleOperators;
|
||||
|
@ -15,6 +15,7 @@ import io.nosqlbench.activitytype.cql.errorhandling.exceptions.UnexpectedPagingE
|
||||
import io.nosqlbench.activitytype.cql.statements.core.ReadyCQLStatement;
|
||||
import com.google.common.util.concurrent.Futures;
|
||||
import com.google.common.util.concurrent.ListenableFuture;
|
||||
import io.nosqlbench.activitytype.cql.statements.modifiers.StatementModifier;
|
||||
import io.nosqlbench.engine.api.activityapi.core.BaseAsyncAction;
|
||||
import io.nosqlbench.engine.api.activityapi.core.ops.fluent.opfacets.FailedOp;
|
||||
import io.nosqlbench.engine.api.activityapi.core.ops.fluent.opfacets.StartedOp;
|
||||
|
@ -0,0 +1,11 @@
|
||||
package io.nosqlbench.activitytype.cql.statements.modifiers;
|
||||
|
||||
import com.datastax.driver.core.Statement;
|
||||
|
||||
public class StartTimerOp implements StatementModifier {
|
||||
|
||||
@Override
|
||||
public Statement modify(Statement unmodified, long cycleNum) {
|
||||
return null;
|
||||
}
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
package io.nosqlbench.activitytype.cql.core;
|
||||
package io.nosqlbench.activitytype.cql.statements.modifiers;
|
||||
|
||||
import com.datastax.driver.core.Statement;
|
||||
|
@ -0,0 +1,13 @@
|
||||
package io.nosqlbench.activitytype.cql.statements.rsoperators;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.Statement;
|
||||
import io.nosqlbench.activitytype.cql.api.ResultSetCycleOperator;
|
||||
|
||||
public class StopTimerOp implements ResultSetCycleOperator {
|
||||
|
||||
@Override
|
||||
public int apply(ResultSet resultSet, Statement statement, long cycle) {
|
||||
return 0;
|
||||
}
|
||||
}
|
@ -5,7 +5,7 @@ import com.datastax.driver.core.QueryTrace;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.Statement;
|
||||
import io.nosqlbench.activitytype.cql.api.ResultSetCycleOperator;
|
||||
import io.nosqlbench.activitytype.cql.core.StatementModifier;
|
||||
import io.nosqlbench.activitytype.cql.statements.modifiers.StatementModifier;
|
||||
import io.nosqlbench.engine.api.util.SimpleConfig;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -3,9 +3,10 @@ description: An IOT workload which more optimal DSE settings
|
||||
|
||||
scenarios:
|
||||
default:
|
||||
- run driver=cql tags==phase:schema threads==1 cycles==UNDEF
|
||||
- run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
- run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
schema: run driver=cql tags==phase:schema threads==1 cycles==UNDEF
|
||||
rampup: run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
main: run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
|
||||
bindings:
|
||||
machine_id: Mod(<<sources:10000>>); ToHashedUUID() -> java.util.UUID
|
||||
sensor_name: HashedLineToString('data/variable_words.txt')
|
||||
|
@ -4,13 +4,13 @@ description: |
|
||||
|
||||
scenarios:
|
||||
default:
|
||||
- run driver=cql tags==phase:schema threads==1 cycles==UNDEF
|
||||
- run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
- run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
schema: run driver=cql tags==phase:schema threads==1 cycles==UNDEF
|
||||
rampup: run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
main: run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
astra:
|
||||
- run driver=cql tags==phase:schema-astra threads==1 cycles==UNDEF
|
||||
- run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
- run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
schema: run driver=cql tags==phase:schema-astra threads==1 cycles==UNDEF
|
||||
rampup: run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
main: run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
params:
|
||||
instrument: TEMPLATE(instrument,false)
|
||||
bindings:
|
||||
|
@ -1,13 +1,14 @@
|
||||
# nb -v run driver=cql yaml=cql-keyvalue tags=phase:schema host=dsehost
|
||||
scenarios:
|
||||
default:
|
||||
- run driver=cql tags==phase:schema threads==1 cycles==UNDEF
|
||||
- run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
- run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
schema: run driver=cql tags==phase:schema threads==1 cycles==UNDEF
|
||||
rampup: run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
main: run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
astra:
|
||||
- run driver=cql tags==phase:schema-astra threads==1 cycles==UNDEF
|
||||
- run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
- run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
schema: run driver=cql tags==phase:schema-astra threads==1 cycles==UNDEF
|
||||
rampup: run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
main: run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
|
||||
bindings:
|
||||
seq_key: Mod(<<keycount:1000000000>>); ToString() -> String
|
||||
seq_value: Hash(); Mod(<<valuecount:1000000000>>); ToString() -> String
|
||||
|
@ -1,13 +1,14 @@
|
||||
# nb -v cql-tabular rampup-cycles=1E6 main-cycles=1E9
|
||||
scenarios:
|
||||
default:
|
||||
- run driver=cql tags==phase:schema threads==1 cycles==UNDEF
|
||||
- run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
- run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
schema: run driver=cql tags==phase:schema threads==1 cycles==UNDEF
|
||||
rampup: run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
main: run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
astra:
|
||||
- run driver=cql tags==phase:schema-astra threads==1 cycles==UNDEF
|
||||
- run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
- run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
schema: run driver=cql tags==phase:schema-astra threads==1 cycles==UNDEF
|
||||
rampup: run driver=cql tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
|
||||
main: run driver=cql tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
|
||||
|
||||
bindings:
|
||||
# for ramp-up and verify
|
||||
part_layout: Div(<<partsize:1000000>>); ToString() -> String
|
||||
|
@ -4,7 +4,7 @@
|
||||
<parent>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -24,7 +24,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>driver-cql-shaded</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -20,7 +20,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -22,7 +22,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<!-- test scope only -->
|
||||
|
@ -4,7 +4,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -30,13 +30,13 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>driver-stdout</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -21,7 +21,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -21,7 +21,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<!-- test scope only -->
|
||||
|
@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -23,13 +23,13 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>driver-stdout</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<!-- test scope only -->
|
||||
|
@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -22,7 +22,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
|
@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -22,19 +22,19 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>nb-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>nb-annotations</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>virtdata-userlibs</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
@ -8,7 +8,6 @@ import io.nosqlbench.engine.api.templating.StrInterpolator;
|
||||
import io.nosqlbench.nb.api.content.Content;
|
||||
import io.nosqlbench.nb.api.content.NBIO;
|
||||
import io.nosqlbench.nb.api.errors.BasicError;
|
||||
import io.nosqlbench.virtdata.library.basics.core.stathelpers.DiscreteProbabilityBuffer;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@ -34,11 +33,11 @@ public class NBCLIScenarioParser {
|
||||
public static boolean isFoundWorkload(String workload,
|
||||
String... includes) {
|
||||
Optional<Content<?>> found = NBIO.all()
|
||||
.prefix("activities")
|
||||
.prefix(includes)
|
||||
.name(workload)
|
||||
.extension("yaml")
|
||||
.first();
|
||||
.prefix("activities")
|
||||
.prefix(includes)
|
||||
.name(workload)
|
||||
.extension("yaml")
|
||||
.first();
|
||||
return found.isPresent();
|
||||
}
|
||||
|
||||
@ -48,11 +47,11 @@ public class NBCLIScenarioParser {
|
||||
|
||||
String workloadName = arglist.removeFirst();
|
||||
Optional<Content<?>> found = NBIO.all()
|
||||
.prefix("activities")
|
||||
.prefix(includes)
|
||||
.name(workloadName)
|
||||
.extension("yaml")
|
||||
.first();
|
||||
.prefix("activities")
|
||||
.prefix(includes)
|
||||
.name(workloadName)
|
||||
.extension("yaml")
|
||||
.first();
|
||||
//
|
||||
Content<?> workloadContent = found.orElseThrow();
|
||||
|
||||
@ -62,9 +61,9 @@ public class NBCLIScenarioParser {
|
||||
// Buffer in CLI word from user, but only until the next command
|
||||
List<String> scenarioNames = new ArrayList<>();
|
||||
while (arglist.size() > 0
|
||||
&& !arglist.peekFirst().contains("=")
|
||||
&& !arglist.peekFirst().startsWith("-")
|
||||
&& !RESERVED_WORDS.contains(arglist.peekFirst())) {
|
||||
&& !arglist.peekFirst().contains("=")
|
||||
&& !arglist.peekFirst().startsWith("-")
|
||||
&& !RESERVED_WORDS.contains(arglist.peekFirst())) {
|
||||
scenarioNames.add(arglist.removeFirst());
|
||||
}
|
||||
if (scenarioNames.size() == 0) {
|
||||
@ -74,8 +73,8 @@ public class NBCLIScenarioParser {
|
||||
// Parse CLI command into keyed parameters, in order
|
||||
LinkedHashMap<String, String> userParams = new LinkedHashMap<>();
|
||||
while (arglist.size() > 0
|
||||
&& arglist.peekFirst().contains("=")
|
||||
&& !arglist.peekFirst().startsWith("-")) {
|
||||
&& arglist.peekFirst().contains("=")
|
||||
&& !arglist.peekFirst().startsWith("-")) {
|
||||
String[] arg = arglist.removeFirst().split("=");
|
||||
arg[0] = Synonyms.canonicalize(arg[0], logger);
|
||||
if (userParams.containsKey(arg[0])) {
|
||||
@ -93,11 +92,11 @@ public class NBCLIScenarioParser {
|
||||
|
||||
// Load in named scenario
|
||||
Content<?> yamlWithNamedScenarios = NBIO.all()
|
||||
.prefix(SEARCH_IN)
|
||||
.prefix(includes)
|
||||
.name(workloadName)
|
||||
.extension("yaml")
|
||||
.one();
|
||||
.prefix(SEARCH_IN)
|
||||
.prefix(includes)
|
||||
.name(workloadName)
|
||||
.extension("yaml")
|
||||
.one();
|
||||
|
||||
StmtsDocList stmts = StatementsLoader.load(logger, yamlWithNamedScenarios);
|
||||
|
||||
@ -107,7 +106,7 @@ public class NBCLIScenarioParser {
|
||||
|
||||
if (namedSteps == null) {
|
||||
throw new BasicError("Unable to find named scenario '" + scenarioName + "' in workload '" + workloadName
|
||||
+ "', but you can pick from " + String.join(",", scenarios.getScenarioNames()));
|
||||
+ "', but you can pick from " + String.join(",", scenarios.getScenarioNames()));
|
||||
}
|
||||
|
||||
// each named command line step of the named scenario
|
||||
@ -136,37 +135,38 @@ public class NBCLIScenarioParser {
|
||||
|
||||
// Undefine any keys with a value of 'undef'
|
||||
List<String> undefKeys = buildingCmd.entrySet()
|
||||
.stream()
|
||||
.filter(e -> e.getValue().toLowerCase().endsWith("=undef"))
|
||||
.map(Map.Entry::getKey)
|
||||
.collect(Collectors.toList());
|
||||
.stream()
|
||||
.filter(e -> e.getValue().toLowerCase().endsWith("=undef"))
|
||||
.map(Map.Entry::getKey)
|
||||
.collect(Collectors.toList());
|
||||
undefKeys.forEach(buildingCmd::remove);
|
||||
|
||||
if (!buildingCmd.containsKey("workload")) {
|
||||
String relativeWorkloadPathFromRoot = yamlWithNamedScenarios.asPath().toString();
|
||||
relativeWorkloadPathFromRoot = relativeWorkloadPathFromRoot.startsWith("/") ?
|
||||
relativeWorkloadPathFromRoot.substring(1) : relativeWorkloadPathFromRoot;
|
||||
relativeWorkloadPathFromRoot.substring(1) : relativeWorkloadPathFromRoot;
|
||||
buildingCmd.put("workload", "workload=" + relativeWorkloadPathFromRoot);
|
||||
}
|
||||
|
||||
if (!buildingCmd.containsKey("alias")) {
|
||||
buildingCmd.put("alias", "alias="+WORKLOAD_SCENARIO_STEP);
|
||||
buildingCmd.put("alias", "alias=" + WORKLOAD_SCENARIO_STEP);
|
||||
}
|
||||
|
||||
String alias = buildingCmd.get("alias");
|
||||
for (String token : new String[]{"WORKLOAD", "SCENARIO", "STEP"}) {
|
||||
if (!alias.contains(token)) {
|
||||
logger.warn("Your alias template '" + alias + "' does not contain " + token + ", which will " +
|
||||
"cause your metrics to be combined under the same name. It is strongly advised that you " +
|
||||
"include them in a template like " + WORKLOAD_SCENARIO_STEP + ".");
|
||||
"cause your metrics to be combined under the same name. It is strongly advised that you " +
|
||||
"include them in a template like " + WORKLOAD_SCENARIO_STEP + ".");
|
||||
}
|
||||
}
|
||||
|
||||
alias = alias.replaceAll("WORKLOAD", workloadContent.asPath().getFileName().toString().replaceAll(
|
||||
".yaml",""));
|
||||
alias = alias.replaceAll("SCENARIO", scenarioName);
|
||||
alias = alias.replaceAll("STEP", stepName);
|
||||
alias=(alias.startsWith("alias=") ? alias : "alias="+alias);
|
||||
String workloadToken = workloadContent.asPath().getFileName().toString();
|
||||
|
||||
alias = alias.replaceAll("WORKLOAD", sanitize(workloadToken));
|
||||
alias = alias.replaceAll("SCENARIO", sanitize(scenarioName));
|
||||
alias = alias.replaceAll("STEP", sanitize(stepName));
|
||||
alias = (alias.startsWith("alias=") ? alias : "alias=" + alias);
|
||||
buildingCmd.put("alias", alias);
|
||||
|
||||
logger.debug("Named scenario built command: " + String.join(" ", buildingCmd.values()));
|
||||
@ -178,6 +178,13 @@ public class NBCLIScenarioParser {
|
||||
|
||||
}
|
||||
|
||||
public static String sanitize(String word) {
|
||||
String sanitized = word;
|
||||
sanitized = sanitized.replaceAll("\\..+$","");
|
||||
sanitized = sanitized.replaceAll("[^a-zA-Z0-9]+","");
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
private static final Pattern WordAndMaybeAssignment = Pattern.compile("(?<name>\\w+)((?<oper>=+)(?<val>.+))?");
|
||||
|
||||
private static LinkedHashMap<String, CmdArg> parseStep(String cmd) {
|
||||
@ -252,10 +259,10 @@ public class NBCLIScenarioParser {
|
||||
public static List<WorkloadDesc> getWorkloadsWithScenarioScripts(String... includes) {
|
||||
|
||||
List<Content<?>> activities = NBIO.all()
|
||||
.prefix(SEARCH_IN)
|
||||
.prefix(includes)
|
||||
.extension("yaml")
|
||||
.list();
|
||||
.prefix(SEARCH_IN)
|
||||
.prefix(includes)
|
||||
.extension("yaml")
|
||||
.list();
|
||||
|
||||
List<Path> yamlPathList = activities.stream().map(Content::asPath).collect(Collectors.toList());
|
||||
|
||||
@ -264,11 +271,11 @@ public class NBCLIScenarioParser {
|
||||
for (Path yamlPath : yamlPathList) {
|
||||
String referenced = yamlPath.toString();
|
||||
referenced = referenced.startsWith("/") ? referenced.substring(1) :
|
||||
referenced;
|
||||
referenced;
|
||||
|
||||
Content<?> content = NBIO.all().prefix(SEARCH_IN)
|
||||
.name(referenced).extension("yaml")
|
||||
.one();
|
||||
.name(referenced).extension("yaml")
|
||||
.one();
|
||||
|
||||
StmtsDocList stmts = StatementsLoader.load(logger, content);
|
||||
|
||||
@ -290,11 +297,11 @@ public class NBCLIScenarioParser {
|
||||
if (scenarioNames != null && scenarioNames.size() > 0) {
|
||||
String path = yamlPath.toString();
|
||||
path = path.startsWith(FileSystems.getDefault().getSeparator()) ? path.substring(1) : path;
|
||||
LinkedHashMap<String,String> sortedTemplates = new LinkedHashMap<>();
|
||||
LinkedHashMap<String, String> sortedTemplates = new LinkedHashMap<>();
|
||||
ArrayList<String> keyNames = new ArrayList<>(templates.keySet());
|
||||
Collections.sort(keyNames);
|
||||
for (String keyName : keyNames) {
|
||||
sortedTemplates.put(keyName,templates.get(keyName));
|
||||
sortedTemplates.put(keyName, templates.get(keyName));
|
||||
}
|
||||
|
||||
String description = stmts.getDescription();
|
||||
@ -328,10 +335,9 @@ public class NBCLIScenarioParser {
|
||||
while (matcher.find()) {
|
||||
String match = matcher.group(1);
|
||||
String[] matchArray = match.split(":");
|
||||
if (matchArray.length==1) {
|
||||
if (matchArray.length == 1) {
|
||||
templates.put(matchArray[0], "-none-");
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
templates.put(matchArray[0], matchArray[1]);
|
||||
}
|
||||
}
|
||||
|
@ -4,7 +4,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -23,7 +23,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-core</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@ -47,7 +47,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-docker</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
@ -186,10 +186,15 @@ public class NBCLI {
|
||||
}
|
||||
|
||||
String reportGraphiteTo = options.wantsReportGraphiteTo();
|
||||
|
||||
if (options.wantsDockerMetrics()) {
|
||||
logger.info("Docker metrics is enabled. Docker must be installed for this to work");
|
||||
DockerMetricsManager dmh = new DockerMetricsManager();
|
||||
dmh.startMetrics();
|
||||
Map<String,String> dashboardOptions = Map.of(
|
||||
DockerMetricsManager.GRAFANA_TAG, options.getDockerGrafanaTag()
|
||||
);
|
||||
dmh.startMetrics(dashboardOptions);
|
||||
|
||||
String warn = "Docker Containers are started, for grafana and prometheus, hit" +
|
||||
" these urls in your browser: http://<host>:3000 and http://<host>:9090";
|
||||
logger.warn(warn);
|
||||
|
@ -79,6 +79,7 @@ public class NBCLIOptions {
|
||||
private static final String GRAALJS_ENGINE = "--graaljs";
|
||||
private static final String NASHORN_ENGINE = "--nashorn";
|
||||
private static final String GRAALJS_COMPAT = "--graaljs-compat";
|
||||
private static final String DOCKER_GRAFANA_TAG = "--docker-grafana-tag";
|
||||
|
||||
|
||||
public static final Set<String> RESERVED_WORDS = new HashSet<>() {{
|
||||
@ -103,7 +104,7 @@ public class NBCLIOptions {
|
||||
private String reportGraphiteTo = null;
|
||||
private String reportCsvTo = null;
|
||||
private int reportInterval = 10;
|
||||
private String metricsPrefix = "nosqlbench.";
|
||||
private String metricsPrefix = "nosqlbench";
|
||||
private String wantsMetricsForActivity;
|
||||
private String sessionName = "";
|
||||
private boolean showScript = false;
|
||||
@ -129,6 +130,7 @@ public class NBCLIOptions {
|
||||
private Scenario.Engine engine = Scenario.Engine.Graalvm;
|
||||
private boolean graaljs_compat = false;
|
||||
private int hdr_digits = 4;
|
||||
private String docker_grafana_tag = "7.0.1";
|
||||
|
||||
public NBCLIOptions(String[] args) {
|
||||
parse(args);
|
||||
@ -184,6 +186,10 @@ public class NBCLIOptions {
|
||||
}
|
||||
|
||||
switch (word) {
|
||||
case DOCKER_GRAFANA_TAG:
|
||||
arglist.removeFirst();
|
||||
docker_grafana_tag = readWordOrThrow(arglist,"grafana docker tag");
|
||||
break;
|
||||
case GRAALJS_COMPAT:
|
||||
graaljs_compat = true;
|
||||
arglist.removeFirst();
|
||||
@ -630,6 +636,10 @@ public class NBCLIOptions {
|
||||
return wantsWorkloadsList;
|
||||
}
|
||||
|
||||
public String getDockerGrafanaTag() {
|
||||
return docker_grafana_tag;
|
||||
}
|
||||
|
||||
public static class LoggerConfig {
|
||||
public String file;
|
||||
public String pattern = ".*";
|
||||
|
@ -1,5 +1,6 @@
|
||||
package io.nosqlbench.engine.cli;
|
||||
|
||||
import io.nosqlbench.engine.api.scenarios.NBCLIScenarioParser;
|
||||
import io.nosqlbench.nb.api.errors.BasicError;
|
||||
import org.junit.Test;
|
||||
|
||||
@ -98,4 +99,10 @@ public class NBCLIScenarioParserTest {
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSanitizer() {
|
||||
String sanitized = NBCLIScenarioParser.sanitize("A-b,c_d");
|
||||
assertThat(sanitized).isEqualTo("Abcd");
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -28,7 +28,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
@ -90,9 +90,9 @@ public class MetricReporters implements Shutdownable {
|
||||
}
|
||||
}
|
||||
|
||||
public MetricReporters addGraphite(String host, int graphitePort, String prefix) {
|
||||
public MetricReporters addGraphite(String host, int graphitePort, String globalPrefix) {
|
||||
|
||||
logger.debug("Adding graphite reporter to " + host + " with port " + graphitePort + " and prefix " + prefix);
|
||||
logger.debug("Adding graphite reporter to " + host + " with port " + graphitePort + " and prefix " + globalPrefix);
|
||||
|
||||
if (metricRegistries.isEmpty()) {
|
||||
throw new RuntimeException("There are no metric registries.");
|
||||
@ -101,8 +101,9 @@ public class MetricReporters implements Shutdownable {
|
||||
for (PrefixedRegistry prefixedRegistry : metricRegistries) {
|
||||
|
||||
Graphite graphite = new Graphite(new InetSocketAddress(host, graphitePort));
|
||||
String _prefix = prefixedRegistry.prefix != null ? (!prefixedRegistry.prefix.isEmpty() ? globalPrefix + "." + prefixedRegistry.prefix : globalPrefix) : globalPrefix;
|
||||
GraphiteReporter graphiteReporter = GraphiteReporter.forRegistry(prefixedRegistry.metricRegistry)
|
||||
.prefixedWith(prefixedRegistry.prefix != null ? (!prefixedRegistry.prefix.isEmpty() ? prefix + "." + prefixedRegistry.prefix : prefix) : prefix)
|
||||
.prefixedWith(_prefix)
|
||||
.convertRatesTo(TimeUnit.SECONDS)
|
||||
.convertDurationsTo(TimeUnit.NANOSECONDS)
|
||||
.filter(ActivityMetrics.METRIC_FILTER)
|
||||
|
@ -4,7 +4,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -65,7 +65,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
|
@ -2,10 +2,7 @@ package io.nosqlbench.engine.docker;
|
||||
|
||||
|
||||
import com.github.dockerjava.api.DockerClient;
|
||||
import com.github.dockerjava.api.command.CreateContainerResponse;
|
||||
import com.github.dockerjava.api.command.DockerCmdExecFactory;
|
||||
import com.github.dockerjava.api.command.ListContainersCmd;
|
||||
import com.github.dockerjava.api.command.LogContainerCmd;
|
||||
import com.github.dockerjava.api.command.*;
|
||||
import com.github.dockerjava.api.model.*;
|
||||
import com.github.dockerjava.core.DefaultDockerClientConfig;
|
||||
import com.github.dockerjava.core.DockerClientBuilder;
|
||||
@ -35,20 +32,21 @@ public class DockerHelper {
|
||||
private DockerClient dockerClient;
|
||||
private Logger logger = LoggerFactory.getLogger(DockerHelper.class);
|
||||
|
||||
public DockerHelper(){
|
||||
public DockerHelper() {
|
||||
System.getProperties().setProperty(DOCKER_HOST, DOCKER_HOST_ADDR);
|
||||
this.config = DefaultDockerClientConfig.createDefaultConfigBuilder().withDockerHost(DOCKER_HOST_ADDR).build();
|
||||
DockerCmdExecFactory dockerCmdExecFactory = new OkHttpDockerCmdExecFactory()
|
||||
.withReadTimeout(60000)
|
||||
.withConnectTimeout(60000);
|
||||
.withReadTimeout(60000)
|
||||
.withConnectTimeout(60000);
|
||||
|
||||
this.dockerClient = DockerClientBuilder.getInstance(config)
|
||||
.withDockerCmdExecFactory(dockerCmdExecFactory)
|
||||
.build();
|
||||
.withDockerCmdExecFactory(dockerCmdExecFactory)
|
||||
.build();
|
||||
}
|
||||
|
||||
public String startDocker(String IMG, String tag, String name, List<Integer> ports, List<String> volumeDescList, List<String> envList, List<String> cmdList, String reload, List<String> linkNames) {
|
||||
logger.debug("Starting docker with img=" + IMG + ", tag=" + tag + ", name=" + name + ", " +
|
||||
"ports=" + ports + ", volumes=" + volumeDescList + ", env=" + envList + ", cmds=" + cmdList + ", reload=" + reload);
|
||||
"ports=" + ports + ", volumes=" + volumeDescList + ", env=" + envList + ", cmds=" + cmdList + ", reload=" + reload);
|
||||
|
||||
boolean existingContainer = removeExitedContainers(name);
|
||||
|
||||
@ -60,7 +58,7 @@ public class DockerHelper {
|
||||
|
||||
Container containerId = searchContainer(name, reload);
|
||||
if (containerId != null) {
|
||||
logger.debug("container is already up with the id: "+ containerId.getId());
|
||||
logger.debug("container is already up with the id: " + containerId.getId());
|
||||
return null;
|
||||
}
|
||||
|
||||
@ -72,14 +70,14 @@ public class DockerHelper {
|
||||
List<Image> dockerList = dockerClient.listImagesCmd().withImageNameFilter(IMG).exec();
|
||||
if (dockerList.size() == 0) {
|
||||
dockerClient.pullImageCmd(IMG)
|
||||
.withTag(tag)
|
||||
.exec(new PullImageResultCallback()).awaitSuccess();
|
||||
.withTag(tag)
|
||||
.exec(new PullImageResultCallback()).awaitSuccess();
|
||||
|
||||
dockerList = dockerClient.listImagesCmd().withImageNameFilter(IMG).exec();
|
||||
if (dockerList.size() == 0) {
|
||||
logger.error(String.format("Image %s not found, unable to automatically pull image." +
|
||||
" Check `docker images`",
|
||||
IMG));
|
||||
" Check `docker images`",
|
||||
IMG));
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
@ -107,44 +105,27 @@ public class DockerHelper {
|
||||
volumeBindList.add(new Bind(volFrom, vol));
|
||||
}
|
||||
|
||||
|
||||
CreateContainerResponse containerResponse;
|
||||
List<Link> links = linkNames.stream().map(x->new Link(x,x)).collect(Collectors.toList());
|
||||
if (envList == null) {
|
||||
containerResponse = dockerClient.createContainerCmd(IMG + ":" + tag)
|
||||
.withCmd(cmdList)
|
||||
.withExposedPorts(tcpPorts)
|
||||
.withHostConfig(
|
||||
new HostConfig()
|
||||
.withPortBindings(portBindings)
|
||||
.withPublishAllPorts(true)
|
||||
.withBinds(volumeBindList)
|
||||
)
|
||||
.withName(name)
|
||||
//.withVolumes(volumeList)
|
||||
.withLinks(links)
|
||||
.exec();
|
||||
} else {
|
||||
long user = new UnixSystem().getUid();
|
||||
containerResponse = dockerClient.createContainerCmd(IMG + ":" + tag)
|
||||
.withEnv(envList)
|
||||
.withExposedPorts(tcpPorts)
|
||||
.withHostConfig(
|
||||
new HostConfig()
|
||||
.withPortBindings(portBindings)
|
||||
.withPublishAllPorts(true)
|
||||
.withBinds(volumeBindList)
|
||||
)
|
||||
.withName(name)
|
||||
.withLinks(links)
|
||||
.withUser(""+user)
|
||||
//.withVolumes(volumeList)
|
||||
.exec();
|
||||
List<Link> links = linkNames.stream().map(x -> new Link(x, x)).collect(Collectors.toList());
|
||||
CreateContainerCmd builder = dockerClient.createContainerCmd(IMG + ":" + tag);
|
||||
if (cmdList!=null) {
|
||||
builder = builder.withCmd(cmdList);
|
||||
}
|
||||
|
||||
builder = builder.withUser(String.valueOf(new UnixSystem().getUid()));
|
||||
builder = builder.withExposedPorts(tcpPorts);
|
||||
builder = builder.withHostConfig(new HostConfig()
|
||||
.withPortBindings(portBindings)
|
||||
.withPublishAllPorts(true)
|
||||
.withBinds(volumeBindList));
|
||||
builder = builder.withName(name);
|
||||
builder = builder.withLinks(links);
|
||||
if (envList!=null) {
|
||||
builder = builder.withEnv(envList);
|
||||
}
|
||||
CreateContainerResponse containerResponse = builder.exec();
|
||||
dockerClient.startContainerCmd(containerResponse.getId()).exec();
|
||||
|
||||
if (existingContainer){
|
||||
if (existingContainer) {
|
||||
logger.debug("Started existing container");
|
||||
return null;
|
||||
}
|
||||
@ -232,12 +213,12 @@ public class DockerHelper {
|
||||
public void pollLog(String containerId, ResultCallbackTemplate<LogContainerResultCallback, Frame> logCallback) {
|
||||
|
||||
LogContainerResultCallback loggingCallback = new
|
||||
LogContainerResultCallback();
|
||||
LogContainerResultCallback();
|
||||
|
||||
LogContainerCmd cmd = dockerClient.logContainerCmd(containerId)
|
||||
.withStdOut(true)
|
||||
.withFollowStream(true)
|
||||
.withTailAll();
|
||||
.withStdOut(true)
|
||||
.withFollowStream(true)
|
||||
.withTailAll();
|
||||
|
||||
final boolean[] httpStarted = {false};
|
||||
cmd.exec(logCallback);
|
||||
|
@ -12,11 +12,13 @@ import com.github.dockerjava.api.model.ContainerNetworkSettings;
|
||||
import com.github.dockerjava.api.model.Frame;
|
||||
import com.github.dockerjava.core.async.ResultCallbackTemplate;
|
||||
import com.github.dockerjava.core.command.LogContainerResultCallback;
|
||||
import io.nosqlbench.nb.api.content.Content;
|
||||
import io.nosqlbench.nb.api.content.NBIO;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
@ -27,6 +29,8 @@ import static io.nosqlbench.engine.docker.RestHelper.post;
|
||||
|
||||
public class DockerMetricsManager {
|
||||
|
||||
public static final String GRAFANA_TAG = "grafana_tag";
|
||||
|
||||
private final DockerHelper dh;
|
||||
|
||||
String userHome = System.getProperty("user.home");
|
||||
@ -35,22 +39,22 @@ public class DockerMetricsManager {
|
||||
|
||||
public DockerMetricsManager() {
|
||||
dh = new DockerHelper();
|
||||
}
|
||||
}
|
||||
|
||||
public void startMetrics() {
|
||||
public void startMetrics(Map<String, String> options) {
|
||||
|
||||
String ip = startGraphite();
|
||||
|
||||
startPrometheus(ip);
|
||||
|
||||
startGrafana(ip);
|
||||
startGrafana(ip, options.get(GRAFANA_TAG));
|
||||
|
||||
}
|
||||
|
||||
private void startGrafana(String ip) {
|
||||
private void startGrafana(String ip, String tag) {
|
||||
|
||||
String GRAFANA_IMG = "grafana/grafana";
|
||||
String tag = "5.3.2";
|
||||
tag = (tag == null || tag.isEmpty()) ? "latest" : tag;
|
||||
String name = "grafana";
|
||||
List<Integer> port = Arrays.asList(3000);
|
||||
|
||||
@ -60,24 +64,24 @@ public class DockerMetricsManager {
|
||||
}
|
||||
|
||||
List<String> volumeDescList = Arrays.asList(
|
||||
userHome + "/.nosqlbench/grafana:/var/lib/grafana:rw"
|
||||
//cwd+"/docker-metrics/grafana:/grafana",
|
||||
//cwd+"/docker-metrics/grafana/datasources:/etc/grafana/provisioning/datasources",
|
||||
//cwd+"/docker-metrics/grafana/dashboardconf:/etc/grafana/provisioning/dashboards"
|
||||
//,cwd+"/docker-metrics/grafana/dashboards:/var/lib/grafana/dashboards:ro"
|
||||
userHome + "/.nosqlbench/grafana:/var/lib/grafana:rw"
|
||||
//cwd+"/docker-metrics/grafana:/grafana",
|
||||
//cwd+"/docker-metrics/grafana/datasources:/etc/grafana/provisioning/datasources",
|
||||
//cwd+"/docker-metrics/grafana/dashboardconf:/etc/grafana/provisioning/dashboards"
|
||||
//,cwd+"/docker-metrics/grafana/dashboards:/var/lib/grafana/dashboards:ro"
|
||||
);
|
||||
List<String> envList = Arrays.asList(
|
||||
"GF_SECURITY_ADMIN_PASSWORD=admin",
|
||||
"GF_AUTH_ANONYMOUS_ENABLED=\"true\"",
|
||||
"GF_SNAPSHOTS_EXTERNAL_SNAPSHOT_URL=https://assethub.datastax.com:3001",
|
||||
"GF_SNAPSHOTS_EXTERNAL_SNAPSHOT_NAME=\"Upload to DataStax\""
|
||||
"GF_SECURITY_ADMIN_PASSWORD=admin",
|
||||
"GF_AUTH_ANONYMOUS_ENABLED=\"true\"",
|
||||
"GF_SNAPSHOTS_EXTERNAL_SNAPSHOT_URL=https://assethub.datastax.com:3001",
|
||||
"GF_SNAPSHOTS_EXTERNAL_SNAPSHOT_NAME=\"Upload to DataStax\""
|
||||
);
|
||||
|
||||
String reload = null;
|
||||
List<String> linkNames = new ArrayList();
|
||||
linkNames.add("prom");
|
||||
String containerId = dh.startDocker(GRAFANA_IMG, tag, name, port, volumeDescList, envList, null, reload, linkNames);
|
||||
if (containerId == null){
|
||||
if (containerId == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -103,19 +107,19 @@ public class DockerMetricsManager {
|
||||
}
|
||||
|
||||
List<String> volumeDescList = Arrays.asList(
|
||||
//cwd+"/docker-metrics/prometheus:/prometheus",
|
||||
userHome + "/.nosqlbench/prometheus-conf:/etc/prometheus",
|
||||
userHome + "/.nosqlbench/prometheus:/prometheus"
|
||||
//"./prometheus/tg_dse.json:/etc/prometheus/tg_dse.json"
|
||||
//cwd+"/docker-metrics/prometheus:/prometheus",
|
||||
userHome + "/.nosqlbench/prometheus-conf:/etc/prometheus",
|
||||
userHome + "/.nosqlbench/prometheus:/prometheus"
|
||||
//"./prometheus/tg_dse.json:/etc/prometheus/tg_dse.json"
|
||||
);
|
||||
|
||||
List<String> envList = null;
|
||||
|
||||
List<String> cmdList = Arrays.asList(
|
||||
"--config.file=/etc/prometheus/prometheus.yml",
|
||||
"--storage.tsdb.path=/prometheus",
|
||||
"--storage.tsdb.retention=183d",
|
||||
"--web.enable-lifecycle"
|
||||
"--config.file=/etc/prometheus/prometheus.yml",
|
||||
"--storage.tsdb.path=/prometheus",
|
||||
"--storage.tsdb.retention=183d",
|
||||
"--web.enable-lifecycle"
|
||||
|
||||
);
|
||||
|
||||
@ -136,16 +140,23 @@ public class DockerMetricsManager {
|
||||
String name = "graphite-exporter";
|
||||
//TODO: look into UDP
|
||||
List<Integer> port = Arrays.asList(9108, 9109);
|
||||
List<String> volumeDescList = Arrays.asList();
|
||||
List<String> volumeDescList = new ArrayList<String>();
|
||||
|
||||
setupGraphiteFiles(volumeDescList);
|
||||
|
||||
List<String> envList = Arrays.asList();
|
||||
|
||||
String reload = null;
|
||||
List<String> linkNames = new ArrayList();
|
||||
dh.startDocker(GRAPHITE_EXPORTER_IMG, tag, name, port, volumeDescList, envList, null, reload, linkNames);
|
||||
|
||||
List<String> cmdOpts = Arrays.asList("--graphite.mapping-config=/tmp/graphite_mapping.conf");
|
||||
|
||||
dh.startDocker(GRAPHITE_EXPORTER_IMG, tag, name, port, volumeDescList, envList, cmdOpts, reload, linkNames);
|
||||
|
||||
logger.info("graphite exporter container started");
|
||||
|
||||
logger.info("searching for graphite exporter container ip");
|
||||
|
||||
ContainerNetworkSettings settings = dh.searchContainer(name, null).getNetworkSettings();
|
||||
Map<String, ContainerNetwork> networks = settings.getNetworks();
|
||||
String ip = null;
|
||||
@ -157,6 +168,29 @@ public class DockerMetricsManager {
|
||||
return ip;
|
||||
}
|
||||
|
||||
private void setupGraphiteFiles(List<String> volumeDescList) {
|
||||
String exporterConfig = NBIO.readCharBuffer("docker/graphite/graphite_mapping.conf").toString();
|
||||
|
||||
File nosqlbenchdir = new File(userHome, "/.nosqlbench/");
|
||||
mkdir(nosqlbenchdir);
|
||||
|
||||
File graphiteExporterDir = new File(userHome, "/.nosqlbench/graphite-exporter");
|
||||
mkdir(graphiteExporterDir);
|
||||
|
||||
Path mappingPath = Path.of(userHome, ".nosqlbench", "graphite-exporter", "graphite_mapping.conf");
|
||||
|
||||
if (!Files.exists(mappingPath)) {
|
||||
try {
|
||||
Files.writeString(mappingPath, exporterConfig);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("Error writing initial graphite mapping config in " + mappingPath, e);
|
||||
}
|
||||
}
|
||||
|
||||
volumeDescList.add(mappingPath.toString() + ":/tmp/graphite_mapping.conf");
|
||||
|
||||
}
|
||||
|
||||
private void setupPromFiles(String ip) {
|
||||
String datasource = NBIO.readCharBuffer("docker/prometheus/prometheus.yml").toString();
|
||||
|
||||
@ -213,14 +247,14 @@ public class DockerMetricsManager {
|
||||
}
|
||||
|
||||
private void mkdir(File dir) {
|
||||
if(dir.exists()){
|
||||
if (dir.exists()) {
|
||||
return;
|
||||
}
|
||||
if(! dir.mkdir()){
|
||||
if( dir.canWrite()){
|
||||
if (!dir.mkdir()) {
|
||||
if (dir.canWrite()) {
|
||||
System.out.println("no write access");
|
||||
}
|
||||
if( dir.canRead()){
|
||||
if (dir.canRead()) {
|
||||
System.out.println("no read access");
|
||||
}
|
||||
System.out.println("Could not create directory " + dir.getPath());
|
||||
@ -238,6 +272,7 @@ public class DockerMetricsManager {
|
||||
}
|
||||
return exists;
|
||||
}
|
||||
|
||||
private boolean promFilesExist() {
|
||||
File nosqlbenchDir = new File(userHome, "/.nosqlbench/");
|
||||
boolean exists = nosqlbenchDir.exists();
|
||||
@ -265,7 +300,7 @@ public class DockerMetricsManager {
|
||||
Files.setPosixFilePermissions(grafanaDirPath, perms);
|
||||
} catch (IOException e) {
|
||||
logger.error("failed to set permissions on grafana directory " +
|
||||
"directory " + userHome + "/.nosqlbench/grafana)");
|
||||
"directory " + userHome + "/.nosqlbench/grafana)");
|
||||
e.printStackTrace();
|
||||
System.exit(1);
|
||||
}
|
||||
@ -273,9 +308,31 @@ public class DockerMetricsManager {
|
||||
|
||||
|
||||
private void configureGrafana() {
|
||||
post("http://localhost:3000/api/dashboards/db", "docker/dashboards/analysis.json", true, "load analysis dashboard");
|
||||
post("http://localhost:3000/api/datasources", "docker/datasources/prometheus-datasource.yaml", true, "configure data source");
|
||||
logger.warn("default grafana creds are admin/admin");
|
||||
List<Content<?>> dashboardContent = NBIO.all().prefix("docker/dashboards").extension(".json").list();
|
||||
|
||||
for (Content<?> content : dashboardContent) {
|
||||
String dashboardData = content.asString();
|
||||
post(
|
||||
"http://localhost:3000/api/dashboards/db",
|
||||
() -> dashboardData,
|
||||
true,
|
||||
"load dashboard from " + content.asPath().toString()
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
List<Content<?>> datasources = NBIO.all().prefix("docker/datasources").extension(".yaml").list();
|
||||
|
||||
for (Content<?> datasource : datasources) {
|
||||
String datasourceContent = datasource.asString();
|
||||
post(
|
||||
"http://localhost:3000/api/datasources",
|
||||
() -> datasourceContent,
|
||||
true,
|
||||
"configure data source from " + datasource.asPath().toString());
|
||||
}
|
||||
|
||||
logger.warn("default grafana creds are admin/admin");
|
||||
}
|
||||
|
||||
|
||||
|
@ -10,6 +10,7 @@ import java.net.http.HttpClient;
|
||||
import java.net.http.HttpRequest;
|
||||
import java.net.http.HttpResponse;
|
||||
import java.util.Base64;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
public class RestHelper {
|
||||
private static Logger logger = LoggerFactory.getLogger(RestHelper.class);
|
||||
@ -23,8 +24,15 @@ public class RestHelper {
|
||||
}
|
||||
|
||||
|
||||
public static HttpResponse<String> post(String url, String path, boolean auth, String taskname) {
|
||||
logger.debug("posting to " + url + " with path:" + path +", auth: " + auth + " task:" + taskname);
|
||||
public static HttpResponse<String> post(String url,
|
||||
Supplier<String> contentSupplier, boolean auth,
|
||||
String taskname) {
|
||||
String content =null;
|
||||
if (contentSupplier!=null) {
|
||||
content = contentSupplier.get();
|
||||
}
|
||||
|
||||
logger.debug("posting to " + url + ", auth: " + auth + " task:" + taskname);
|
||||
|
||||
HttpRequest.Builder builder = HttpRequest.newBuilder();
|
||||
builder = builder.uri(URI.create(url));
|
||||
@ -34,11 +42,9 @@ public class RestHelper {
|
||||
builder = builder.header("Authorization", basicAuth("admin", "admin"));
|
||||
}
|
||||
|
||||
if (path !=null) {
|
||||
logger.debug("POSTing " + path + " to " + url);
|
||||
String dashboard = NBIO.readCharBuffer(path).toString();
|
||||
logger.debug("length of content for " + path + " is " + dashboard.length());
|
||||
builder = builder.POST(HttpRequest.BodyPublishers.ofString(dashboard));
|
||||
if (content !=null) {
|
||||
logger.debug("POSTing " + content.length() + "bytes to " + url + " for " + taskname);
|
||||
builder = builder.POST(HttpRequest.BodyPublishers.ofString(content));
|
||||
builder.setHeader("Content-Type", "application/json");
|
||||
} else {
|
||||
logger.debug(("POSTing empty body to " + url));
|
||||
|
@ -21,6 +21,7 @@
|
||||
"provisioned": true
|
||||
},
|
||||
"dashboard": {
|
||||
"id": null,
|
||||
"__inputs": [
|
||||
{
|
||||
"description": "",
|
||||
@ -73,7 +74,6 @@
|
||||
"editable": true,
|
||||
"gnetId": null,
|
||||
"graphTooltip": 1,
|
||||
"id": 1,
|
||||
"iteration": 1552332041877,
|
||||
"links": [],
|
||||
"panels": [
|
||||
@ -7069,7 +7069,7 @@
|
||||
]
|
||||
},
|
||||
"timezone": "",
|
||||
"title": "Analysis",
|
||||
"title": "NoSQLBench (deprecated)",
|
||||
"id": null,
|
||||
"uid": null,
|
||||
"version": 1
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,138 @@
|
||||
mappings:
|
||||
#- match: 'nosqlbench\\.workloads\\.(.+?)_(.+?)?_(.+?)\\.(.+?)\\.(.+?)_rate'
|
||||
# nosqlbench_workloads_ cqliot_default_schema_tries_stddev{instance="172.17.0.2:9108",job="graphite_import"}
|
||||
|
||||
# meter avg rate
|
||||
- match: 'nosqlbench\.workloads\.([0-9a-zA-Z]+)_([0-9a-zA-Z]+)?_([^.]+)\.(.+?)\.m(1|5|15).rate'
|
||||
match_type: regex
|
||||
name: $4
|
||||
labels:
|
||||
workload: $1
|
||||
scenario: $2
|
||||
step: $3
|
||||
appname: "nosqlbench"
|
||||
usermode: "named_scenario"
|
||||
property: "m${5}_rate"
|
||||
type: avg_rate
|
||||
avg_of: "${5}m"
|
||||
|
||||
# meter mean_rate
|
||||
- match: 'nosqlbench\.workloads\.([0-9a-zA-Z]+)_([0-9a-zA-Z]+)?_([^.]+)\.(.+?)\.mean_rate'
|
||||
match_type: regex
|
||||
name: $4
|
||||
labels:
|
||||
workload: $1
|
||||
scenario: $2
|
||||
step: $3
|
||||
appname: "nosqlbench"
|
||||
usermode: "named_scenario"
|
||||
property: "mean_rate"
|
||||
type: "avg_rate"
|
||||
avg_of: "run"
|
||||
|
||||
# histogram p0
|
||||
- match: 'nosqlbench\.workloads\.([0-9a-zA-Z]+)_([0-9a-zA-Z]+)?_([^.]+)\.(.+?)\.(min)'
|
||||
match_type: regex
|
||||
name: $4
|
||||
labels:
|
||||
workload: $1
|
||||
scenario: $2
|
||||
step: $3
|
||||
appname: "nosqlbench"
|
||||
usermode: "named_scenario"
|
||||
property: "min"
|
||||
type: "pctile"
|
||||
pctile: "0"
|
||||
|
||||
# histogram p100
|
||||
- match: 'nosqlbench\.workloads\.([0-9a-zA-Z]+)_([0-9a-zA-Z]+)?_([^.]+)\.(.+?)\.(max)'
|
||||
match_type: regex
|
||||
name: $4
|
||||
labels:
|
||||
workload: $1
|
||||
scenario: $2
|
||||
step: $3
|
||||
appname: "nosqlbench"
|
||||
usermode: "named_scenario"
|
||||
property: "max"
|
||||
type: "pctile"
|
||||
pctile: "100"
|
||||
|
||||
# histogram pctile
|
||||
# nosqlbench_workloads_cqliot_default_rampup_phases_servicetime_p50
|
||||
- match: 'nosqlbench\.workloads\.([0-9a-zA-Z]+)_([0-9a-zA-Z]+)?_([^.]+)\.(.+?)\.p(.+)'
|
||||
match_type: regex
|
||||
name: $4
|
||||
labels:
|
||||
workload: $1
|
||||
scenario: $2
|
||||
step: $3
|
||||
appname: "nosqlbench"
|
||||
usermode: "named_scenario"
|
||||
property: "p${5}"
|
||||
type: "pctile"
|
||||
pctile: $5
|
||||
|
||||
# histogram
|
||||
- match: 'nosqlbench\.workloads\.([0-9a-zA-Z]+)_([0-9a-zA-Z]+)?_([^.]+)\.(.+?)\.(mean|stddev)'
|
||||
match_type: regex
|
||||
name: $4
|
||||
labels:
|
||||
workload: $1
|
||||
scenario: $2
|
||||
step: $3
|
||||
appname: "nosqlbench"
|
||||
usermode: "named_scenario"
|
||||
property: $5
|
||||
type: "pctile_stat"
|
||||
pctile_stat: $5
|
||||
|
||||
# error counter
|
||||
- match: 'nosqlbench\.workloads\.([0-9a-zA-Z]+)_([0-9a-zA-Z]+)?_([^.]+)\.(errorcounts)\.(.+?)\.(count)'
|
||||
match_type: regex
|
||||
name: "${4}_${5}"
|
||||
labels:
|
||||
workload: $1
|
||||
scenario: $2
|
||||
step: $3
|
||||
appname: "nosqlbench"
|
||||
usermode: "named_scenario"
|
||||
type: "counter"
|
||||
error: $5
|
||||
property: $6
|
||||
|
||||
# counter
|
||||
- match: 'nosqlbench\.workloads\.([0-9a-zA-Z]+)_([0-9a-zA-Z]+)?_([^.]+)\.(.+?)\.(count)'
|
||||
match_type: regex
|
||||
name: $4
|
||||
labels:
|
||||
workload: $1
|
||||
scenario: $2
|
||||
step: $3
|
||||
appname: "nosqlbench"
|
||||
usermode: "named_scenario"
|
||||
type: "counter"
|
||||
property: $5
|
||||
|
||||
# gauge
|
||||
- match: 'nosqlbench\.workloads\.([0-9a-zA-Z]+)_([0-9a-zA-Z]+)?_([^.]+)\.(.+)'
|
||||
match_type: regex
|
||||
name: $4
|
||||
labels:
|
||||
workload: $1
|
||||
scenario: $2
|
||||
step: $3
|
||||
appname: "nosqlbench"
|
||||
usermode: "named_scenario"
|
||||
property: "gauge"
|
||||
type: "gauge"
|
||||
#
|
||||
|
||||
- match: 'nosqlbench\.(.+)'
|
||||
match_type: regex
|
||||
name: "nosqlbench_${1}"
|
||||
labels:
|
||||
appname: "nosqlbench"
|
||||
usermode: "testing_metrics_exporter"
|
||||
usermode: "named_scenario"
|
||||
|
@ -4,7 +4,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -28,7 +28,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>docsys</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
@ -231,7 +231,7 @@ follows:
|
||||
- WORKLOAD - The simple name part of the fully qualified workload name. For example, with a workload (yaml path) of
|
||||
foo/bar/baz.yaml, the WORKLOAD name used here would be `baz`.
|
||||
|
||||
- SCENARIO - The name of the scenario as provided ot the command line.
|
||||
- SCENARIO - The name of the scenario as provided on the command line.
|
||||
|
||||
- STEP - The name of the step in the named scenario. If you used the list or string forms to provide a command template,
|
||||
then the steps are automatically named as a zero-padded number representing the step in the named scenario, starting
|
||||
@ -243,8 +243,9 @@ explaining why this is a bad idea.
|
||||
|
||||
:::info
|
||||
|
||||
UNDEF is handled before alias expansion above, so it is possible to force activity naming with `alias===UNDEF`. This is
|
||||
generally recommended, and will inform users if they try to set the alias in an unsafe way.
|
||||
UNDEF is handled before alias expansion above, so it is possible to force the default activity naming behavior above
|
||||
with `alias===UNDEF`. This is generally recommended, and will inform users if they try to set the alias in an unsafe
|
||||
way.
|
||||
|
||||
:::
|
||||
|
||||
|
@ -4,7 +4,7 @@
|
||||
<parent>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<relativePath>../mvn-defaults</relativePath>
|
||||
</parent>
|
||||
|
||||
@ -22,7 +22,7 @@
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
@ -3,7 +3,7 @@
|
||||
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>mvn-defaults</artifactId>
|
||||
<version>3.12.119-SNAPSHOT</version>
|
||||
<version>3.12.120-SNAPSHOT</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<properties>
|
||||
|
@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -31,7 +31,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-annotations</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
30 nb/pom.xml
@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -24,36 +24,36 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-web</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-kafka</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-cli</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-docs</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-core</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-extensions</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<!-- <dependency>-->
@ -65,43 +65,43 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-diag</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-tcp</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-http</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cql-shaded</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cqlverify</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-mongodb</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<!-- <dependency>-->
@ -259,7 +259,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-mongodb</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>
</dependencies>
</profile>

@ -39,9 +39,9 @@ public class NBCliIntegrationTests {
public void listWorkloadsTest() {
ProcessInvoker invoker = new ProcessInvoker();
invoker.setLogDir("logs/test");
ProcessResult result = invoker.run("workload-test", 15,
java, "-jar", JARNAME, "--logs-dir", "logs/test", "--list" +
"-workloads"
ProcessResult result = invoker.run(
"workload-test", 15, java, "-jar",
JARNAME, "--logs-dir", "logs/test", "--list-workloads"
);
System.out.println(result.getStdoutData());
System.out.println(result.getStderrData());

2 pom.xml
@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>mvn-defaults</relativePath>
</parent>

@ -7,7 +7,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -23,14 +23,14 @@

<dependency>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<artifactId>nb-api</artifactId>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lang</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -20,7 +20,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-basics</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -20,13 +20,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-basics</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -24,7 +24,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-basics</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>
</dependencies>

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -18,7 +18,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

@ -17,32 +17,32 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-realdata</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-realer</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-random</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<artifactId>virtdata-lib-basics</artifactId>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
<artifactId>virtdata-lib-curves4</artifactId>
</dependency>

@ -50,7 +50,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>docsys</artifactId>
<version>3.12.119-SNAPSHOT</version>
<version>3.12.120-SNAPSHOT</version>
</dependency>

<dependency>