diff --git a/.github/workflows/blocking_issues.yml b/.github/workflows/blocking_issues.yml
deleted file mode 100644
index 7bea810ac..000000000
--- a/.github/workflows/blocking_issues.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-name: Blocking Issues
-
-on:
- issues:
- types: [closed]
- pull_request_target:
- types: [opened, edited]
-
-jobs:
- blocking_issues:
- runs-on: ubuntu-latest
- name: Checks for blocking issues
-
- steps:
- - uses: Levi-Lesches/blocking-issues@v1.1
diff --git a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml
index 46468b531..197d058a1 100644
--- a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml
+++ b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml
@@ -2,7 +2,7 @@
# nb5 run driver=mongodb workload=/path/to/mongodb-keyvalue2.yaml tags=block:schema connection='mongodb+srv://user:pass@sample-db.host.mongodb.net/?retryWrites=true&w=majority' database=baselines -vv --show-stacktraces
# nb5 run driver=mongodb workload=/path/to/mongodb-keyvalue2.yaml tags=block:rampup cycles=25 connection='mongodb+srv://user:pass@sample-db.host.mongodb.net/?retryWrites=true&w=majority' database=baselines -vv --show-stacktraces
# nb5 run driver=mongodb workload=/path/to/mongodb-keyvalue2.yaml tags='block:main-.*' cycles=25 connection='mongodb+srv://user:pass@sample-db.host.mongodb.net/?retryWrites=true&w=majority' database=baselines -vv --show-stacktraces
-min_version: "4.17.30"
+min_version: "4.17.31"
description: |
This workload is analogous to the cql-keyvalue2 workload, just implemented for MongoDB.
@@ -52,13 +52,17 @@ blocks:
key: {
bsonType: "string",
description: "'key' must be a string and is required"
+ },
+ value: {
+ bsonType: "string",
+ description: "'value' must be a string and is optional but, recommended"
}
}
}
},
validationLevel: "strict",
validationAction: "error",
- comment: "keyvalue collection creation with strict types and required 'key' field."
+ comment: "keyvalue collection creation with strict types and a required 'key' field."
}
create-key-index: |
{
diff --git a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml
index ce3e51810..049769b68 100644
--- a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml
+++ b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml
@@ -2,7 +2,7 @@
# nb5 run driver=mongodb workload=/path/to/mongodb-tabular2.yaml tags=block:schema connection='mongodb+srv://user:pass@sample-db.host.mongodb.net/?retryWrites=true&w=majority' database=baselines -vv --show-stacktraces
# nb5 run driver=mongodb workload=/path/to/mongodb-tabular2.yaml tags=block:rampup cycles=25 connection='mongodb+srv://user:pass@sample-db.host.mongodb.net/?retryWrites=true&w=majority' database=baselines -vv --show-stacktraces
# nb5 run driver=mongodb workload=/path/to/mongodb-tabular2.yaml tags='block:main.*' cycles=25 connection='mongodb+srv://user:pass@sample-db.host.mongodb.net/?retryWrites=true&w=majority' database=baselines -vv --show-stacktraces
-min_version: "4.17.30"
+min_version: "4.17.31"
description: |
This workload is analogous to the cql-tabular2 workload, just implemented for MongoDB.
diff --git a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml
index 386c1a94f..ef9349d1b 100644
--- a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml
+++ b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml
@@ -6,7 +6,7 @@
# https://www.mongodb.com/community/forums/t/how-to-store-a-uuid-with-binary-subtype-0x04-using-the-mongodb-java-driver/13184
# https://www.mongodb.com/community/forums/t/problem-inserting-uuid-field-with-binary-subtype-via-atlas-web-ui/1071/4
# https://www.mongodb.com/community/forums/t/timeseries-last-x-documents/186574/5
-min_version: "4.17.30"
+min_version: "4.17.31"
description: |
This workload is analogous to the cql-timeseries2 workload, just implemented for MongoDB.
diff --git a/adapters-api/src/main/resources/workload_definition/README.md b/adapters-api/src/main/resources/workload_definition/README.md
index 7bcd0ba9c..c0464384e 100644
--- a/adapters-api/src/main/resources/workload_definition/README.md
+++ b/adapters-api/src/main/resources/workload_definition/README.md
@@ -63,17 +63,21 @@ The process of loading a workload definition occurs in several discrete steps du
session:
1. The workload file is loaded.
-2. Template variables are interposed.
+2. Template variables from the activity parameters are interposed into the raw contents of the
+ file.
3. The file is deserialized from its native form into a raw data structure.
4. The raw data structure is transformed into a normalized data structure according to the Op
Template normalization rules.
-5. The data is provided to the ParsedOp API for use by the developer.
-6. The DriverAdapter is loaded which understands the op fields provided in the op template.
-7. The DriverAdapter uses its documented rules to determine which types of native driver operations
+5. Each op template is then denormalized into a self-contained data structure, containing all
+   the provided bindings, params, and tags from the upper layers of the doc structure.
+6. The data is provided to the ParsedOp API for use by the developer.
+7. The DriverAdapter, which understands the op fields provided in the op template, is loaded.
+8. The DriverAdapter uses its documented rules to determine which types of native driver operations
each op template is intended to represent. This is called **Op Mapping**.
-8. The DriverAdapter uses the identified types to create dispensers of native driver operations.
- This is called **Op Dispensing**.
-9. The op dispensers are arranged into an indexed bank of op sources according to the specified
+9. The DriverAdapter (via the selected Op Mapper) uses the identified types to create dispensers of
+ native driver operations. This is called **Op Dispensing**.
+10. The op dispensers are arranged into an indexed bank of op sources according to the specified
    ratios and/or sequencing strategy. From this point on, NoSQLBench can construct an
    operation for any given cycle at high speed.
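+
+For example, a minimal op template that flows through the steps above might look like the
+following sketch (the binding function and statement shown here are illustrative, not taken
+from any shipped workload):
+
+```yaml
+bindings:
+  mykey: NumberNameToString()
+ops:
+  op1: "select * from baselines.keyvalue where key='{mykey}';"
+```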
diff --git a/docsys/pom.xml b/docsys/pom.xml
index 02a8e5490..5ea4dcbf4 100644
--- a/docsys/pom.xml
+++ b/docsys/pom.xml
@@ -50,22 +50,22 @@
        <dependency>
            <groupId>org.eclipse.jetty</groupId>
            <artifactId>jetty-server</artifactId>
-            <version>11.0.11</version>
+            <version>11.0.12</version>
        </dependency>
        <dependency>
            <groupId>org.eclipse.jetty</groupId>
            <artifactId>jetty-servlets</artifactId>
-            <version>11.0.11</version>
+            <version>11.0.12</version>
        </dependency>
        <dependency>
            <groupId>org.eclipse.jetty</groupId>
            <artifactId>jetty-servlet</artifactId>
-            <version>11.0.11</version>
+            <version>11.0.12</version>
        </dependency>
        <dependency>
            <groupId>org.eclipse.jetty</groupId>
            <artifactId>jetty-rewrite</artifactId>
-            <version>11.0.11</version>
+            <version>11.0.12</version>
        </dependency>
diff --git a/engine-rest/pom.xml b/engine-rest/pom.xml
index c1b72cd64..25583745a 100644
--- a/engine-rest/pom.xml
+++ b/engine-rest/pom.xml
@@ -29,7 +29,7 @@
        <dependency>
            <groupId>io.swagger.core.v3</groupId>
            <artifactId>swagger-models</artifactId>
-            <version>2.2.2</version>
+            <version>2.2.3</version>
        </dependency>
diff --git a/mvn-defaults/pom.xml b/mvn-defaults/pom.xml
index 1853f1d24..e0dcc4f7e 100644
--- a/mvn-defaults/pom.xml
+++ b/mvn-defaults/pom.xml
@@ -46,7 +46,7 @@
2.4.14.Final
1.0.0
- 1.32
+ 1.33
1.1.2.6
@@ -141,7 +141,7 @@
        <dependency>
            <groupId>io.dropwizard.metrics</groupId>
            <artifactId>metrics-core</artifactId>
-            <version>4.2.10</version>
+            <version>4.2.12</version>
        </dependency>
@@ -189,7 +189,7 @@
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty-handler</artifactId>
-            <version>4.1.81.Final</version>
+            <version>4.1.82.Final</version>
        </dependency>
@@ -271,7 +271,7 @@
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
-            <version>2.9.0</version>
+            <version>2.9.1</version>
        </dependency>
@@ -301,7 +301,7 @@
        <dependency>
            <groupId>joda-time</groupId>
            <artifactId>joda-time</artifactId>
-            <version>2.11.1</version>
+            <version>2.11.2</version>
        </dependency>
diff --git a/nb-api/pom.xml b/nb-api/pom.xml
index d2aea36ec..5b4546aea 100644
--- a/nb-api/pom.xml
+++ b/nb-api/pom.xml
@@ -97,7 +97,7 @@
        <dependency>
            <groupId>com.amazonaws</groupId>
            <artifactId>aws-java-sdk-s3</artifactId>
-            <version>1.12.325</version>
+            <version>1.12.330</version>
        </dependency>
diff --git a/nbr-examples/src/test/resources/scripts/examples/extension_histostatslogger.js b/nbr-examples/src/test/resources/scripts/examples/extension_histostatslogger.js
index 377c1b357..50c99758b 100644
--- a/nbr-examples/src/test/resources/scripts/examples/extension_histostatslogger.js
+++ b/nbr-examples/src/test/resources/scripts/examples/extension_histostatslogger.js
@@ -18,16 +18,15 @@ activitydef = {
"alias" : "testhistostatslogger",
"driver" : "diag",
"cycles" : "50000",
- "threads" : "20",
- "interval" : "2000",
- "targetrate" : "10000.0",
+ "threads" : "5",
+ "rate" : "100.0",
"op" : "noop"
};
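+// Log histogram stats for every metric (name regex ".*") to a CSV file at half-second intervals.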
histostatslogger.logHistoStats("testing extention histostatslogger", ".*", "logs/histostats.csv", "0.5s");
print("started logging to logs/histostats.csv for all metrics at 1/2" +
" second intervals.");
-
scenario.start(activitydef);
-scenario.waitMillis(2000);
+scenario.waitMillis(4000);
scenario.stop(activitydef);
+
diff --git a/nbr/src/test/java/io/nosqlbench/cli/testing/ExitStatusIntegrationTests.java b/nbr/src/test/java/io/nosqlbench/cli/testing/ExitStatusIntegrationTests.java
index f50704c62..3d0185c04 100644
--- a/nbr/src/test/java/io/nosqlbench/cli/testing/ExitStatusIntegrationTests.java
+++ b/nbr/src/test/java/io/nosqlbench/cli/testing/ExitStatusIntegrationTests.java
@@ -19,40 +19,40 @@ package io.nosqlbench.cli.testing;
import org.junit.jupiter.api.Test;
import java.util.Optional;
-import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat;
-public class ExitStatusIntegrationTests {
+class ExitStatusIntegrationTests {
private final String java = Optional.ofNullable(System.getenv(
- "JAVA_HOME")).map(v -> v+"/bin/java").orElse("java");
+ "JAVA_HOME")).map(v -> v + "/bin/java").orElse("java");
private final static String JARNAME = "target/nbr.jar";
+
@Test
- public void testExitStatusOnBadParam() {
+ void testExitStatusOnBadParam() {
ProcessInvoker invoker = new ProcessInvoker();
invoker.setLogDir("logs/test");
ProcessResult result = invoker.run("exitstatus_badparam", 15,
java, "-jar", JARNAME, "--logs-dir", "logs/test/badparam/",
- "badparam"
+ "badparam"
);
assertThat(result.exception).isNull();
- String stderr = result.getStderrData().stream().collect(Collectors.joining("\n"));
+ String stderr = String.join("\n", result.getStderrData());
assertThat(stderr).contains("Scenario stopped due to error");
assertThat(result.exitStatus).isEqualTo(2);
}
@Test
- public void testExitStatusOnActivityInitException() {
+ void testExitStatusOnActivityInitException() {
ProcessInvoker invoker = new ProcessInvoker();
invoker.setLogDir("logs/test");
ProcessResult result = invoker.run("exitstatus_initexception", 15,
java, "-jar", JARNAME, "--logs-dir", "logs/test/initerror", "run",
- "driver=diag", "op=initdelay:initdelay=notanumber"
+ "driver=diag", "op=initdelay:initdelay=notanumber"
);
assertThat(result.exception).isNull();
-        String stderr = result.getStdoutData().stream().collect(Collectors.joining("\n"));
-        assertThat(stderr).contains("For input string: \"notanumber\"");
+        String stdout = String.join("\n", result.getStdoutData());
+        assertThat(stdout).contains("For input string: \"notanumber\"");
assertThat(result.exitStatus).isEqualTo(2);
}
@@ -73,19 +73,18 @@ public class ExitStatusIntegrationTests {
// }
@Test
- public void testExitStatusOnActivityOpException() {
+ void testExitStatusOnActivityOpException() {
ProcessInvoker invoker = new ProcessInvoker();
invoker.setLogDir("logs/test");
ProcessResult result = invoker.run("exitstatus_asyncstoprequest", 30,
java, "-jar", JARNAME, "--logs-dir", "logs/test/asyncstop", "run",
- "driver=diag", "cyclerate=5", "op=erroroncycle:erroroncycle=10", "cycles=2000", "-vvv"
+ "driver=diag", "cyclerate=1", "op=erroroncycle:erroroncycle=10", "cycles=2000", "-vvv"
);
assertThat(result.exception).isNull();
- String stdout = result.getStdoutData().stream().collect(Collectors.joining("\n"));
+ String stdout = String.join("\n", result.getStdoutData());
assertThat(stdout).contains("Diag was requested to stop on cycle 10");
assertThat(result.exitStatus).isEqualTo(2);
}
-
}
diff --git a/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/from_bytebuffer/to_string/ToBase64.java b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/from_bytebuffer/to_string/ToBase64.java
new file mode 100644
index 000000000..95e3f9303
--- /dev/null
+++ b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/from_bytebuffer/to_string/ToBase64.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2022 nosqlbench
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.nosqlbench.virtdata.library.basics.shared.from_bytebuffer.to_string;
+
+import com.amazonaws.util.Base64;
+import io.nosqlbench.virtdata.api.annotations.Categories;
+import io.nosqlbench.virtdata.api.annotations.Category;
+import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper;
+
+import java.nio.ByteBuffer;
+import java.util.function.Function;
+
+/**
+ * Takes a {@link ByteBuffer} and turns it into a Base64-encoded string.
+ */
+@ThreadSafeMapper
+@Categories({Category.general})
+public class ToBase64 implements Function<ByteBuffer, String> {
+
+ @Override
+ public String apply(ByteBuffer input) {
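+        // Work against an independent read-only view so the caller's buffer position is untouched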
+ ByteBuffer bb = input.asReadOnlyBuffer();
+ bb.position(0);
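+        // Copy the buffer contents from the start up to its limit before encoding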
+ byte[] b = new byte[bb.limit()];
+ bb.get(b, 0, b.length);
+ return Base64.encodeAsString(b);
+ }
+
+}