diff --git a/.all-contributorsrc b/.all-contributorsrc
new file mode 100644
index 000000000..cd5020d17
--- /dev/null
+++ b/.all-contributorsrc
@@ -0,0 +1,484 @@
+{
+ "projectOwner": "nosqlbench",
+ "projectName": "nosqlbench",
+ "repoType": "github",
+ "repoHost": "https://github.com",
+ "files": [
+ "CONTRIBUTING.md"
+ ],
+ "imageSize": 50,
+ "commit": false,
+ "commitConvention": "angular",
+ "contributorsPerLine": 6,
+ "contributorsSortAlphabetically": false,
+ "linkToUsage": false,
+ "skipCi": true,
+ "contributors": [
+ {
+ "login": "jshook",
+ "name": "Jonathan Shook",
+ "avatar_url": "https://avatars.githubusercontent.com/u/2148847?v=4",
+ "profile": "https://github.com/jshook",
+ "contributions": [
+ "review",
+ "tool",
+ "bug",
+ "business",
+ "code",
+ "content",
+ "data",
+ "doc",
+ "design",
+ "example",
+ "ideas",
+ "infra",
+ "maintenance",
+ "mentoring",
+ "platform",
+ "plugin",
+ "projectManagement",
+ "research",
+ "security",
+ "test",
+ "userTesting"
+ ]
+ },
+ {
+ "login": "MikeYaacoubStax",
+ "name": "MikeYaacoubStax",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117678633?v=4",
+ "profile": "https://github.com/MikeYaacoubStax",
+ "contributions": [
+ "review",
+ "tool",
+ "bug",
+ "business",
+ "code",
+ "content",
+ "data",
+ "doc",
+ "design",
+ "example",
+ "ideas",
+ "infra",
+ "maintenance",
+ "mentoring",
+ "platform",
+ "plugin",
+ "projectManagement",
+ "research",
+ "security",
+ "test",
+ "userTesting"
+ ]
+ },
+ {
+ "login": "jeffbanks",
+ "name": "Jeff Banks",
+ "avatar_url": "https://avatars.githubusercontent.com/u/4078933?v=4",
+ "profile": "http://jjbanks.com",
+ "contributions": [
+ "code",
+ "mentoring",
+ "test",
+ "bug",
+ "business",
+ "content",
+ "data",
+ "doc",
+ "design",
+ "example",
+ "ideas",
+ "infra",
+ "maintenance",
+ "platform",
+ "plugin",
+ "projectManagement",
+ "research",
+ "review",
+ "security",
+ "tool",
+ "userTesting"
+ ]
+ },
+ {
+ "login": "msmygit",
+ "name": "Madhavan",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19366623?v=4",
+ "profile": "https://github.com/msmygit",
+ "contributions": [
+ "code",
+ "bug",
+ "doc",
+ "ideas",
+ "question",
+ "research",
+ "review",
+ "tool",
+ "userTesting",
+ "talk",
+ "tutorial"
+ ]
+ },
+ {
+ "login": "hemidactylus",
+ "name": "Stefano Lottini",
+ "avatar_url": "https://avatars.githubusercontent.com/u/14221764?v=4",
+ "profile": "https://github.com/hemidactylus",
+ "contributions": [
+ "bug",
+ "blog",
+ "code",
+ "content",
+ "data",
+ "doc",
+ "example",
+ "eventOrganizing",
+ "mentoring",
+ "promotion",
+ "research",
+ "tutorial",
+ "userTesting",
+ "video"
+ ]
+ },
+ {
+ "login": "phact",
+      "name": "Sebastián Estévez",
+ "avatar_url": "https://avatars.githubusercontent.com/u/1313220?v=4",
+ "profile": "https://github.com/phact",
+ "contributions": [
+ "bug",
+ "design",
+ "business",
+ "code",
+ "content",
+ "data",
+ "doc",
+ "ideas",
+ "promotion",
+ "research"
+ ]
+ },
+ {
+ "login": "smccarthy788",
+ "name": "Sean McCarthy",
+ "avatar_url": "https://avatars.githubusercontent.com/u/6601281?v=4",
+ "profile": "https://github.com/smccarthy788",
+ "contributions": [
+ "mentoring",
+ "ideas",
+ "code",
+ "test"
+ ]
+ },
+ {
+ "login": "yabinmeng",
+ "name": "yabinmeng",
+ "avatar_url": "https://avatars.githubusercontent.com/u/16789452?v=4",
+ "profile": "https://github.com/yabinmeng",
+ "contributions": [
+ "bug",
+ "test"
+ ]
+ },
+ {
+ "login": "eolivelli",
+ "name": "Enrico Olivelli",
+ "avatar_url": "https://avatars.githubusercontent.com/u/9469110?v=4",
+ "profile": "http://eolivelli.blogspot.it/",
+ "contributions": [
+ "test",
+ "code",
+ "review"
+ ]
+ },
+ {
+ "login": "lhotari",
+ "name": "Lari Hotari",
+ "avatar_url": "https://avatars.githubusercontent.com/u/66864?v=4",
+ "profile": "https://github.com/lhotari",
+ "contributions": [
+ "bug",
+ "code",
+ "review"
+ ]
+ },
+ {
+ "login": "mfleming",
+ "name": "Matt Fleming",
+ "avatar_url": "https://avatars.githubusercontent.com/u/94254?v=4",
+ "profile": "http://www.codeblueprint.co.uk",
+ "contributions": [
+ "bug",
+ "design"
+ ]
+ },
+ {
+ "login": "tjake",
+ "name": "Jake Luciani",
+ "avatar_url": "https://avatars.githubusercontent.com/u/44456?v=4",
+ "profile": "https://github.com/tjake",
+ "contributions": [
+ "bug",
+ "ideas"
+ ]
+ },
+ {
+ "login": "lakshmi-M18",
+ "name": "Lakshmi Manjunatha",
+ "avatar_url": "https://avatars.githubusercontent.com/u/89935678?v=4",
+ "profile": "https://github.com/lakshmi-M18",
+ "contributions": [
+ "bug"
+ ]
+ },
+ {
+ "login": "pingtimeout",
+ "name": "Pierre Laporte",
+ "avatar_url": "https://avatars.githubusercontent.com/u/1159578?v=4",
+ "profile": "http://www.pingtimeout.fr",
+ "contributions": [
+ "ideas",
+ "bug"
+ ]
+ },
+ {
+ "login": "tatu-at-datastax",
+ "name": "Tatu Saloranta",
+ "avatar_url": "https://avatars.githubusercontent.com/u/87213665?v=4",
+ "profile": "https://github.com/tatu-at-datastax",
+ "contributions": [
+ "doc"
+ ]
+ },
+ {
+ "login": "alexott",
+ "name": "Alex Ott",
+ "avatar_url": "https://avatars.githubusercontent.com/u/30342?v=4",
+ "profile": "http://alexott.net",
+ "contributions": [
+ "platform",
+ "bug",
+ "code"
+ ]
+ },
+ {
+ "login": "jeffreyscarpenter",
+ "name": "Jeffrey Carpenter",
+ "avatar_url": "https://avatars.githubusercontent.com/u/12115970?v=4",
+ "profile": "https://github.com/jeffreyscarpenter",
+ "contributions": [
+ "bug",
+ "test",
+ "maintenance"
+ ]
+ },
+ {
+ "login": "yassermohamed81",
+ "name": "yassermohamed81",
+ "avatar_url": "https://avatars.githubusercontent.com/u/53837411?v=4",
+ "profile": "https://github.com/yassermohamed81",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "Pierrotws",
+ "name": "Pierre Sauvage",
+ "avatar_url": "https://avatars.githubusercontent.com/u/6002161?v=4",
+ "profile": "https://github.com/Pierrotws",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "dougwettlaufer",
+ "name": "Doug Wettlaufer",
+ "avatar_url": "https://avatars.githubusercontent.com/u/45750136?v=4",
+ "profile": "https://github.com/dougwettlaufer",
+ "contributions": [
+ "test"
+ ]
+ },
+ {
+ "login": "jeromatron",
+ "name": "Jeremy Hanna",
+ "avatar_url": "https://avatars.githubusercontent.com/u/254887?v=4",
+ "profile": "http://jeromatron.blogspot.com",
+ "contributions": [
+ "test",
+ "ideas"
+ ]
+ },
+ {
+ "login": "alicel",
+ "name": "Alice Lottini",
+ "avatar_url": "https://avatars.githubusercontent.com/u/2972347?v=4",
+ "profile": "https://github.com/alicel",
+ "contributions": [
+ "bug",
+ "ideas",
+ "test"
+ ]
+ },
+ {
+ "login": "EricBorczuk",
+ "name": "Eric Borczuk",
+ "avatar_url": "https://avatars.githubusercontent.com/u/4205492?v=4",
+ "profile": "https://github.com/EricBorczuk",
+ "contributions": [
+ "code",
+ "review",
+ "test"
+ ]
+ },
+ {
+ "login": "weideng1",
+ "name": "weideng1",
+ "avatar_url": "https://avatars.githubusercontent.com/u/5520525?v=4",
+ "profile": "https://github.com/weideng1",
+ "contributions": [
+ "test",
+ "ideas",
+ "code"
+ ]
+ },
+ {
+ "login": "ivansenic",
+ "name": "Ivan Senic",
+ "avatar_url": "https://avatars.githubusercontent.com/u/10600041?v=4",
+ "profile": "https://github.com/ivansenic",
+ "contributions": [
+ "test"
+ ]
+ },
+ {
+ "login": "justinchuch",
+ "name": "Justin Chu",
+ "avatar_url": "https://avatars.githubusercontent.com/u/15710241?v=4",
+ "profile": "https://justinchuch.wordpress.com/",
+ "contributions": [
+ "code",
+ "test",
+ "review"
+ ]
+ },
+ {
+ "login": "ds-steven-matison",
+ "name": "Steven Matison",
+ "avatar_url": "https://avatars.githubusercontent.com/u/70520951?v=4",
+ "profile": "https://ds-steven-matison.github.io/",
+ "contributions": [
+ "test",
+ "ideas"
+ ]
+ },
+ {
+ "login": "szimmer1",
+ "name": "shahar z",
+ "avatar_url": "https://avatars.githubusercontent.com/u/8455475?v=4",
+ "profile": "https://github.com/szimmer1",
+ "contributions": [
+ "ideas",
+ "code"
+ ]
+ },
+ {
+ "login": "ncarvind",
+ "name": "ncarvind",
+ "avatar_url": "https://avatars.githubusercontent.com/u/70302571?v=4",
+ "profile": "https://github.com/ncarvind",
+ "contributions": [
+ "code",
+ "test"
+ ]
+ },
+ {
+ "login": "MMirelli",
+ "name": "Massimiliano Mirelli",
+ "avatar_url": "https://avatars.githubusercontent.com/u/22191891?v=4",
+ "profile": "https://github.com/MMirelli",
+ "contributions": [
+ "platform"
+ ]
+ },
+ {
+ "login": "derrickCos",
+ "name": "Derrick Cosmas",
+ "avatar_url": "https://avatars.githubusercontent.com/u/25781387?v=4",
+ "profile": "https://github.com/derrickCos",
+ "contributions": [
+ "code",
+ "ideas"
+ ]
+ },
+ {
+ "login": "grighetto",
+ "name": "Gianluca Righetto",
+ "avatar_url": "https://avatars.githubusercontent.com/u/413792?v=4",
+ "profile": "https://github.com/grighetto",
+ "contributions": [
+ "platform"
+ ]
+ },
+ {
+ "login": "BrynCooke",
+ "name": "Bryn Cooke",
+ "avatar_url": "https://avatars.githubusercontent.com/u/747836?v=4",
+ "profile": "https://github.com/BrynCooke",
+ "contributions": [
+ "doc"
+ ]
+ },
+ {
+ "login": "KatSarah",
+ "name": "KatSarah",
+ "avatar_url": "https://avatars.githubusercontent.com/u/658015?v=4",
+ "profile": "https://github.com/KatSarah",
+ "contributions": [
+ "ideas"
+ ]
+ },
+ {
+ "login": "peytoncasper",
+ "name": "Peyton Casper",
+ "avatar_url": "https://avatars.githubusercontent.com/u/8305883?v=4",
+ "profile": "https://github.com/peytoncasper",
+ "contributions": [
+ "ideas",
+ "code"
+ ]
+ },
+ {
+ "login": "landim",
+ "name": "Arthur Costa",
+ "avatar_url": "https://avatars.githubusercontent.com/u/91446?v=4",
+ "profile": "https://github.com/landim",
+ "contributions": [
+ "bug"
+ ]
+ },
+ {
+ "login": "guyboltonking",
+ "name": "Guy Bolton King",
+ "avatar_url": "https://avatars.githubusercontent.com/u/98294?v=4",
+ "profile": "https://github.com/guyboltonking",
+ "contributions": [
+ "bug",
+ "code",
+ "ideas"
+ ]
+ },
+ {
+ "login": "XN137",
+ "name": "Christopher Lambert",
+ "avatar_url": "https://avatars.githubusercontent.com/u/1204398?v=4",
+ "profile": "https://github.com/XN137",
+ "contributions": [
+ "code",
+ "ideas"
+ ]
+ }
+ ]
+}
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index ed002fbf7..20f1f52b1 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -79,6 +79,7 @@ jobs:
builddocs:
needs: build
runs-on: ubuntu-20.04
+ if: ${{ github.repository == 'nosqlbench/nosqlbench' && github.event_name == 'push' && github.ref_name == 'main' }}
steps:
- name: set git username
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 208e1da06..311e5fcdf 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,3 +1,5 @@
+[](#contributors)
+
NoSQLBench is an ambitious project. It aims to solve long-standing problems in distributed systems
testing. There are *many* ways you can contribute! Please take a moment to review this document
in order to make the contribution process easy and effective for everyone involved.
@@ -117,5 +119,73 @@ are eager to get it into the hands of users who need it.
[discord server](https://discord.gg/dBHRakusMN) and raise your hand!
+## Contributors
+Thanks to these contributors! :sparkle:
+For recognizing contributions, please follow [this documentation](https://allcontributors.org/docs/en/bot/usage) and pick a key/contribution type from [here](https://allcontributors.org/docs/en/emoji-key).
+
+
+
+
+
+
+
+
+
+---
diff --git a/README.md b/README.md
index f3e9578b7..dc79a6c55 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,7 @@
[comment]: <  >
-
+[](https://maven-badges.herokuapp.com/maven-central/io.nosqlbench/nosqlbench)
+[](https://github.com/nosqlbench/nosqlbench/stargazers)
+[](https://discord.gg/dBHRakusMN)
# NoSQLBench v5
@@ -104,3 +106,9 @@ available, but more work is needed to support them fully. Here is what is suppor
+
+
+## Contributors
+Checkout all our wonderful contributors [here](./CONTRIBUTING.md#contributors).
+
+---
\ No newline at end of file
diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/cqlgen/core/CGWorkloadExporter.java b/adapter-cqld4/src/main/java/io/nosqlbench/cqlgen/core/CGWorkloadExporter.java
index d9415a00b..0794067b9 100644
--- a/adapter-cqld4/src/main/java/io/nosqlbench/cqlgen/core/CGWorkloadExporter.java
+++ b/adapter-cqld4/src/main/java/io/nosqlbench/cqlgen/core/CGWorkloadExporter.java
@@ -320,9 +320,9 @@ public class CGWorkloadExporter implements BundledApp {
put("default",
new LinkedHashMap<>() {{
- put("schema", "run driver=cql tags=block:'schema-*.*' threads===UNDEF cycles===UNDEF");
+ put("schema", "run driver=cql tags=block:\"schema.*\" threads===UNDEF cycles===UNDEF");
put("rampup", "run driver=cql tags=block:rampup threads=auto cycles===TEMPLATE(rampup-cycles,10000)");
- put("main", "run driver=cql tags=block:'main-*.*' threads=auto cycles===TEMPLATE(main-cycles,10000)");
+ put("main", "run driver=cql tags=block:\"main.*\" threads=auto cycles===TEMPLATE(main-cycles,10000)");
}});
put("main-insert", "run driver=cql tags=block:main-insert threads=auto cycles===TEMPLATE(main-cycles,10000)");
diff --git a/adapter-cqld4/src/main/resources/activities/baselines/cql-iot-dse.yaml b/adapter-cqld4/src/main/resources/activities/baselines/cql-iot-dse.yaml
index 1bc40fad9..ae83d127f 100644
--- a/adapter-cqld4/src/main/resources/activities/baselines/cql-iot-dse.yaml
+++ b/adapter-cqld4/src/main/resources/activities/baselines/cql-iot-dse.yaml
@@ -2,8 +2,8 @@ description: An IOT workload with more optimal settings for DSE
scenarios:
default:
schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF
- rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,100) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,100) threads=auto
bindings:
machine_id: Mod(<>); ToHashedUUID() -> java.util.UUID
diff --git a/adapter-cqld4/src/main/resources/activities/baselines/cql-iot.yaml b/adapter-cqld4/src/main/resources/activities/baselines/cql-iot.yaml
index 6250cc8ce..3daaf9eda 100644
--- a/adapter-cqld4/src/main/resources/activities/baselines/cql-iot.yaml
+++ b/adapter-cqld4/src/main/resources/activities/baselines/cql-iot.yaml
@@ -7,11 +7,11 @@ scenarios:
default:
schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF
rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
astra:
schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF
rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
params:
instrument: TEMPLATE(instrument,false)
diff --git a/adapter-cqld4/src/main/resources/activities/baselines/cql-keyvalue-astra.yaml b/adapter-cqld4/src/main/resources/activities/baselines/cql-keyvalue-astra.yaml
index 7ef9285d3..17bcc06a4 100644
--- a/adapter-cqld4/src/main/resources/activities/baselines/cql-keyvalue-astra.yaml
+++ b/adapter-cqld4/src/main/resources/activities/baselines/cql-keyvalue-astra.yaml
@@ -5,11 +5,11 @@ scenarios:
default:
schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF
rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=cql tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
astra:
schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF
rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=cql tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
seq_key: Mod(<>); ToString() -> String
diff --git a/adapter-cqld4/src/main/resources/activities/baselines/cql-starter.yaml b/adapter-cqld4/src/main/resources/activities/baselines/cql-starter.yaml
new file mode 100644
index 000000000..120fb6753
--- /dev/null
+++ b/adapter-cqld4/src/main/resources/activities/baselines/cql-starter.yaml
@@ -0,0 +1,79 @@
+description: |
+ A cql-starter workload primarily for:
+ * Cassandra: 3.x, 4.x.
+ * DataStax Enterprise: 6.8.x.
+ * DataStax Astra.
+
+scenarios:
+ default:
+ schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF
+ rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10) threads=auto
+ astra:
+ schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF
+ rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10) threads=auto
+
+params:
+ x: y
+
+bindings:
+ machine_id: Mod(<>); ToHashedUUID() -> java.util.UUID
+ message: ToString(); TextOfFile('data/cql-starter-message.txt')
+ time: Mul(100L); Div(10000L); ToJavaInstant()
+ timestamp: Mul(<>L); Div(<>L); Mul(1000L);
+
+blocks:
+ schema:
+ params:
+ prepared: false
+ ops:
+ create-keyspace: |
+ create keyspace if not exists <>
+ WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '<>'}
+ AND durable_writes = true;
+ create-table: |
+ create table if not exists <>.<> (
+ machine_id UUID,
+ message text,
+ time timestamp,
+ PRIMARY KEY ((machine_id), time)
+ ) WITH CLUSTERING ORDER BY (time DESC);
+# truncate-table: |
+# truncate table <>.<>;
+ schema-astra:
+ params:
+ prepared: false
+ ops:
+ create-table-astra: |
+ create table if not exists <>.<> (
+ machine_id UUID,
+ message text,
+ time timestamp,
+ PRIMARY KEY ((machine_id), time)
+ ) WITH CLUSTERING ORDER BY (time DESC);
+ rampup:
+ params:
+ cl: <>
+ idempotent: true
+ ops:
+ insert-rampup: |
+ insert into <>.<> (machine_id, message, time)
+ values ({machine_id}, {message}, {time}) using timestamp {timestamp};
+ main-read:
+ params:
+ ratio: <>
+ cl: <>
+ ops:
+ select-read: |
+ select * from <>.<>
+ where machine_id={machine_id};
+ main-write:
+ params:
+ ratio: <>
+ cl: <>
+ idempotent: true
+ ops:
+ insert-main: |
+ insert into <>.<>
+ (machine_id, message, time) values ({machine_id}, {message}, {time}) using timestamp {timestamp};
\ No newline at end of file
diff --git a/adapter-cqld4/src/main/resources/activities/baselines/incremental.yaml b/adapter-cqld4/src/main/resources/activities/baselines/incremental.yaml
index 9606236b6..5220be278 100644
--- a/adapter-cqld4/src/main/resources/activities/baselines/incremental.yaml
+++ b/adapter-cqld4/src/main/resources/activities/baselines/incremental.yaml
@@ -29,11 +29,11 @@ scenarios:
default:
schema: run tags=block:schema.* threads==1
main: run tags=block:main-.*.* cycles===TEMPLATE(main-cycles,0) threads=auto
- default-schema: run tags=block:'schema.*' threads==1
- default-main: run tags=block:'main.*' cycles===TEMPLATE(main-cycles,0) threads=auto
+ default-schema: run tags=block:"schema.*" threads==1
+ default-main: run tags=block:"main.*" cycles===TEMPLATE(main-cycles,0) threads=auto
astra:
schema: run tags=block:astra-schema threads==1
- main: run tags=block:'main.*' cycles===TEMPLATE(main-cycles,0) threads=auto
+ main: run tags=block:"main.*" cycles===TEMPLATE(main-cycles,0) threads=auto
params:
instrument: true
diff --git a/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-tabular2.yaml b/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-tabular2.yaml
index 67d3947f9..150665129 100644
--- a/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-tabular2.yaml
+++ b/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-tabular2.yaml
@@ -28,12 +28,12 @@ description: |
scenarios:
default:
schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF
- rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10B) threads=auto
- main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,100M) threads=auto
+ rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,100) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,100) threads=auto
astra:
schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF
- rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,100) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,100) threads=auto
params:
instrument: true
@@ -100,13 +100,13 @@ blocks:
rampup-insert: |
insert into TEMPLATE(keyspace,baselines).TEMPLATE(table,tabular)
(part,clust,data0,data1,data2,data3,data4,data5,data6,data7)
- values ({part_layout},{clust_layout},{data0},{data1},{data2},{data3},{data4},{data5},{data6},{data7})
+ values ({part_layout},{clust_layout},{data0},{data1},{data2},{data3},{data4},{data5},{data6},{data7});
verify:
params:
cl: TEMPLATE(read_cl,LOCAL_QUORUM)
ops:
verify-select: |
- select * from TEMPLATE(keyspace,baselines).TEMPLATE(table,tabular) where part={part_layout} and clust={clust_layout}
+ select * from TEMPLATE(keyspace,baselines).TEMPLATE(table,tabular) where part={part_layout} and clust={clust_layout};
main-read:
params:
ratio: 1
@@ -136,4 +136,4 @@ blocks:
main-write: |
insert into TEMPLATE(keyspace,baselines).TEMPLATE(table,tabular)
(part, clust, data0,data1,data2,data3,data4,data5,data6,data7)
- values ({part_write},{clust_write},{data0},{data1},{data2},{data3},{data4},{data5},{data6},{data7})
+ values ({part_write},{clust_write},{data0},{data1},{data2},{data3},{data4},{data5},{data6},{data7})
\ No newline at end of file
diff --git a/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-timeseries2.yaml b/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-timeseries2.yaml
index 60cac37b0..afec3d713 100644
--- a/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-timeseries2.yaml
+++ b/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-timeseries2.yaml
@@ -7,11 +7,11 @@ scenarios:
default:
schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF
rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
astra:
schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF
rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
params:
instrument: TEMPLATE(instrument,false)
diff --git a/adapter-cqld4/src/main/resources/cqlgen/cqlgen.conf b/adapter-cqld4/src/main/resources/cqlgen/cqlgen.conf
index 7c12bcaea..0aab9c35c 100644
--- a/adapter-cqld4/src/main/resources/cqlgen/cqlgen.conf
+++ b/adapter-cqld4/src/main/resources/cqlgen/cqlgen.conf
@@ -126,7 +126,7 @@ timeouts:
blockplan:
- # not needed when tags=block:'schema.*'
+ # not needed when tags=block:"schema.*"
# schema: schema-keyspaces, schema-tables, schema-types
schema-keyspaces: schema-keyspaces
schema-tables: schema-tables
diff --git a/adapter-cqld4/src/main/resources/curate_docs/cql_olddocs.md b/adapter-cqld4/src/main/resources/curate_docs/cql_olddocs.md
index 5f3413716..4a655b948 100644
--- a/adapter-cqld4/src/main/resources/curate_docs/cql_olddocs.md
+++ b/adapter-cqld4/src/main/resources/curate_docs/cql_olddocs.md
@@ -160,7 +160,7 @@ activity types.
- **ssl** - specifies the type of the SSL implementation.
Disabled by default, possible values are `jdk` and `openssl`.
- [Additional parameters may need to be provided](ssl.md).
+      See the ssl help topic with `nb5 help ssl` for more details.
- **jmxreporting** - enable JMX reporting if needed.
Examples:
diff --git a/adapter-cqld4/src/main/resources/data/cql-starter-message.txt b/adapter-cqld4/src/main/resources/data/cql-starter-message.txt
new file mode 100644
index 000000000..2d56aa368
--- /dev/null
+++ b/adapter-cqld4/src/main/resources/data/cql-starter-message.txt
@@ -0,0 +1 @@
+Welcome to cql-starter!
\ No newline at end of file
diff --git a/adapter-cqld4/src/main/resources/curate_docs/ssl.md b/adapter-cqld4/src/main/resources/ssl.md
similarity index 100%
rename from adapter-cqld4/src/main/resources/curate_docs/ssl.md
rename to adapter-cqld4/src/main/resources/ssl.md
diff --git a/adapter-cqld4/src/test/resources/testschemas/cql_alltypes.yaml b/adapter-cqld4/src/test/resources/testschemas/cql_alltypes.yaml
index d952f3c03..dd7c96517 100644
--- a/adapter-cqld4/src/test/resources/testschemas/cql_alltypes.yaml
+++ b/adapter-cqld4/src/test/resources/testschemas/cql_alltypes.yaml
@@ -1,9 +1,9 @@
description: Auto-generated workload from source schema.
scenarios:
default:
- schema: run driver=cql tags=block:'schema.*' threads===UNDEF cycles===UNDEF
- rampup: run driver=cql tags=block:'rampup.*' threads=auto cycles===TEMPLATE(rampup-cycles,10000)
- main: run driver=cql tags=block:'main.*' threads=auto cycles===TEMPLATE(main-cycles,10000)
+ schema: run driver=cql tags=block:"schema.*" threads===UNDEF cycles===UNDEF
+ rampup: run driver=cql tags=block:"rampup.*" threads=auto cycles===TEMPLATE(rampup-cycles,10000)
+ main: run driver=cql tags=block:"main.*" threads=auto cycles===TEMPLATE(main-cycles,10000)
main-insert: run driver=cql tags=block:main-insert threads=auto cycles===TEMPLATE(main-cycles,10000)
main-select: run driver=cql tags=block:main-select threads=auto cycles===TEMPLATE(main-cycles,10000)
main-scan: run driver=cql tags=block:main-scan threads=auto cycles===TEMPLATE(main-cycles,10000)
diff --git a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-keyvalue2.yaml b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-keyvalue2.yaml
index af52ac337..6509963cd 100644
--- a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-keyvalue2.yaml
+++ b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-keyvalue2.yaml
@@ -6,9 +6,9 @@ description: |
scenarios:
default:
- schema: run driver=dynamodb tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=dynamodb tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=dynamodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=dynamodb tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=dynamodb tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
read: run driver=dynamodb tags==block:main-read cycles===TEMPLATE(main-cycles,10000000) threads=auto
write: run driver=dynamodb tags==block:main-write cycles===TEMPLATE(main-cycles,10000000) threads=auto
diff --git a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-tabular2.yaml b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-tabular2.yaml
index ac531efe8..acc0acd51 100644
--- a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-tabular2.yaml
+++ b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-tabular2.yaml
@@ -4,10 +4,10 @@ description: |
Run a read/write workload against DynamoDB with varying field sizes and query patterns
scenarios:
- schema: run driver=dynamodb tags=block:'schema.*' region=us-east-1
+ schema: run driver=dynamodb tags=block:"schema.*" region=us-east-1
rampup: run driver=dynamodb tags=block:rampup region=us-east-1
read: run driver=dynamodb tags=block:read region=us-east-1
- main: run driver=dynamodb tags=block:'main-*.*' region=us-east-1
+ main: run driver=dynamodb tags=block:"main.*" region=us-east-1
read01: run driver=dynamodb tags='name:.*main-read-01' region=us-east-1
delete:
table: run driver=dynamodb tags==block:delete threads==1 cycles==UNDEF
diff --git a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-timeseries2.yaml b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-timeseries2.yaml
index 34a76afb6..2c53654b8 100644
--- a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-timeseries2.yaml
+++ b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-timeseries2.yaml
@@ -11,7 +11,7 @@ description: |
scenarios:
default:
- schema: run driver=dynamodb tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=dynamodb tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=dynamodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
main: run driver=dynamodb tags==block:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
delete:
diff --git a/adapter-http/src/main/resources/activities/baselines/http-rest-keyvalue.yaml b/adapter-http/src/main/resources/activities/baselines/http-rest-keyvalue.yaml
index d186f6e99..80a5d127c 100644
--- a/adapter-http/src/main/resources/activities/baselines/http-rest-keyvalue.yaml
+++ b/adapter-http/src/main/resources/activities/baselines/http-rest-keyvalue.yaml
@@ -9,8 +9,8 @@ description: |
scenarios:
default:
schema: run driver=http tags==block:schema threads==1 cycles==UNDEF
- rampup: run driver=http tags==block:"rampup-*.*" cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=http tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ rampup: run driver=http tags==block:"rampup.*" cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
+ main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
# To enable an optional weighted set of hosts in place of a load balancer
diff --git a/adapter-http/src/main/resources/activities/baselines/http-rest-tabular.yaml b/adapter-http/src/main/resources/activities/baselines/http-rest-tabular.yaml
index 8c3534ff0..2a9f278b2 100644
--- a/adapter-http/src/main/resources/activities/baselines/http-rest-tabular.yaml
+++ b/adapter-http/src/main/resources/activities/baselines/http-rest-tabular.yaml
@@ -9,9 +9,9 @@ description: |
scenarios:
default:
- schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF
- rampup: run driver=http tags==block:'rampup-*.*' cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=http tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF
+ rampup: run driver=http tags==block:"rampup.*" cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
+ main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
# To enable an optional weighted set of hosts in place of a load balancer
diff --git a/adapter-http/src/main/resources/activities/baselines/http-rest-timeseries.yaml b/adapter-http/src/main/resources/activities/baselines/http-rest-timeseries.yaml
index cf77c6cb4..be6fedc24 100644
--- a/adapter-http/src/main/resources/activities/baselines/http-rest-timeseries.yaml
+++ b/adapter-http/src/main/resources/activities/baselines/http-rest-timeseries.yaml
@@ -12,10 +12,10 @@ description: |
scenarios:
default:
- schema: run driver=cql tags==block:'schema-*.*' threads==1 cycles==UNDEF
+ schema: run driver=cql tags==block:"schema.*" threads==1 cycles==UNDEF
schema-astra: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF
- rampup: run driver=http tags==block:'rampup-*.*' cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=http tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ rampup: run driver=http tags==block:"rampup.*" cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
+ main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
# To enable an optional weighted set of hosts in place of a load balancer
diff --git a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-basic.yaml b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-basic.yaml
index bf1cf8417..b644da7d2 100644
--- a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-basic.yaml
+++ b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-basic.yaml
@@ -7,7 +7,7 @@ description: |
scenarios:
default:
- schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF
write: run driver=http tags==block:'write.*' cycles===TEMPLATE(write-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
read: run driver=http tags==block:'read.*' cycles===TEMPLATE(read-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
update: run driver=http tags==block:'update.*' cycles===TEMPLATE(update-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
diff --git a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-dataset.yaml b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-dataset.yaml
index e67d61787..5a93c57ef 100644
--- a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-dataset.yaml
+++ b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-dataset.yaml
@@ -7,7 +7,7 @@ description: |
scenarios:
default:
- schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF
write: run driver=http tags==name:'write.*' cycles===TEMPLATE(write-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
read: run driver=http tags==name:'read.*' cycles===TEMPLATE(read-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
update: run driver=http tags==name:'update.*' cycles===TEMPLATE(update-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
diff --git a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-keyvalue.yaml b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-keyvalue.yaml
index 4ed237879..cd1df59e5 100644
--- a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-keyvalue.yaml
+++ b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-keyvalue.yaml
@@ -10,7 +10,7 @@ description: |
scenarios:
default:
- schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
main: run driver=http tags==block:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
diff --git a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-advanced.yaml b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-advanced.yaml
index edf6229f3..49cb31a4e 100644
--- a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-advanced.yaml
+++ b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-advanced.yaml
@@ -15,7 +15,7 @@ description: |
# complex2: (match1 LTE 0 OR match2 EQ "false") AND (match2 EQ "false" OR match3 EQ true)
# complex3: (match1 LTE 0 AND match2 EQ "true") OR (match2 EQ "false" AND match3 EQ true)
scenarios:
- schema: run driver=http tags==block:'schema.*' threads==<> cycles==UNDEF
+ schema: run driver=http tags==block:"schema.*" threads==<> cycles==UNDEF
rampup:
write: run driver=http tags==name:'rampup-put.*' cycles===TEMPLATE(docscount,10000000) docpadding=TEMPLATE(docpadding,0) match-ratio=TEMPLATE(match-ratio,0.01) threads=<> errors=timer,warn
read: run driver=http tags==block:'rampup-get.*' cycles===TEMPLATE(rampup-cycles, 10000000) page-size=TEMPLATE(page-size,3) fields=TEMPLATE(fields,%5b%5d) threads=<> errors=timer,warn
diff --git a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-basic.yaml b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-basic.yaml
index 8105dce04..1a6250677 100644
--- a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-basic.yaml
+++ b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-basic.yaml
@@ -7,7 +7,7 @@ description: |
Note that docsapi_port should reflect the port where the Docs API is exposed (defaults to 8180).
scenarios:
- schema: run driver=http tags==block:'schema.*' threads==<> cycles==UNDEF
+ schema: run driver=http tags==block:"schema.*" threads==<> cycles==UNDEF
rampup:
write: run driver=http tags==name:'rampup-put.*' cycles===TEMPLATE(docscount,10000000) docpadding=TEMPLATE(docpadding,0) match-ratio=TEMPLATE(match-ratio,0.01) threads=<> errors=timer,warn
read: run driver=http tags==name:'rampup-get.*' cycles===TEMPLATE(rampup-cycles, 10000000) page-size=TEMPLATE(page-size,3) fields=TEMPLATE(fields,%5b%5d) threads=<> errors=timer,warn
diff --git a/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-keyvalue.yaml b/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-keyvalue.yaml
index 794120747..b4363eaaf 100644
--- a/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-keyvalue.yaml
+++ b/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-keyvalue.yaml
@@ -9,7 +9,7 @@ description: |
scenarios:
default:
- schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
diff --git a/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-tabular.yaml b/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-tabular.yaml
index 777359c92..b44f48f51 100644
--- a/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-tabular.yaml
+++ b/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-tabular.yaml
@@ -10,9 +10,9 @@ description: |
scenarios:
default:
- schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=http tags==block:'main.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
# To enable an optional weighted set of hosts in place of a load balancer
diff --git a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-keyvalue.yaml b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-keyvalue.yaml
index 19754747c..1901efdfc 100644
--- a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-keyvalue.yaml
+++ b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-keyvalue.yaml
@@ -13,9 +13,9 @@ description: |
scenarios:
default:
- schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=http tags==block:'main.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
# To enable an optional weighted set of hosts in place of a load balancer
diff --git a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-tabular.yaml b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-tabular.yaml
index 8f5004e26..e88b804d2 100644
--- a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-tabular.yaml
+++ b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-tabular.yaml
@@ -15,9 +15,9 @@ description: |
scenarios:
default:
- schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- man: run driver=http tags==block:'main.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
+  main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
# To enable an optional weighted set of hosts in place of a load balancer
diff --git a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-timeseries.yaml b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-timeseries.yaml
index 7a6c28a9c..7bd663e9d 100644
--- a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-timeseries.yaml
+++ b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-timeseries.yaml
@@ -19,9 +19,9 @@ description: |
scenarios:
default:
- schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- main: run driver=http tags==block:'main.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
# To enable an optional weighted set of hosts in place of a load balancer
diff --git a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml
index bae4e0254..7c65abd87 100644
--- a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml
+++ b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml
@@ -9,7 +9,7 @@ description: |
scenarios:
default:
- schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=mongodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
main: run driver=mongodb tags==block:'main-.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
drop: run driver=mongodb tags==block:drop-collection threads==1 cycles==UNDEF
diff --git a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml
index 7873cb49e..8539d3157 100644
--- a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml
+++ b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml
@@ -9,7 +9,7 @@ description: |
scenarios:
default:
- schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=mongodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
main: run driver=mongodb tags==block:'main-.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
drop: run driver=mongodb tags==block:drop-collection threads==1 cycles==UNDEF
diff --git a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml
index f7d519786..e8b248899 100644
--- a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml
+++ b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml
@@ -13,7 +13,7 @@ description: |
scenarios:
default:
- schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF
rampup: run driver=mongodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
main: run driver=mongodb tags==block:'main-.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
drop: run driver=mongodb tags==block:drop-collection threads==1 cycles==UNDEF
diff --git a/adapter-mongodb/src/main/resources/activities/mongodb-basic.yaml b/adapter-mongodb/src/main/resources/activities/mongodb-basic.yaml
index 7c3deb332..a502a3cf0 100644
--- a/adapter-mongodb/src/main/resources/activities/mongodb-basic.yaml
+++ b/adapter-mongodb/src/main/resources/activities/mongodb-basic.yaml
@@ -3,7 +3,7 @@ description: An example of a basic mongo insert and find.
scenarios:
default:
rampup: run driver=mongodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,1000000) threads=auto
- main: run driver=mongodb tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto
+ main: run driver=mongodb tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
seq_key: Mod(<>L); ToInt()
diff --git a/adapter-mongodb/src/main/resources/activities/mongodb-crud-basic.yaml b/adapter-mongodb/src/main/resources/activities/mongodb-crud-basic.yaml
index 14bfc0c39..36c4d7a8d 100644
--- a/adapter-mongodb/src/main/resources/activities/mongodb-crud-basic.yaml
+++ b/adapter-mongodb/src/main/resources/activities/mongodb-crud-basic.yaml
@@ -6,7 +6,7 @@ description: |
scenarios:
default:
- schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF
write: run driver=mongodb tags==block:main-write,type:write cycles===TEMPLATE(write-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
read: run driver=mongodb tags==block:main-read,type:read cycles===TEMPLATE(read-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
update: run driver=mongodb tags==block:main-update,type:update cycles===TEMPLATE(update-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
diff --git a/adapter-mongodb/src/main/resources/activities/mongodb-crud-dataset.yaml b/adapter-mongodb/src/main/resources/activities/mongodb-crud-dataset.yaml
index 9b22f455a..fa2c695e8 100644
--- a/adapter-mongodb/src/main/resources/activities/mongodb-crud-dataset.yaml
+++ b/adapter-mongodb/src/main/resources/activities/mongodb-crud-dataset.yaml
@@ -6,7 +6,7 @@ description: |
scenarios:
default:
- schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF
write: run driver=mongodb tags==block:main-write,type:write cycles===TEMPLATE(write-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
read: run driver=mongodb tags==block:main-read,type:read cycles===TEMPLATE(read-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
update: run driver=mongodb tags==block:main-update,type:update cycles===TEMPLATE(update-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn
diff --git a/adapter-mongodb/src/main/resources/activities/mongodb-search-basic.yaml b/adapter-mongodb/src/main/resources/activities/mongodb-search-basic.yaml
index 33d9c42be..e3a003199 100644
--- a/adapter-mongodb/src/main/resources/activities/mongodb-search-basic.yaml
+++ b/adapter-mongodb/src/main/resources/activities/mongodb-search-basic.yaml
@@ -6,7 +6,7 @@ description: |
It's a counterpart of the Stargate's Documents API Basic Search workflow.
scenarios:
- schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF
+ schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF
rampup-write: run driver=mongodb tags==block:rampup-write cycles===TEMPLATE(docscount,10000000) docpadding=TEMPLATE(docpadding,0) match-ratio=TEMPLATE(match-ratio,0.01) threads=auto errors=timer,warn
rampup-read: run driver=mongodb tags==block:rampup-read cycles===TEMPLATE(rampup-cycles, 10000000) field-projection=TEMPLATE(fields,null) threads=<> errors=timer,warn
main: run driver=mongodb tags==block:main cycles===TEMPLATE(read-cycles,TEMPLATE(docscount,10000000)) field-projection=TEMPLATE(fields,null) threads=<> errors=timer,warn
diff --git a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/PulsarSpace.java b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/PulsarSpace.java
index 4aba161f1..9cc5e44c6 100644
--- a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/PulsarSpace.java
+++ b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/PulsarSpace.java
@@ -32,8 +32,10 @@ import org.apache.pulsar.client.admin.PulsarAdminBuilder;
import org.apache.pulsar.client.api.*;
import org.apache.pulsar.common.schema.KeyValueEncodingType;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Supplier;
public class PulsarSpace implements AutoCloseable {
@@ -50,9 +52,18 @@ public class PulsarSpace implements AutoCloseable {
private PulsarAdmin pulsarAdmin;
private Schema> pulsarSchema;
- private final ConcurrentHashMap> producers = new ConcurrentHashMap<>();
- private final ConcurrentHashMap> consumers = new ConcurrentHashMap<>();
- private final ConcurrentHashMap> readers = new ConcurrentHashMap<>();
+ public record ProducerCacheKey(String producerName, String topicName) {
+ }
+
+ private final ConcurrentHashMap> producers = new ConcurrentHashMap<>();
+
+ public record ConsumerCacheKey(String consumerName, String subscriptionName, List topicNameList, String topicPattern) {
+ }
+ private final ConcurrentHashMap> consumers = new ConcurrentHashMap<>();
+
+ public record ReaderCacheKey(String readerName, String topicName, String startMsgPosStr) {
+ }
+ private final ConcurrentHashMap> readers = new ConcurrentHashMap<>();
public PulsarSpace(String spaceName, NBConfiguration cfg) {
@@ -89,13 +100,11 @@ public class PulsarSpace implements AutoCloseable {
public int getProducerSetCnt() { return producers.size(); }
public int getConsumerSetCnt() { return consumers.size(); }
public int getReaderSetCnt() { return readers.size(); }
- public Producer> getProducer(String name) { return producers.get(name); }
- public void setProducer(String name, Producer> producer) { producers.put(name, producer); }
- public Consumer> getConsumer(String name) { return consumers.get(name); }
- public void setConsumer(String name, Consumer> consumer) { consumers.put(name, consumer); }
+ public Producer> getProducer(ProducerCacheKey key, Supplier> producerSupplier) { return producers.computeIfAbsent(key, __ -> producerSupplier.get()); }
- public Reader> getReader(String name) { return readers.get(name); }
- public void setReader(String name, Reader> reader) { readers.put(name, reader); }
+ public Consumer> getConsumer(ConsumerCacheKey key, Supplier> consumerSupplier) { return consumers.computeIfAbsent(key, __ -> consumerSupplier.get()); }
+
+ public Reader> getReader(ReaderCacheKey key, Supplier> readerSupplier) { return readers.computeIfAbsent(key, __ -> readerSupplier.get()); }
/**
diff --git a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/PulsarBaseOpDispenser.java b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/PulsarBaseOpDispenser.java
index 1de99a097..2e1b40230 100644
--- a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/PulsarBaseOpDispenser.java
+++ b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/PulsarBaseOpDispenser.java
@@ -37,7 +37,6 @@ import java.util.*;
import java.util.function.LongFunction;
import java.util.function.Predicate;
import java.util.regex.Pattern;
-import java.util.regex.PatternSyntaxException;
import java.util.stream.Collectors;
public abstract class PulsarBaseOpDispenser extends BaseOpDispenser implements NBNamedElement {
@@ -239,10 +238,8 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser producer = pulsarSpace.getProducer(producerCacheKey);
-
- if (producer == null) {
+ PulsarSpace.ProducerCacheKey producerCacheKey = new PulsarSpace.ProducerCacheKey(producerName, topicName);
+ return pulsarSpace.getProducer(producerCacheKey, () -> {
PulsarClient pulsarClient = pulsarSpace.getPulsarClient();
// Get other possible producer settings that are set at global level
@@ -262,21 +259,17 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser producer = producerBuilder.create();
pulsarAdapterMetrics.registerProducerApiMetrics(producer,
getPulsarAPIMetricsPrefix(
PulsarAdapterUtil.PULSAR_API_TYPE.PRODUCER.label,
producerName,
topicName));
- }
- catch (PulsarClientException ple) {
+ return producer;
+ } catch (PulsarClientException ple) {
throw new PulsarAdapterUnexpectedException("Failed to create a Pulsar producer.");
}
- }
-
- return producer;
+ });
}
private List getEffectiveConsumerTopicNameList(String cycleTopicNameListStr) {
@@ -296,24 +289,6 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser topicNameList = getEffectiveConsumerTopicNameList(cycleTopicNameListStr);
- String topicPatternStr = getEffectiveConValue(
+ String topicPatternStr = StringUtils.trimToNull(getEffectiveConValue(
PulsarAdapterUtil.CONF_GATEGORY.Consumer.label,
PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.topicsPattern.label,
- cycleTopicPatternStr);
- Pattern topicPattern = getEffectiveConsumerTopicPattern(cycleTopicPatternStr);
+ cycleTopicPatternStr));
String subscriptionName = getEffectiveConValue(
PulsarAdapterUtil.CONF_GATEGORY.Consumer.label,
@@ -368,28 +342,14 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser 1 || (topicPattern != null));
-
- String consumerTopicListString;
- if (!topicNameList.isEmpty()) {
- consumerTopicListString = String.join("|", topicNameList);
- } else {
- consumerTopicListString = topicPatternStr;
- }
-
- String consumerCacheKey = PulsarAdapterUtil.buildCacheKey(
- consumerName,
- subscriptionName,
- consumerTopicListString);
- Consumer> consumer = pulsarSpace.getConsumer(consumerCacheKey);
-
- if (consumer == null) {
+ return pulsarSpace.getConsumer(
+ new PulsarSpace.ConsumerCacheKey(consumerName, subscriptionName, topicNameList, topicPatternStr), () -> {
PulsarClient pulsarClient = pulsarSpace.getPulsarClient();
// Get other possible consumer settings that are set at global level
@@ -417,6 +377,7 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser 1 || (topicPatternStr != null));
if (!multiTopicConsumer) {
assert (topicNameList.size() == 1);
consumerBuilder = pulsarClient.newConsumer(pulsarSpace.getPulsarSchema());
@@ -429,6 +390,7 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser consumer = consumerBuilder.subscribe();
+ String consumerTopicListString = (!topicNameList.isEmpty()) ? String.join("|", topicNameList) : topicPatternStr;
pulsarAdapterMetrics.registerConsumerApiMetrics(
consumer,
getPulsarAPIMetricsPrefix(
PulsarAdapterUtil.PULSAR_API_TYPE.CONSUMER.label,
consumerName,
consumerTopicListString));
+
+ return consumer;
}
catch (PulsarClientException ple) {
throw new PulsarAdapterUnexpectedException("Failed to create a Pulsar consumer!");
}
- }
-
- return consumer;
+ });
}
private static Range[] parseRanges(String ranges) {
@@ -528,10 +490,7 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser reader = pulsarSpace.getReader(readerCacheKey);
-
- if (reader == null) {
+ return pulsarSpace.getReader(new PulsarSpace.ReaderCacheKey(readerName, topicName, startMsgPosStr), () -> {
PulsarClient pulsarClient = pulsarSpace.getPulsarClient();;
Map readerConf = pulsarSpace.getPulsarNBClientConf().getReaderConfMapTgt();
@@ -558,17 +517,12 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser t.label.equals(param));
- }
///////
// Message processing sequence error simulation types
@@ -77,29 +81,21 @@ public class PulsarAdapterUtil {
this.label = label;
}
- private static final Map MAPPING = new HashMap<>();
-
- static {
- for (MSG_SEQ_ERROR_SIMU_TYPE simuType : values()) {
- MAPPING.put(simuType.label, simuType);
- MAPPING.put(simuType.label.toLowerCase(), simuType);
- MAPPING.put(simuType.label.toUpperCase(), simuType);
- MAPPING.put(simuType.name(), simuType);
- MAPPING.put(simuType.name().toLowerCase(), simuType);
- MAPPING.put(simuType.name().toUpperCase(), simuType);
- }
- }
+ private static final Map MAPPING = Stream.of(values())
+ .flatMap(simuType ->
+ Stream.of(simuType.label,
+ simuType.label.toLowerCase(),
+ simuType.label.toUpperCase(),
+ simuType.name(),
+ simuType.name().toLowerCase(),
+ simuType.name().toUpperCase())
+ .distinct().map(key -> Map.entry(key, simuType)))
+ .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue));
public static Optional parseSimuType(String simuTypeString) {
return Optional.ofNullable(MAPPING.get(simuTypeString.trim()));
}
}
- public static boolean isValidSeqErrSimuType(String item) {
- return Arrays.stream(MSG_SEQ_ERROR_SIMU_TYPE.values()).anyMatch(t -> t.label.equals(item));
- }
- public static String getValidSeqErrSimuTypeList() {
- return Arrays.stream(MSG_SEQ_ERROR_SIMU_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
- }
///////
// Valid Pulsar API type
@@ -113,12 +109,15 @@ public class PulsarAdapterUtil {
PULSAR_API_TYPE(String label) {
this.label = label;
}
+
+ private static final Set LABELS = Stream.of(values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet());
+
+ public static boolean isValidLabel(String label) {
+ return LABELS.contains(label);
+ }
}
public static boolean isValidPulsarApiType(String param) {
- return Arrays.stream(PULSAR_API_TYPE.values()).anyMatch(t -> t.label.equals(param));
- }
- public static String getValidPulsarApiTypeList() {
- return Arrays.stream(PULSAR_API_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
+ return PULSAR_API_TYPE.isValidLabel(param);
}
@@ -136,14 +135,16 @@ public class PulsarAdapterUtil {
CONF_GATEGORY(String label) {
this.label = label;
}
+
+ private static final Set LABELS = Stream.of(values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet());
+
+ public static boolean isValidLabel(String label) {
+ return LABELS.contains(label);
+ }
}
public static boolean isValidConfCategory(String item) {
- return Arrays.stream(CONF_GATEGORY.values()).anyMatch(t -> t.label.equals(item));
+ return CONF_GATEGORY.isValidLabel(item);
}
- public static String getValidConfCategoryList() {
- return Arrays.stream(CONF_GATEGORY.values()).map(t -> t.label).collect(Collectors.joining(", "));
- }
-
///////
// Valid persistence type
public enum PERSISTENT_TYPES {
@@ -156,9 +157,6 @@ public class PulsarAdapterUtil {
this.label = label;
}
}
- public static boolean isValidPersistenceType(String type) {
- return Arrays.stream(PERSISTENT_TYPES.values()).anyMatch(t -> t.label.equals(type));
- }
///////
// Valid Pulsar client configuration (activity-level settings)
@@ -194,9 +192,6 @@ public class PulsarAdapterUtil {
this.label = label;
}
}
- public static boolean isValidClientConfItem(String item) {
- return Arrays.stream(CLNT_CONF_KEY.values()).anyMatch(t -> t.label.equals(item));
- }
///////
// Standard producer configuration (activity-level settings)
@@ -222,9 +217,6 @@ public class PulsarAdapterUtil {
this.label = label;
}
}
- public static boolean isStandardProducerConfItem(String item) {
- return Arrays.stream(PRODUCER_CONF_STD_KEY.values()).anyMatch(t -> t.label.equals(item));
- }
// compressionType
public enum COMPRESSION_TYPE {
@@ -239,12 +231,12 @@ public class PulsarAdapterUtil {
COMPRESSION_TYPE(String label) {
this.label = label;
}
+
+ private final static String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
}
- public static boolean isValidCompressionType(String item) {
- return Arrays.stream(COMPRESSION_TYPE.values()).anyMatch(t -> t.label.equals(item));
- }
+
public static String getValidCompressionTypeList() {
- return Arrays.stream(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
+ return COMPRESSION_TYPE.TYPE_LIST;
}
///////
@@ -284,9 +276,6 @@ public class PulsarAdapterUtil {
this.label = label;
}
}
- public static boolean isStandardConsumerConfItem(String item) {
- return Arrays.stream(CONSUMER_CONF_STD_KEY.values()).anyMatch(t -> t.label.equals(item));
- }
///////
// Custom consumer configuration (activity-level settings)
@@ -301,9 +290,16 @@ public class PulsarAdapterUtil {
CONSUMER_CONF_CUSTOM_KEY(String label) {
this.label = label;
}
+
+ private static final Set LABELS = Stream.of(values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet());
+
+ public static boolean isValidLabel(String label) {
+ return LABELS.contains(label);
+ }
+
}
public static boolean isCustomConsumerConfItem(String item) {
- return Arrays.stream(CONSUMER_CONF_CUSTOM_KEY.values()).anyMatch(t -> t.label.equals(item));
+ return CONSUMER_CONF_CUSTOM_KEY.isValidLabel(item);
}
// subscriptionTyp
@@ -318,12 +314,21 @@ public class PulsarAdapterUtil {
SUBSCRIPTION_TYPE(String label) {
this.label = label;
}
+
+ private static final Set LABELS = Stream.of(values()).map(v -> v.label)
+ .collect(Collectors.toUnmodifiableSet());
+
+ public static boolean isValidLabel(String label) {
+ return LABELS.contains(label);
+ }
+
+        private final static String TYPE_LIST = Stream.of(values()).map(t -> t.label).collect(Collectors.joining(", "));
}
public static boolean isValidSubscriptionType(String item) {
- return Arrays.stream(SUBSCRIPTION_TYPE.values()).anyMatch(t -> t.label.equals(item));
+ return SUBSCRIPTION_TYPE.isValidLabel(item);
}
public static String getValidSubscriptionTypeList() {
- return Arrays.stream(SUBSCRIPTION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
+ return SUBSCRIPTION_TYPE.TYPE_LIST;
}
// subscriptionInitialPosition
@@ -336,12 +341,12 @@ public class PulsarAdapterUtil {
SUBSCRIPTION_INITIAL_POSITION(String label) {
this.label = label;
}
- }
- public static boolean isValidSubscriptionInitialPosition(String item) {
- return Arrays.stream(SUBSCRIPTION_INITIAL_POSITION.values()).anyMatch(t -> t.label.equals(item));
+
+        private final static String TYPE_LIST = Stream.of(values()).map(t -> t.label).collect(Collectors.joining(", "));
+
}
public static String getValidSubscriptionInitialPositionList() {
- return Arrays.stream(SUBSCRIPTION_INITIAL_POSITION.values()).map(t -> t.label).collect(Collectors.joining(", "));
+ return SUBSCRIPTION_INITIAL_POSITION.TYPE_LIST;
}
// regexSubscriptionMode
@@ -355,12 +360,12 @@ public class PulsarAdapterUtil {
REGEX_SUBSCRIPTION_MODE(String label) {
this.label = label;
}
+
+        private final static String TYPE_LIST = Stream.of(values()).map(t -> t.label).collect(Collectors.joining(", "));
}
- public static boolean isValidRegexSubscriptionMode(String item) {
- return Arrays.stream(REGEX_SUBSCRIPTION_MODE.values()).anyMatch(t -> t.label.equals(item));
- }
+
public static String getValidRegexSubscriptionModeList() {
- return Arrays.stream(REGEX_SUBSCRIPTION_MODE.values()).map(t -> t.label).collect(Collectors.joining(", "));
+ return REGEX_SUBSCRIPTION_MODE.TYPE_LIST;
}
///////
@@ -383,9 +388,6 @@ public class PulsarAdapterUtil {
this.label = label;
}
}
- public static boolean isStandardReaderConfItem(String item) {
- return Arrays.stream(READER_CONF_STD_KEY.values()).anyMatch(t -> t.label.equals(item));
- }
///////
// Custom reader configuration (activity-level settings)
@@ -400,9 +402,6 @@ public class PulsarAdapterUtil {
this.label = label;
}
}
- public static boolean isCustomReaderConfItem(String item) {
- return Arrays.stream(READER_CONF_CUSTOM_KEY.values()).anyMatch(t -> t.label.equals(item));
- }
///////
// Valid read positions for a Pulsar reader
@@ -415,156 +414,84 @@ public class PulsarAdapterUtil {
READER_MSG_POSITION_TYPE(String label) {
this.label = label;
}
+
+ private static final Set<String> LABELS = Stream.of(values()).map(v -> v.label)
+ .collect(Collectors.toUnmodifiableSet());
+
+ public static boolean isValidLabel(String label) {
+ return LABELS.contains(label);
+ }
}
public static boolean isValideReaderStartPosition(String item) {
- return Arrays.stream(READER_MSG_POSITION_TYPE.values()).anyMatch(t -> t.label.equals(item));
+ return READER_MSG_POSITION_TYPE.isValidLabel(item);
}
+ private static final Map<String, Schema<?>> PRIMITIVE_SCHEMA_TYPE_MAPPING = Stream.of(SchemaType.values())
+ .filter(SchemaType::isPrimitive)
+ .collect(Collectors.toUnmodifiableMap(schemaType -> schemaType.name().toUpperCase(),
+ schemaType -> Schema.getSchema(SchemaInfo.builder().type(schemaType).build())));
+
///////
// Primitive Schema type
public static boolean isPrimitiveSchemaTypeStr(String typeStr) {
- boolean isPrimitive = false;
-
- // Use "BYTES" as the default type if the type string is not explicitly specified
- if (StringUtils.isBlank(typeStr)) {
- typeStr = "BYTES";
- }
-
- if (typeStr.equalsIgnoreCase("BOOLEAN") || typeStr.equalsIgnoreCase("INT8") ||
- typeStr.equalsIgnoreCase("INT16") || typeStr.equalsIgnoreCase("INT32") ||
- typeStr.equalsIgnoreCase("INT64") || typeStr.equalsIgnoreCase("FLOAT") ||
- typeStr.equalsIgnoreCase("DOUBLE") || typeStr.equalsIgnoreCase("BYTES") ||
- typeStr.equalsIgnoreCase("DATE") || typeStr.equalsIgnoreCase("TIME") ||
- typeStr.equalsIgnoreCase("TIMESTAMP") || typeStr.equalsIgnoreCase("INSTANT") ||
- typeStr.equalsIgnoreCase("LOCAL_DATE") || typeStr.equalsIgnoreCase("LOCAL_TIME") ||
- typeStr.equalsIgnoreCase("LOCAL_DATE_TIME")) {
- isPrimitive = true;
- }
-
- return isPrimitive;
+ return StringUtils.isBlank(typeStr) || PRIMITIVE_SCHEMA_TYPE_MAPPING.containsKey(typeStr.toUpperCase());
}
+
 public static Schema<?> getPrimitiveTypeSchema(String typeStr) {
- Schema<?> schema;
-
- if (StringUtils.isBlank(typeStr)) {
- typeStr = "BYTES";
+ String lookupKey = StringUtils.isBlank(typeStr) ? "BYTES" : typeStr.toUpperCase();
+ Schema<?> schema = PRIMITIVE_SCHEMA_TYPE_MAPPING.get(lookupKey);
+ if (schema == null) {
+ throw new PulsarAdapterInvalidParamException("Invalid Pulsar primitive schema type string : " + typeStr);
}
-
- switch (typeStr.toUpperCase()) {
- case "BOOLEAN":
- schema = Schema.BOOL;
- break;
- case "INT8":
- schema = Schema.INT8;
- break;
- case "INT16":
- schema = Schema.INT16;
- break;
- case "INT32":
- schema = Schema.INT32;
- break;
- case "INT64":
- schema = Schema.INT64;
- break;
- case "FLOAT":
- schema = Schema.FLOAT;
- break;
- case "DOUBLE":
- schema = Schema.DOUBLE;
- break;
- case "DATE":
- schema = Schema.DATE;
- break;
- case "TIME":
- schema = Schema.TIME;
- break;
- case "TIMESTAMP":
- schema = Schema.TIMESTAMP;
- break;
- case "INSTANT":
- schema = Schema.INSTANT;
- break;
- case "LOCAL_DATE":
- schema = Schema.LOCAL_DATE;
- break;
- case "LOCAL_TIME":
- schema = Schema.LOCAL_TIME;
- break;
- case "LOCAL_DATE_TIME":
- schema = Schema.LOCAL_DATE_TIME;
- break;
- case "BYTES":
- schema = Schema.BYTES;
- break;
- // Report an error if non-valid, non-empty schema type string is provided
- default:
- throw new PulsarAdapterInvalidParamException("Invalid Pulsar primitive schema type string : " + typeStr);
- }
-
return schema;
}
///////
// Complex strut type: Avro or Json
public static boolean isAvroSchemaTypeStr(String typeStr) {
- return (StringUtils.isNotBlank(typeStr) && typeStr.equalsIgnoreCase("AVRO"));
+ return "AVRO".equalsIgnoreCase(typeStr);
}
// automatic decode the type from the Registry
public static boolean isAutoConsumeSchemaTypeStr(String typeStr) {
- return (StringUtils.isNotBlank(typeStr) && typeStr.equalsIgnoreCase("AUTO_CONSUME"));
+ return "AUTO_CONSUME".equalsIgnoreCase(typeStr);
}
- public static Schema<?> getAvroSchema(String typeStr, String definitionStr) {
- String schemaDefinitionStr = definitionStr;
- String filePrefix = "file://";
- Schema<?> schema;
+ private static final Map<String, Schema<?>> AVRO_SCHEMA_CACHE = new ConcurrentHashMap<>();
+
+ public static Schema<?> getAvroSchema(String typeStr, final String definitionStr) {
// Check if payloadStr points to a file (e.g. "file:///path/to/a/file")
if (isAvroSchemaTypeStr(typeStr)) {
- if (StringUtils.isBlank(schemaDefinitionStr)) {
- throw new PulsarAdapterInvalidParamException(
- "Schema definition must be provided for \"Avro\" schema type!");
+ if (StringUtils.isBlank(definitionStr)) {
+ throw new PulsarAdapterInvalidParamException("Schema definition must be provided for \"Avro\" schema type!");
}
- else if (schemaDefinitionStr.startsWith(filePrefix)) {
- try {
- Path filePath = Paths.get(URI.create(schemaDefinitionStr));
- schemaDefinitionStr = Files.readString(filePath, StandardCharsets.US_ASCII);
+ return AVRO_SCHEMA_CACHE.computeIfAbsent(definitionStr, __ -> {
+ String schemaDefinitionStr = definitionStr;
+ if (schemaDefinitionStr.startsWith("file://")) {
+ try {
+ Path filePath = Paths.get(URI.create(schemaDefinitionStr));
+ schemaDefinitionStr = Files.readString(filePath, StandardCharsets.UTF_8);
+ } catch (IOException ioe) {
+ throw new PulsarAdapterUnexpectedException("Error reading the specified \"Avro\" schema definition file: " + definitionStr + ": " + ioe.getMessage());
+ }
}
- catch (IOException ioe) {
- throw new PulsarAdapterUnexpectedException(
- "Error reading the specified \"Avro\" schema definition file: " + definitionStr + ": " + ioe.getMessage());
- }
- }
-
- schema = PulsarAvroSchemaUtil.GetSchema_PulsarAvro("NBAvro", schemaDefinitionStr);
+ return PulsarAvroSchemaUtil.GetSchema_PulsarAvro("NBAvro", schemaDefinitionStr);
+ });
+ } else {
+ throw new PulsarAdapterInvalidParamException("Trying to create a \"Avro\" schema for a non-Avro schema type string: " + typeStr);
}
- else {
- throw new PulsarAdapterInvalidParamException(
- "Trying to create a \"Avro\" schema for a non-Avro schema type string: " + typeStr);
- }
-
- return schema;
- }
-
- ///////
- // Generate effective key string
- public static String buildCacheKey(String... keyParts) {
- // Ignore blank keyPart
- String joinedKeyStr =
- Stream.of(keyParts)
- .filter(s -> !StringUtils.isBlank(s))
- .collect(Collectors.joining(","));
-
- return Base64.getEncoder().encodeToString(joinedKeyStr.getBytes());
}
///////
// Convert JSON string to a key/value map
- public static Map convertJsonToMap(String jsonStr) throws Exception {
- ObjectMapper mapper = new ObjectMapper();
- return mapper.readValue(jsonStr, Map.class);
+ private static final ObjectMapper JACKSON_OBJECT_MAPPER = new ObjectMapper();
+ private static final TypeReference