diff --git a/.all-contributorsrc b/.all-contributorsrc new file mode 100644 index 000000000..cd5020d17 --- /dev/null +++ b/.all-contributorsrc @@ -0,0 +1,484 @@ +{ + "projectOwner": "nosqlbench", + "projectName": "nosqlbench", + "repoType": "github", + "repoHost": "https://github.com", + "files": [ + "CONTRIBUTING.md" + ], + "imageSize": 50, + "commit": false, + "commitConvention": "angular", + "contributorsPerLine": 6, + "contributorsSortAlphabetically": false, + "linkToUsage": false, + "skipCi": true, + "contributors": [ + { + "login": "jshook", + "name": "Jonathan Shook", + "avatar_url": "https://avatars.githubusercontent.com/u/2148847?v=4", + "profile": "https://github.com/jshook", + "contributions": [ + "review", + "tool", + "bug", + "business", + "code", + "content", + "data", + "doc", + "design", + "example", + "ideas", + "infra", + "maintenance", + "mentoring", + "platform", + "plugin", + "projectManagement", + "research", + "security", + "test", + "userTesting" + ] + }, + { + "login": "MikeYaacoubStax", + "name": "MikeYaacoubStax", + "avatar_url": "https://avatars.githubusercontent.com/u/117678633?v=4", + "profile": "https://github.com/MikeYaacoubStax", + "contributions": [ + "review", + "tool", + "bug", + "business", + "code", + "content", + "data", + "doc", + "design", + "example", + "ideas", + "infra", + "maintenance", + "mentoring", + "platform", + "plugin", + "projectManagement", + "research", + "security", + "test", + "userTesting" + ] + }, + { + "login": "jeffbanks", + "name": "Jeff Banks", + "avatar_url": "https://avatars.githubusercontent.com/u/4078933?v=4", + "profile": "http://jjbanks.com", + "contributions": [ + "code", + "mentoring", + "test", + "bug", + "business", + "content", + "data", + "doc", + "design", + "example", + "ideas", + "infra", + "maintenance", + "platform", + "plugin", + "projectManagement", + "research", + "review", + "security", + "tool", + "userTesting" + ] + }, + { + "login": "msmygit", + "name": "Madhavan", + 
"avatar_url": "https://avatars.githubusercontent.com/u/19366623?v=4", + "profile": "https://github.com/msmygit", + "contributions": [ + "code", + "bug", + "doc", + "ideas", + "question", + "research", + "review", + "tool", + "userTesting", + "talk", + "tutorial" + ] + }, + { + "login": "hemidactylus", + "name": "Stefano Lottini", + "avatar_url": "https://avatars.githubusercontent.com/u/14221764?v=4", + "profile": "https://github.com/hemidactylus", + "contributions": [ + "bug", + "blog", + "code", + "content", + "data", + "doc", + "example", + "eventOrganizing", + "mentoring", + "promotion", + "research", + "tutorial", + "userTesting", + "video" + ] + }, + { + "login": "phact", + "name": "SebastiΓ‘n EstΓ©vez", + "avatar_url": "https://avatars.githubusercontent.com/u/1313220?v=4", + "profile": "https://github.com/phact", + "contributions": [ + "bug", + "design", + "business", + "code", + "content", + "data", + "doc", + "ideas", + "promotion", + "research" + ] + }, + { + "login": "smccarthy788", + "name": "Sean McCarthy", + "avatar_url": "https://avatars.githubusercontent.com/u/6601281?v=4", + "profile": "https://github.com/smccarthy788", + "contributions": [ + "mentoring", + "ideas", + "code", + "test" + ] + }, + { + "login": "yabinmeng", + "name": "yabinmeng", + "avatar_url": "https://avatars.githubusercontent.com/u/16789452?v=4", + "profile": "https://github.com/yabinmeng", + "contributions": [ + "bug", + "test" + ] + }, + { + "login": "eolivelli", + "name": "Enrico Olivelli", + "avatar_url": "https://avatars.githubusercontent.com/u/9469110?v=4", + "profile": "http://eolivelli.blogspot.it/", + "contributions": [ + "test", + "code", + "review" + ] + }, + { + "login": "lhotari", + "name": "Lari Hotari", + "avatar_url": "https://avatars.githubusercontent.com/u/66864?v=4", + "profile": "https://github.com/lhotari", + "contributions": [ + "bug", + "code", + "review" + ] + }, + { + "login": "mfleming", + "name": "Matt Fleming", + "avatar_url": 
"https://avatars.githubusercontent.com/u/94254?v=4", + "profile": "http://www.codeblueprint.co.uk", + "contributions": [ + "bug", + "design" + ] + }, + { + "login": "tjake", + "name": "Jake Luciani", + "avatar_url": "https://avatars.githubusercontent.com/u/44456?v=4", + "profile": "https://github.com/tjake", + "contributions": [ + "bug", + "ideas" + ] + }, + { + "login": "lakshmi-M18", + "name": "Lakshmi Manjunatha", + "avatar_url": "https://avatars.githubusercontent.com/u/89935678?v=4", + "profile": "https://github.com/lakshmi-M18", + "contributions": [ + "bug" + ] + }, + { + "login": "pingtimeout", + "name": "Pierre Laporte", + "avatar_url": "https://avatars.githubusercontent.com/u/1159578?v=4", + "profile": "http://www.pingtimeout.fr", + "contributions": [ + "ideas", + "bug" + ] + }, + { + "login": "tatu-at-datastax", + "name": "Tatu Saloranta", + "avatar_url": "https://avatars.githubusercontent.com/u/87213665?v=4", + "profile": "https://github.com/tatu-at-datastax", + "contributions": [ + "doc" + ] + }, + { + "login": "alexott", + "name": "Alex Ott", + "avatar_url": "https://avatars.githubusercontent.com/u/30342?v=4", + "profile": "http://alexott.net", + "contributions": [ + "platform", + "bug", + "code" + ] + }, + { + "login": "jeffreyscarpenter", + "name": "Jeffrey Carpenter", + "avatar_url": "https://avatars.githubusercontent.com/u/12115970?v=4", + "profile": "https://github.com/jeffreyscarpenter", + "contributions": [ + "bug", + "test", + "maintenance" + ] + }, + { + "login": "yassermohamed81", + "name": "yassermohamed81", + "avatar_url": "https://avatars.githubusercontent.com/u/53837411?v=4", + "profile": "https://github.com/yassermohamed81", + "contributions": [ + "code" + ] + }, + { + "login": "Pierrotws", + "name": "Pierre Sauvage", + "avatar_url": "https://avatars.githubusercontent.com/u/6002161?v=4", + "profile": "https://github.com/Pierrotws", + "contributions": [ + "code" + ] + }, + { + "login": "dougwettlaufer", + "name": "Doug Wettlaufer", + 
"avatar_url": "https://avatars.githubusercontent.com/u/45750136?v=4", + "profile": "https://github.com/dougwettlaufer", + "contributions": [ + "test" + ] + }, + { + "login": "jeromatron", + "name": "Jeremy Hanna", + "avatar_url": "https://avatars.githubusercontent.com/u/254887?v=4", + "profile": "http://jeromatron.blogspot.com", + "contributions": [ + "test", + "ideas" + ] + }, + { + "login": "alicel", + "name": "Alice Lottini", + "avatar_url": "https://avatars.githubusercontent.com/u/2972347?v=4", + "profile": "https://github.com/alicel", + "contributions": [ + "bug", + "ideas", + "test" + ] + }, + { + "login": "EricBorczuk", + "name": "Eric Borczuk", + "avatar_url": "https://avatars.githubusercontent.com/u/4205492?v=4", + "profile": "https://github.com/EricBorczuk", + "contributions": [ + "code", + "review", + "test" + ] + }, + { + "login": "weideng1", + "name": "weideng1", + "avatar_url": "https://avatars.githubusercontent.com/u/5520525?v=4", + "profile": "https://github.com/weideng1", + "contributions": [ + "test", + "ideas", + "code" + ] + }, + { + "login": "ivansenic", + "name": "Ivan Senic", + "avatar_url": "https://avatars.githubusercontent.com/u/10600041?v=4", + "profile": "https://github.com/ivansenic", + "contributions": [ + "test" + ] + }, + { + "login": "justinchuch", + "name": "Justin Chu", + "avatar_url": "https://avatars.githubusercontent.com/u/15710241?v=4", + "profile": "https://justinchuch.wordpress.com/", + "contributions": [ + "code", + "test", + "review" + ] + }, + { + "login": "ds-steven-matison", + "name": "Steven Matison", + "avatar_url": "https://avatars.githubusercontent.com/u/70520951?v=4", + "profile": "https://ds-steven-matison.github.io/", + "contributions": [ + "test", + "ideas" + ] + }, + { + "login": "szimmer1", + "name": "shahar z", + "avatar_url": "https://avatars.githubusercontent.com/u/8455475?v=4", + "profile": "https://github.com/szimmer1", + "contributions": [ + "ideas", + "code" + ] + }, + { + "login": "ncarvind", + "name": 
"ncarvind", + "avatar_url": "https://avatars.githubusercontent.com/u/70302571?v=4", + "profile": "https://github.com/ncarvind", + "contributions": [ + "code", + "test" + ] + }, + { + "login": "MMirelli", + "name": "Massimiliano Mirelli", + "avatar_url": "https://avatars.githubusercontent.com/u/22191891?v=4", + "profile": "https://github.com/MMirelli", + "contributions": [ + "platform" + ] + }, + { + "login": "derrickCos", + "name": "Derrick Cosmas", + "avatar_url": "https://avatars.githubusercontent.com/u/25781387?v=4", + "profile": "https://github.com/derrickCos", + "contributions": [ + "code", + "ideas" + ] + }, + { + "login": "grighetto", + "name": "Gianluca Righetto", + "avatar_url": "https://avatars.githubusercontent.com/u/413792?v=4", + "profile": "https://github.com/grighetto", + "contributions": [ + "platform" + ] + }, + { + "login": "BrynCooke", + "name": "Bryn Cooke", + "avatar_url": "https://avatars.githubusercontent.com/u/747836?v=4", + "profile": "https://github.com/BrynCooke", + "contributions": [ + "doc" + ] + }, + { + "login": "KatSarah", + "name": "KatSarah", + "avatar_url": "https://avatars.githubusercontent.com/u/658015?v=4", + "profile": "https://github.com/KatSarah", + "contributions": [ + "ideas" + ] + }, + { + "login": "peytoncasper", + "name": "Peyton Casper", + "avatar_url": "https://avatars.githubusercontent.com/u/8305883?v=4", + "profile": "https://github.com/peytoncasper", + "contributions": [ + "ideas", + "code" + ] + }, + { + "login": "landim", + "name": "Arthur Costa", + "avatar_url": "https://avatars.githubusercontent.com/u/91446?v=4", + "profile": "https://github.com/landim", + "contributions": [ + "bug" + ] + }, + { + "login": "guyboltonking", + "name": "Guy Bolton King", + "avatar_url": "https://avatars.githubusercontent.com/u/98294?v=4", + "profile": "https://github.com/guyboltonking", + "contributions": [ + "bug", + "code", + "ideas" + ] + }, + { + "login": "XN137", + "name": "Christopher Lambert", + "avatar_url": 
"https://avatars.githubusercontent.com/u/1204398?v=4", + "profile": "https://github.com/XN137", + "contributions": [ + "code", + "ideas" + ] + } + ] +} diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ed002fbf7..20f1f52b1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -79,6 +79,7 @@ jobs: builddocs: needs: build runs-on: ubuntu-20.04 + if: ${{ github.repository == 'nosqlbench/nosqlbench' && github.event_name == 'push' && github.ref_name == 'main' }} steps: - name: set git username diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 208e1da06..311e5fcdf 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,3 +1,5 @@ +[![All Contributors](https://img.shields.io/github/all-contributors/nosqlbench/nosqlbench?color=ee8449&style=flat-square)](#contributors) + NoSQLBench is an ambitious project. It aims to solve long-standing problems in distributed systems testing. There are *many* ways you can contribute! Please take a moment to review this document in order to make the contribution process easy and effective for everyone involved. @@ -117,5 +119,73 @@ are eager to get it into the hands of users who need it. [discord server](https://discord.gg/dBHRakusMN) and raise your hand! +## Contributors +Thanks to these contributors! :sparkle: +For recognizing contributions, please follow [this documentation](https://allcontributors.org/docs/en/bot/usage) and pick a key/contribution type from [here](https://allcontributors.org/docs/en/emoji-key). + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Jonathan Shook
Jonathan Shook

πŸ‘€ πŸ”§ πŸ› πŸ’Ό πŸ’» πŸ–‹ πŸ”£ πŸ“– 🎨 πŸ’‘ πŸ€” πŸš‡ 🚧 πŸ§‘β€πŸ« πŸ“¦ πŸ”Œ πŸ“† πŸ”¬ πŸ›‘οΈ ⚠️ πŸ““
MikeYaacoubStax
MikeYaacoubStax

πŸ‘€ πŸ”§ πŸ› πŸ’Ό πŸ’» πŸ–‹ πŸ”£ πŸ“– 🎨 πŸ’‘ πŸ€” πŸš‡ 🚧 πŸ§‘β€πŸ« πŸ“¦ πŸ”Œ πŸ“† πŸ”¬ πŸ›‘οΈ ⚠️ πŸ““
Jeff Banks
Jeff Banks

πŸ’» πŸ§‘β€πŸ« ⚠️ πŸ› πŸ’Ό πŸ–‹ πŸ”£ πŸ“– 🎨 πŸ’‘ πŸ€” πŸš‡ 🚧 πŸ“¦ πŸ”Œ πŸ“† πŸ”¬ πŸ‘€ πŸ›‘οΈ πŸ”§ πŸ““
Madhavan
Madhavan

πŸ’» πŸ› πŸ“– πŸ€” πŸ’¬ πŸ”¬ πŸ‘€ πŸ”§ πŸ““ πŸ“’ βœ…
Stefano Lottini
Stefano Lottini

πŸ› πŸ“ πŸ’» πŸ–‹ πŸ”£ πŸ“– πŸ’‘ πŸ“‹ πŸ§‘β€πŸ« πŸ“£ πŸ”¬ βœ… πŸ““ πŸ“Ή
SebastiΓ‘n EstΓ©vez
SebastiΓ‘n EstΓ©vez

πŸ› 🎨 πŸ’Ό πŸ’» πŸ–‹ πŸ”£ πŸ“– πŸ€” πŸ“£ πŸ”¬
Sean McCarthy
Sean McCarthy

πŸ§‘β€πŸ« πŸ€” πŸ’» ⚠️
yabinmeng
yabinmeng

πŸ› ⚠️
Enrico Olivelli
Enrico Olivelli

⚠️ πŸ’» πŸ‘€
Lari Hotari
Lari Hotari

πŸ› πŸ’» πŸ‘€
Matt Fleming
Matt Fleming

πŸ› 🎨
Jake Luciani
Jake Luciani

πŸ› πŸ€”
Lakshmi Manjunatha
Lakshmi Manjunatha

πŸ›
Pierre Laporte
Pierre Laporte

πŸ€” πŸ›
Tatu Saloranta
Tatu Saloranta

πŸ“–
Alex Ott
Alex Ott

πŸ“¦ πŸ› πŸ’»
Jeffrey Carpenter
Jeffrey Carpenter

πŸ› ⚠️ 🚧
yassermohamed81
yassermohamed81

πŸ’»
Pierre Sauvage
Pierre Sauvage

πŸ’»
Doug Wettlaufer
Doug Wettlaufer

⚠️
Jeremy Hanna
Jeremy Hanna

⚠️ πŸ€”
Alice Lottini
Alice Lottini

πŸ› πŸ€” ⚠️
Eric Borczuk
Eric Borczuk

πŸ’» πŸ‘€ ⚠️
weideng1
weideng1

⚠️ πŸ€” πŸ’»
Ivan Senic
Ivan Senic

⚠️
Justin Chu
Justin Chu

πŸ’» ⚠️ πŸ‘€
Steven Matison
Steven Matison

⚠️ πŸ€”
shahar z
shahar z

πŸ€” πŸ’»
ncarvind
ncarvind

πŸ’» ⚠️
Massimiliano Mirelli
Massimiliano Mirelli

πŸ“¦
Derrick Cosmas
Derrick Cosmas

πŸ’» πŸ€”
Gianluca Righetto
Gianluca Righetto

πŸ“¦
Bryn Cooke
Bryn Cooke

πŸ“–
KatSarah
KatSarah

πŸ€”
Peyton Casper
Peyton Casper

πŸ€” πŸ’»
Arthur Costa
Arthur Costa

πŸ›
Guy Bolton King
Guy Bolton King

πŸ› πŸ’» πŸ€”
Christopher Lambert
Christopher Lambert

πŸ’» πŸ€”
+ + + + + +--- diff --git a/README.md b/README.md index f3e9578b7..dc79a6c55 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ [comment]: < ![build](https://github.com/nosqlbench/nosqlbench/workflows/build/badge.svg) > - +[![Maven Central](https://maven-badges.herokuapp.com/maven-central/io.nosqlbench/nosqlbench/badge.svg)](https://maven-badges.herokuapp.com/maven-central/io.nosqlbench/nosqlbench) +[![Star on Github](https://img.shields.io/github/stars/nosqlbench/nosqlbench.svg?style=social)](https://github.com/nosqlbench/nosqlbench/stargazers) +[![Chat on Discord](https://img.shields.io/discord/819995781406130176?logo=discord)](https://discord.gg/dBHRakusMN) # NoSQLBench v5 @@ -104,3 +106,9 @@ available, but more work is needed to support them fully. Here is what is suppor + + +## Contributors +Check out all our wonderful contributors [here](./CONTRIBUTING.md#contributors). + +--- \ No newline at end of file diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/cqlgen/core/CGWorkloadExporter.java b/adapter-cqld4/src/main/java/io/nosqlbench/cqlgen/core/CGWorkloadExporter.java index d9415a00b..0794067b9 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/cqlgen/core/CGWorkloadExporter.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/cqlgen/core/CGWorkloadExporter.java @@ -320,9 +320,9 @@ public class CGWorkloadExporter implements BundledApp { put("default", new LinkedHashMap<>() {{ - put("schema", "run driver=cql tags=block:'schema-*.*' threads===UNDEF cycles===UNDEF"); + put("schema", "run driver=cql tags=block:\"schema.*\" threads===UNDEF cycles===UNDEF"); put("rampup", "run driver=cql tags=block:rampup threads=auto cycles===TEMPLATE(rampup-cycles,10000)"); - put("main", "run driver=cql tags=block:'main-*.*' threads=auto cycles===TEMPLATE(main-cycles,10000)"); + put("main", "run driver=cql tags=block:\"main.*\" threads=auto cycles===TEMPLATE(main-cycles,10000)"); }}); put("main-insert", "run driver=cql tags=block:main-insert threads=auto 
cycles===TEMPLATE(main-cycles,10000)"); diff --git a/adapter-cqld4/src/main/resources/activities/baselines/cql-iot-dse.yaml b/adapter-cqld4/src/main/resources/activities/baselines/cql-iot-dse.yaml index 1bc40fad9..ae83d127f 100644 --- a/adapter-cqld4/src/main/resources/activities/baselines/cql-iot-dse.yaml +++ b/adapter-cqld4/src/main/resources/activities/baselines/cql-iot-dse.yaml @@ -2,8 +2,8 @@ description: An IOT workload with more optimal settings for DSE scenarios: default: schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF - rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto + rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,100) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,100) threads=auto bindings: machine_id: Mod(<>); ToHashedUUID() -> java.util.UUID diff --git a/adapter-cqld4/src/main/resources/activities/baselines/cql-iot.yaml b/adapter-cqld4/src/main/resources/activities/baselines/cql-iot.yaml index 6250cc8ce..3daaf9eda 100644 --- a/adapter-cqld4/src/main/resources/activities/baselines/cql-iot.yaml +++ b/adapter-cqld4/src/main/resources/activities/baselines/cql-iot.yaml @@ -7,11 +7,11 @@ scenarios: default: schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto astra: schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) 
threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto params: instrument: TEMPLATE(instrument,false) diff --git a/adapter-cqld4/src/main/resources/activities/baselines/cql-keyvalue-astra.yaml b/adapter-cqld4/src/main/resources/activities/baselines/cql-keyvalue-astra.yaml index 7ef9285d3..17bcc06a4 100644 --- a/adapter-cqld4/src/main/resources/activities/baselines/cql-keyvalue-astra.yaml +++ b/adapter-cqld4/src/main/resources/activities/baselines/cql-keyvalue-astra.yaml @@ -5,11 +5,11 @@ scenarios: default: schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=cql tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto astra: schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=cql tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto bindings: seq_key: Mod(<>); ToString() -> String diff --git a/adapter-cqld4/src/main/resources/activities/baselines/cql-starter.yaml b/adapter-cqld4/src/main/resources/activities/baselines/cql-starter.yaml new file mode 100644 index 000000000..120fb6753 --- /dev/null +++ b/adapter-cqld4/src/main/resources/activities/baselines/cql-starter.yaml @@ -0,0 +1,79 @@ +description: | + A cql-starter workload primarily for: + * Cassandra: 3.x, 4.x. + * DataStax Enterprise: 6.8.x. + * DataStax Astra. 
+ +scenarios: + default: + schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF + rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10) threads=auto + astra: + schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF + rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10) threads=auto + +params: + x: y + +bindings: + machine_id: Mod(<>); ToHashedUUID() -> java.util.UUID + message: ToString(); TextOfFile('data/cql-starter-message.txt') + time: Mul(100L); Div(10000L); ToJavaInstant() + timestamp: Mul(<>L); Div(<>L); Mul(1000L); + +blocks: + schema: + params: + prepared: false + ops: + create-keyspace: | + create keyspace if not exists <> + WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '<>'} + AND durable_writes = true; + create-table: | + create table if not exists <>.<> ( + machine_id UUID, + message text, + time timestamp, + PRIMARY KEY ((machine_id), time) + ) WITH CLUSTERING ORDER BY (time DESC); +# truncate-table: | +# truncate table <>.<>; + schema-astra: + params: + prepared: false + ops: + create-table-astra: | + create table if not exists <>.<> ( + machine_id UUID, + message text, + time timestamp, + PRIMARY KEY ((machine_id), time) + ) WITH CLUSTERING ORDER BY (time DESC); + rampup: + params: + cl: <> + idempotent: true + ops: + insert-rampup: | + insert into <>.<> (machine_id, message, time) + values ({machine_id}, {message}, {time}) using timestamp {timestamp}; + main-read: + params: + ratio: <> + cl: <> + ops: + select-read: | + select * from <>.<> + where machine_id={machine_id}; + main-write: + params: + ratio: <> + cl: <> + idempotent: true + ops: + insert-main: | + insert into <>.<> + (machine_id, message, time) values ({machine_id}, {message}, {time}) using timestamp 
{timestamp}; \ No newline at end of file diff --git a/adapter-cqld4/src/main/resources/activities/baselines/incremental.yaml b/adapter-cqld4/src/main/resources/activities/baselines/incremental.yaml index 9606236b6..5220be278 100644 --- a/adapter-cqld4/src/main/resources/activities/baselines/incremental.yaml +++ b/adapter-cqld4/src/main/resources/activities/baselines/incremental.yaml @@ -29,11 +29,11 @@ scenarios: default: schema: run tags=block:schema.* threads==1 main: run tags=block:main-.*.* cycles===TEMPLATE(main-cycles,0) threads=auto - default-schema: run tags=block:'schema.*' threads==1 - default-main: run tags=block:'main.*' cycles===TEMPLATE(main-cycles,0) threads=auto + default-schema: run tags=block:"schema.*" threads==1 + default-main: run tags=block:"main.*" cycles===TEMPLATE(main-cycles,0) threads=auto astra: schema: run tags=block:astra-schema threads==1 - main: run tags=block:'main.*' cycles===TEMPLATE(main-cycles,0) threads=auto + main: run tags=block:"main.*" cycles===TEMPLATE(main-cycles,0) threads=auto params: instrument: true diff --git a/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-tabular2.yaml b/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-tabular2.yaml index 67d3947f9..150665129 100644 --- a/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-tabular2.yaml +++ b/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-tabular2.yaml @@ -28,12 +28,12 @@ description: | scenarios: default: schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF - rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10B) threads=auto - main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,100M) threads=auto + rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,100) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,100) threads=auto astra: schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF - 
rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto + rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,100) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,100) threads=auto params: instrument: true @@ -100,13 +100,13 @@ blocks: rampup-insert: | insert into TEMPLATE(keyspace,baselines).TEMPLATE(table,tabular) (part,clust,data0,data1,data2,data3,data4,data5,data6,data7) - values ({part_layout},{clust_layout},{data0},{data1},{data2},{data3},{data4},{data5},{data6},{data7}) + values ({part_layout},{clust_layout},{data0},{data1},{data2},{data3},{data4},{data5},{data6},{data7}); verify: params: cl: TEMPLATE(read_cl,LOCAL_QUORUM) ops: verify-select: | - select * from TEMPLATE(keyspace,baselines).TEMPLATE(table,tabular) where part={part_layout} and clust={clust_layout} + select * from TEMPLATE(keyspace,baselines).TEMPLATE(table,tabular) where part={part_layout} and clust={clust_layout}; main-read: params: ratio: 1 @@ -136,4 +136,4 @@ blocks: main-write: | insert into TEMPLATE(keyspace,baselines).TEMPLATE(table,tabular) (part, clust, data0,data1,data2,data3,data4,data5,data6,data7) - values ({part_write},{clust_write},{data0},{data1},{data2},{data3},{data4},{data5},{data6},{data7}) + values ({part_write},{clust_write},{data0},{data1},{data2},{data3},{data4},{data5},{data6},{data7}) \ No newline at end of file diff --git a/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-timeseries2.yaml b/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-timeseries2.yaml index 60cac37b0..afec3d713 100644 --- a/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-timeseries2.yaml +++ b/adapter-cqld4/src/main/resources/activities/baselinesv2/cql-timeseries2.yaml @@ -7,11 +7,11 @@ scenarios: default: schema: run driver=cql tags==block:schema threads==1 
cycles==UNDEF rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto astra: schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=cql tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto params: instrument: TEMPLATE(instrument,false) diff --git a/adapter-cqld4/src/main/resources/cqlgen/cqlgen.conf b/adapter-cqld4/src/main/resources/cqlgen/cqlgen.conf index 7c12bcaea..0aab9c35c 100644 --- a/adapter-cqld4/src/main/resources/cqlgen/cqlgen.conf +++ b/adapter-cqld4/src/main/resources/cqlgen/cqlgen.conf @@ -126,7 +126,7 @@ timeouts: blockplan: - # not needed when tags=block:'schema.*' + # not needed when tags=block:"schema.*" # schema: schema-keyspaces, schema-tables, schema-types schema-keyspaces: schema-keyspaces schema-tables: schema-tables diff --git a/adapter-cqld4/src/main/resources/curate_docs/cql_olddocs.md b/adapter-cqld4/src/main/resources/curate_docs/cql_olddocs.md index 5f3413716..4a655b948 100644 --- a/adapter-cqld4/src/main/resources/curate_docs/cql_olddocs.md +++ b/adapter-cqld4/src/main/resources/curate_docs/cql_olddocs.md @@ -160,7 +160,7 @@ activity types. - **ssl** - specifies the type of the SSL implementation. Disabled by default, possible values are `jdk` and `openssl`. - [Additional parameters may need to be provided](ssl.md). + See the ssl help topic with `nb5 help ssl` for more details. - **jmxreporting** - enable JMX reporting if needed. 
Examples: diff --git a/adapter-cqld4/src/main/resources/data/cql-starter-message.txt b/adapter-cqld4/src/main/resources/data/cql-starter-message.txt new file mode 100644 index 000000000..2d56aa368 --- /dev/null +++ b/adapter-cqld4/src/main/resources/data/cql-starter-message.txt @@ -0,0 +1 @@ +Welcome to cql-starter! \ No newline at end of file diff --git a/adapter-cqld4/src/main/resources/curate_docs/ssl.md b/adapter-cqld4/src/main/resources/ssl.md similarity index 100% rename from adapter-cqld4/src/main/resources/curate_docs/ssl.md rename to adapter-cqld4/src/main/resources/ssl.md diff --git a/adapter-cqld4/src/test/resources/testschemas/cql_alltypes.yaml b/adapter-cqld4/src/test/resources/testschemas/cql_alltypes.yaml index d952f3c03..dd7c96517 100644 --- a/adapter-cqld4/src/test/resources/testschemas/cql_alltypes.yaml +++ b/adapter-cqld4/src/test/resources/testschemas/cql_alltypes.yaml @@ -1,9 +1,9 @@ description: Auto-generated workload from source schema. scenarios: default: - schema: run driver=cql tags=block:'schema.*' threads===UNDEF cycles===UNDEF - rampup: run driver=cql tags=block:'rampup.*' threads=auto cycles===TEMPLATE(rampup-cycles,10000) - main: run driver=cql tags=block:'main.*' threads=auto cycles===TEMPLATE(main-cycles,10000) + schema: run driver=cql tags=block:"schema.*" threads===UNDEF cycles===UNDEF + rampup: run driver=cql tags=block:"rampup.*" threads=auto cycles===TEMPLATE(rampup-cycles,10000) + main: run driver=cql tags=block:"main.*" threads=auto cycles===TEMPLATE(main-cycles,10000) main-insert: run driver=cql tags=block:main-insert threads=auto cycles===TEMPLATE(main-cycles,10000) main-select: run driver=cql tags=block:main-select threads=auto cycles===TEMPLATE(main-cycles,10000) main-scan: run driver=cql tags=block:main-scan threads=auto cycles===TEMPLATE(main-cycles,10000) diff --git a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-keyvalue2.yaml 
b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-keyvalue2.yaml index af52ac337..6509963cd 100644 --- a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-keyvalue2.yaml +++ b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-keyvalue2.yaml @@ -6,9 +6,9 @@ description: | scenarios: default: - schema: run driver=dynamodb tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=dynamodb tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=dynamodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=dynamodb tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto + main: run driver=dynamodb tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto read: run driver=dynamodb tags==block:main-read cycles===TEMPLATE(main-cycles,10000000) threads=auto write: run driver=dynamodb tags==block:main-write cycles===TEMPLATE(main-cycles,10000000) threads=auto diff --git a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-tabular2.yaml b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-tabular2.yaml index ac531efe8..acc0acd51 100644 --- a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-tabular2.yaml +++ b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-tabular2.yaml @@ -4,10 +4,10 @@ description: | Run a read/write workload against DynamoDB with varying field sizes and query patterns scenarios: - schema: run driver=dynamodb tags=block:'schema.*' region=us-east-1 + schema: run driver=dynamodb tags=block:"schema.*" region=us-east-1 rampup: run driver=dynamodb tags=block:rampup region=us-east-1 read: run driver=dynamodb tags=block:read region=us-east-1 - main: run driver=dynamodb tags=block:'main-*.*' region=us-east-1 + main: run driver=dynamodb tags=block:"main.*" region=us-east-1 read01: run driver=dynamodb tags='name:.*main-read-01' region=us-east-1 
delete: table: run driver=dynamodb tags==block:delete threads==1 cycles==UNDEF diff --git a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-timeseries2.yaml b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-timeseries2.yaml index 34a76afb6..2c53654b8 100644 --- a/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-timeseries2.yaml +++ b/adapter-dynamodb/src/main/resources/activities/baselinesv2/dynamodb-timeseries2.yaml @@ -11,7 +11,7 @@ description: | scenarios: default: - schema: run driver=dynamodb tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=dynamodb tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=dynamodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto main: run driver=dynamodb tags==block:main cycles===TEMPLATE(main-cycles,10000000) threads=auto delete: diff --git a/adapter-http/src/main/resources/activities/baselines/http-rest-keyvalue.yaml b/adapter-http/src/main/resources/activities/baselines/http-rest-keyvalue.yaml index d186f6e99..80a5d127c 100644 --- a/adapter-http/src/main/resources/activities/baselines/http-rest-keyvalue.yaml +++ b/adapter-http/src/main/resources/activities/baselines/http-rest-keyvalue.yaml @@ -9,8 +9,8 @@ description: | scenarios: default: schema: run driver=http tags==block:schema threads==1 cycles==UNDEF - rampup: run driver=http tags==block:"rampup-*.*" cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=http tags==block:"main-*.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto + rampup: run driver=http tags==block:"rampup.*" cycles===TEMPLATE(rampup-cycles,10000000) threads=auto + main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto bindings: # To enable an optional weighted set of hosts in place of a load balancer diff --git a/adapter-http/src/main/resources/activities/baselines/http-rest-tabular.yaml 
b/adapter-http/src/main/resources/activities/baselines/http-rest-tabular.yaml index 8c3534ff0..2a9f278b2 100644 --- a/adapter-http/src/main/resources/activities/baselines/http-rest-tabular.yaml +++ b/adapter-http/src/main/resources/activities/baselines/http-rest-tabular.yaml @@ -9,9 +9,9 @@ description: | scenarios: default: - schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF - rampup: run driver=http tags==block:'rampup-*.*' cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=http tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto + schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF + rampup: run driver=http tags==block:"rampup.*" cycles===TEMPLATE(rampup-cycles,10000000) threads=auto + main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto bindings: # To enable an optional weighted set of hosts in place of a load balancer diff --git a/adapter-http/src/main/resources/activities/baselines/http-rest-timeseries.yaml b/adapter-http/src/main/resources/activities/baselines/http-rest-timeseries.yaml index cf77c6cb4..be6fedc24 100644 --- a/adapter-http/src/main/resources/activities/baselines/http-rest-timeseries.yaml +++ b/adapter-http/src/main/resources/activities/baselines/http-rest-timeseries.yaml @@ -12,10 +12,10 @@ description: | scenarios: default: - schema: run driver=cql tags==block:'schema-*.*' threads==1 cycles==UNDEF + schema: run driver=cql tags==block:"schema.*" threads==1 cycles==UNDEF schema-astra: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF - rampup: run driver=http tags==block:'rampup-*.*' cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=http tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto + rampup: run driver=http tags==block:"rampup.*" cycles===TEMPLATE(rampup-cycles,10000000) threads=auto + main: run driver=http tags==block:"main.*" 
cycles===TEMPLATE(main-cycles,10000000) threads=auto bindings: # To enable an optional weighted set of hosts in place of a load balancer diff --git a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-basic.yaml b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-basic.yaml index bf1cf8417..b644da7d2 100644 --- a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-basic.yaml +++ b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-basic.yaml @@ -7,7 +7,7 @@ description: | scenarios: default: - schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF write: run driver=http tags==block:'write.*' cycles===TEMPLATE(write-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn read: run driver=http tags==block:'read.*' cycles===TEMPLATE(read-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn update: run driver=http tags==block:'update.*' cycles===TEMPLATE(update-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn diff --git a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-dataset.yaml b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-dataset.yaml index e67d61787..5a93c57ef 100644 --- a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-dataset.yaml +++ b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-crud-dataset.yaml @@ -7,7 +7,7 @@ description: | scenarios: default: - schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF write: run driver=http tags==name:'write.*' cycles===TEMPLATE(write-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn read: run driver=http tags==name:'read.*' 
cycles===TEMPLATE(read-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn update: run driver=http tags==name:'update.*' cycles===TEMPLATE(update-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn diff --git a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-keyvalue.yaml b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-keyvalue.yaml index 4ed237879..cd1df59e5 100644 --- a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-keyvalue.yaml +++ b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-keyvalue.yaml @@ -10,7 +10,7 @@ description: | scenarios: default: - schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto main: run driver=http tags==block:main cycles===TEMPLATE(main-cycles,10000000) threads=auto diff --git a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-advanced.yaml b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-advanced.yaml index edf6229f3..49cb31a4e 100644 --- a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-advanced.yaml +++ b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-advanced.yaml @@ -15,7 +15,7 @@ description: | # complex2: (match1 LTE 0 OR match2 EQ "false") AND (match2 EQ "false" OR match3 EQ true) # complex3: (match1 LTE 0 AND match2 EQ "true") OR (match2 EQ "false" AND match3 EQ true) scenarios: - schema: run driver=http tags==block:'schema.*' threads==<> cycles==UNDEF + schema: run driver=http tags==block:"schema.*" threads==<> cycles==UNDEF rampup: write: run driver=http tags==name:'rampup-put.*' cycles===TEMPLATE(docscount,10000000) docpadding=TEMPLATE(docpadding,0) match-ratio=TEMPLATE(match-ratio,0.01) threads=<> 
errors=timer,warn read: run driver=http tags==block:'rampup-get.*' cycles===TEMPLATE(rampup-cycles, 10000000) page-size=TEMPLATE(page-size,3) fields=TEMPLATE(fields,%5b%5d) threads=<> errors=timer,warn diff --git a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-basic.yaml b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-basic.yaml index 8105dce04..1a6250677 100644 --- a/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-basic.yaml +++ b/adapter-http/src/main/resources/activities/documents-api/http-docsapi-search-basic.yaml @@ -7,7 +7,7 @@ description: | Note that docsapi_port should reflect the port where the Docs API is exposed (defaults to 8180). scenarios: - schema: run driver=http tags==block:'schema.*' threads==<> cycles==UNDEF + schema: run driver=http tags==block:"schema.*" threads==<> cycles==UNDEF rampup: write: run driver=http tags==name:'rampup-put.*' cycles===TEMPLATE(docscount,10000000) docpadding=TEMPLATE(docpadding,0) match-ratio=TEMPLATE(match-ratio,0.01) threads=<> errors=timer,warn read: run driver=http tags==name:'rampup-get.*' cycles===TEMPLATE(rampup-cycles, 10000000) page-size=TEMPLATE(page-size,3) fields=TEMPLATE(fields,%5b%5d) threads=<> errors=timer,warn diff --git a/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-keyvalue.yaml b/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-keyvalue.yaml index 794120747..b4363eaaf 100644 --- a/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-keyvalue.yaml +++ b/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-keyvalue.yaml @@ -9,7 +9,7 @@ description: | scenarios: default: - schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) 
threads=auto main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto diff --git a/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-tabular.yaml b/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-tabular.yaml index 777359c92..b44f48f51 100644 --- a/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-tabular.yaml +++ b/adapter-http/src/main/resources/activities/graphql-cql-first/http-graphql-cql-tabular.yaml @@ -10,9 +10,9 @@ description: | scenarios: default: - schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=http tags==block:'main.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto + main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto bindings: # To enable an optional weighted set of hosts in place of a load balancer diff --git a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-keyvalue.yaml b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-keyvalue.yaml index 19754747c..1901efdfc 100644 --- a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-keyvalue.yaml +++ b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-keyvalue.yaml @@ -13,9 +13,9 @@ description: | scenarios: default: - schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=http tags==block:'main.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto + main: run driver=http tags==block:"main.*" 
cycles===TEMPLATE(main-cycles,10000000) threads=auto bindings: # To enable an optional weighted set of hosts in place of a load balancer diff --git a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-tabular.yaml b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-tabular.yaml index 8f5004e26..e88b804d2 100644 --- a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-tabular.yaml +++ b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-tabular.yaml @@ -15,9 +15,9 @@ description: | scenarios: default: - schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - man: run driver=http tags==block:'main.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto + man: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto bindings: # To enable an optional weighted set of hosts in place of a load balancer diff --git a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-timeseries.yaml b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-timeseries.yaml index 7a6c28a9c..7bd663e9d 100644 --- a/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-timeseries.yaml +++ b/adapter-http/src/main/resources/activities/graphql-schema-first/http-graphql-timeseries.yaml @@ -19,9 +19,9 @@ description: | scenarios: default: - schema: run driver=http tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=http tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto - main: run driver=http tags==block:'main.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto + main: run driver=http 
tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto bindings: # To enable an optional weighted set of hosts in place of a load balancer diff --git a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml index bae4e0254..7c65abd87 100644 --- a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml +++ b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-keyvalue2.yaml @@ -9,7 +9,7 @@ description: | scenarios: default: - schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=mongodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto main: run driver=mongodb tags==block:'main-.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto drop: run driver=mongodb tags==block:drop-collection threads==1 cycles==UNDEF diff --git a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml index 7873cb49e..8539d3157 100644 --- a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml +++ b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-tabular2.yaml @@ -9,7 +9,7 @@ description: | scenarios: default: - schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=mongodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto main: run driver=mongodb tags==block:'main-.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto drop: run driver=mongodb tags==block:drop-collection threads==1 cycles==UNDEF diff --git a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml 
b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml index f7d519786..e8b248899 100644 --- a/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml +++ b/adapter-mongodb/src/main/resources/activities/baselinesv2/mongodb-timeseries2.yaml @@ -13,7 +13,7 @@ description: | scenarios: default: - schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF rampup: run driver=mongodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto main: run driver=mongodb tags==block:'main-.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto drop: run driver=mongodb tags==block:drop-collection threads==1 cycles==UNDEF diff --git a/adapter-mongodb/src/main/resources/activities/mongodb-basic.yaml b/adapter-mongodb/src/main/resources/activities/mongodb-basic.yaml index 7c3deb332..a502a3cf0 100644 --- a/adapter-mongodb/src/main/resources/activities/mongodb-basic.yaml +++ b/adapter-mongodb/src/main/resources/activities/mongodb-basic.yaml @@ -3,7 +3,7 @@ description: An example of a basic mongo insert and find. 
scenarios: default: rampup: run driver=mongodb tags==block:rampup cycles===TEMPLATE(rampup-cycles,1000000) threads=auto - main: run driver=mongodb tags==block:'main-*.*' cycles===TEMPLATE(main-cycles,10000000) threads=auto + main: run driver=mongodb tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto bindings: seq_key: Mod(<>L); ToInt() diff --git a/adapter-mongodb/src/main/resources/activities/mongodb-crud-basic.yaml b/adapter-mongodb/src/main/resources/activities/mongodb-crud-basic.yaml index 14bfc0c39..36c4d7a8d 100644 --- a/adapter-mongodb/src/main/resources/activities/mongodb-crud-basic.yaml +++ b/adapter-mongodb/src/main/resources/activities/mongodb-crud-basic.yaml @@ -6,7 +6,7 @@ description: | scenarios: default: - schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF write: run driver=mongodb tags==block:main-write,type:write cycles===TEMPLATE(write-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn read: run driver=mongodb tags==block:main-read,type:read cycles===TEMPLATE(read-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn update: run driver=mongodb tags==block:main-update,type:update cycles===TEMPLATE(update-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn diff --git a/adapter-mongodb/src/main/resources/activities/mongodb-crud-dataset.yaml b/adapter-mongodb/src/main/resources/activities/mongodb-crud-dataset.yaml index 9b22f455a..fa2c695e8 100644 --- a/adapter-mongodb/src/main/resources/activities/mongodb-crud-dataset.yaml +++ b/adapter-mongodb/src/main/resources/activities/mongodb-crud-dataset.yaml @@ -6,7 +6,7 @@ description: | scenarios: default: - schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF write: run driver=mongodb tags==block:main-write,type:write 
cycles===TEMPLATE(write-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn read: run driver=mongodb tags==block:main-read,type:read cycles===TEMPLATE(read-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn update: run driver=mongodb tags==block:main-update,type:update cycles===TEMPLATE(update-cycles,TEMPLATE(docscount,10000000)) threads=auto errors=timer,warn diff --git a/adapter-mongodb/src/main/resources/activities/mongodb-search-basic.yaml b/adapter-mongodb/src/main/resources/activities/mongodb-search-basic.yaml index 33d9c42be..e3a003199 100644 --- a/adapter-mongodb/src/main/resources/activities/mongodb-search-basic.yaml +++ b/adapter-mongodb/src/main/resources/activities/mongodb-search-basic.yaml @@ -6,7 +6,7 @@ description: | It's a counterpart of the Stargate's Documents API Basic Search workflow. scenarios: - schema: run driver=mongodb tags==block:'schema.*' threads==1 cycles==UNDEF + schema: run driver=mongodb tags==block:"schema.*" threads==1 cycles==UNDEF rampup-write: run driver=mongodb tags==block:rampup-write cycles===TEMPLATE(docscount,10000000) docpadding=TEMPLATE(docpadding,0) match-ratio=TEMPLATE(match-ratio,0.01) threads=auto errors=timer,warn rampup-read: run driver=mongodb tags==block:rampup-read cycles===TEMPLATE(rampup-cycles, 10000000) field-projection=TEMPLATE(fields,null) threads=<> errors=timer,warn main: run driver=mongodb tags==block:main cycles===TEMPLATE(read-cycles,TEMPLATE(docscount,10000000)) field-projection=TEMPLATE(fields,null) threads=<> errors=timer,warn diff --git a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/PulsarSpace.java b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/PulsarSpace.java index 4aba161f1..9cc5e44c6 100644 --- a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/PulsarSpace.java +++ b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/PulsarSpace.java @@ -32,8 +32,10 @@ import org.apache.pulsar.client.admin.PulsarAdminBuilder; import 
org.apache.pulsar.client.api.*; import org.apache.pulsar.common.schema.KeyValueEncodingType; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Supplier; public class PulsarSpace implements AutoCloseable { @@ -50,9 +52,18 @@ public class PulsarSpace implements AutoCloseable { private PulsarAdmin pulsarAdmin; private Schema pulsarSchema; - private final ConcurrentHashMap> producers = new ConcurrentHashMap<>(); - private final ConcurrentHashMap> consumers = new ConcurrentHashMap<>(); - private final ConcurrentHashMap> readers = new ConcurrentHashMap<>(); + public record ProducerCacheKey(String producerName, String topicName) { + } + + private final ConcurrentHashMap> producers = new ConcurrentHashMap<>(); + + public record ConsumerCacheKey(String consumerName, String subscriptionName, List topicNameList, String topicPattern) { + } + private final ConcurrentHashMap> consumers = new ConcurrentHashMap<>(); + + public record ReaderCacheKey(String readerName, String topicName, String startMsgPosStr) { + } + private final ConcurrentHashMap> readers = new ConcurrentHashMap<>(); public PulsarSpace(String spaceName, NBConfiguration cfg) { @@ -89,13 +100,11 @@ public class PulsarSpace implements AutoCloseable { public int getProducerSetCnt() { return producers.size(); } public int getConsumerSetCnt() { return consumers.size(); } public int getReaderSetCnt() { return readers.size(); } - public Producer getProducer(String name) { return producers.get(name); } - public void setProducer(String name, Producer producer) { producers.put(name, producer); } - public Consumer getConsumer(String name) { return consumers.get(name); } - public void setConsumer(String name, Consumer consumer) { consumers.put(name, consumer); } + public Producer getProducer(ProducerCacheKey key, Supplier> producerSupplier) { return producers.computeIfAbsent(key, __ -> producerSupplier.get()); } - public Reader getReader(String name) { 
return readers.get(name); } - public void setReader(String name, Reader reader) { readers.put(name, reader); } + public Consumer getConsumer(ConsumerCacheKey key, Supplier> consumerSupplier) { return consumers.computeIfAbsent(key, __ -> consumerSupplier.get()); } + + public Reader getReader(ReaderCacheKey key, Supplier> readerSupplier) { return readers.computeIfAbsent(key, __ -> readerSupplier.get()); } /** diff --git a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/PulsarBaseOpDispenser.java b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/PulsarBaseOpDispenser.java index 1de99a097..2e1b40230 100644 --- a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/PulsarBaseOpDispenser.java +++ b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/PulsarBaseOpDispenser.java @@ -37,7 +37,6 @@ import java.util.*; import java.util.function.LongFunction; import java.util.function.Predicate; import java.util.regex.Pattern; -import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; public abstract class PulsarBaseOpDispenser extends BaseOpDispenser implements NBNamedElement { @@ -239,10 +238,8 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser producer = pulsarSpace.getProducer(producerCacheKey); - - if (producer == null) { + PulsarSpace.ProducerCacheKey producerCacheKey = new PulsarSpace.ProducerCacheKey(producerName, topicName); + return pulsarSpace.getProducer(producerCacheKey, () -> { PulsarClient pulsarClient = pulsarSpace.getPulsarClient(); // Get other possible producer settings that are set at global level @@ -262,21 +259,17 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser producer = producerBuilder.create(); pulsarAdapterMetrics.registerProducerApiMetrics(producer, getPulsarAPIMetricsPrefix( PulsarAdapterUtil.PULSAR_API_TYPE.PRODUCER.label, producerName, topicName)); - } - catch (PulsarClientException ple) { + return producer; 
+ } catch (PulsarClientException ple) { throw new PulsarAdapterUnexpectedException("Failed to create a Pulsar producer."); } - } - - return producer; + }); } private List getEffectiveConsumerTopicNameList(String cycleTopicNameListStr) { @@ -296,24 +289,6 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser topicNameList = getEffectiveConsumerTopicNameList(cycleTopicNameListStr); - String topicPatternStr = getEffectiveConValue( + String topicPatternStr = StringUtils.trimToNull(getEffectiveConValue( PulsarAdapterUtil.CONF_GATEGORY.Consumer.label, PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.topicsPattern.label, - cycleTopicPatternStr); - Pattern topicPattern = getEffectiveConsumerTopicPattern(cycleTopicPatternStr); + cycleTopicPatternStr)); String subscriptionName = getEffectiveConValue( PulsarAdapterUtil.CONF_GATEGORY.Consumer.label, @@ -368,28 +342,14 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser 1 || (topicPattern != null)); - - String consumerTopicListString; - if (!topicNameList.isEmpty()) { - consumerTopicListString = String.join("|", topicNameList); - } else { - consumerTopicListString = topicPatternStr; - } - - String consumerCacheKey = PulsarAdapterUtil.buildCacheKey( - consumerName, - subscriptionName, - consumerTopicListString); - Consumer consumer = pulsarSpace.getConsumer(consumerCacheKey); - - if (consumer == null) { + return pulsarSpace.getConsumer( + new PulsarSpace.ConsumerCacheKey(consumerName, subscriptionName, topicNameList, topicPatternStr), () -> { PulsarClient pulsarClient = pulsarSpace.getPulsarClient(); // Get other possible consumer settings that are set at global level @@ -417,6 +377,7 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser 1 || (topicPatternStr != null)); if (!multiTopicConsumer) { assert (topicNameList.size() == 1); consumerBuilder = pulsarClient.newConsumer(pulsarSpace.getPulsarSchema()); @@ -429,6 +390,7 @@ public abstract class PulsarBaseOpDispenser extends 
BaseOpDispenser consumer = consumerBuilder.subscribe(); + String consumerTopicListString = (!topicNameList.isEmpty()) ? String.join("|", topicNameList) : topicPatternStr; pulsarAdapterMetrics.registerConsumerApiMetrics( consumer, getPulsarAPIMetricsPrefix( PulsarAdapterUtil.PULSAR_API_TYPE.CONSUMER.label, consumerName, consumerTopicListString)); + + return consumer; } catch (PulsarClientException ple) { throw new PulsarAdapterUnexpectedException("Failed to create a Pulsar consumer!"); } - } - - return consumer; + }); } private static Range[] parseRanges(String ranges) { @@ -528,10 +490,7 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser reader = pulsarSpace.getReader(readerCacheKey); - - if (reader == null) { + return pulsarSpace.getReader(new PulsarSpace.ReaderCacheKey(readerName, topicName, startMsgPosStr), () -> { PulsarClient pulsarClient = pulsarSpace.getPulsarClient();; Map readerConf = pulsarSpace.getPulsarNBClientConf().getReaderConfMapTgt(); @@ -558,17 +517,12 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser t.label.equals(param)); - } /////// // Message processing sequence error simulation types @@ -77,29 +81,21 @@ public class PulsarAdapterUtil { this.label = label; } - private static final Map MAPPING = new HashMap<>(); - - static { - for (MSG_SEQ_ERROR_SIMU_TYPE simuType : values()) { - MAPPING.put(simuType.label, simuType); - MAPPING.put(simuType.label.toLowerCase(), simuType); - MAPPING.put(simuType.label.toUpperCase(), simuType); - MAPPING.put(simuType.name(), simuType); - MAPPING.put(simuType.name().toLowerCase(), simuType); - MAPPING.put(simuType.name().toUpperCase(), simuType); - } - } + private static final Map MAPPING = Stream.of(values()) + .flatMap(simuType -> + Stream.of(simuType.label, + simuType.label.toLowerCase(), + simuType.label.toUpperCase(), + simuType.name(), + simuType.name().toLowerCase(), + simuType.name().toUpperCase()) + .distinct().map(key -> Map.entry(key, simuType))) + 
.collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); public static Optional parseSimuType(String simuTypeString) { return Optional.ofNullable(MAPPING.get(simuTypeString.trim())); } } - public static boolean isValidSeqErrSimuType(String item) { - return Arrays.stream(MSG_SEQ_ERROR_SIMU_TYPE.values()).anyMatch(t -> t.label.equals(item)); - } - public static String getValidSeqErrSimuTypeList() { - return Arrays.stream(MSG_SEQ_ERROR_SIMU_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", ")); - } /////// // Valid Pulsar API type @@ -113,12 +109,15 @@ public class PulsarAdapterUtil { PULSAR_API_TYPE(String label) { this.label = label; } + + private static final Set LABELS = Stream.of(values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet()); + + public static boolean isValidLabel(String label) { + return LABELS.contains(label); + } } public static boolean isValidPulsarApiType(String param) { - return Arrays.stream(PULSAR_API_TYPE.values()).anyMatch(t -> t.label.equals(param)); - } - public static String getValidPulsarApiTypeList() { - return Arrays.stream(PULSAR_API_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", ")); + return PULSAR_API_TYPE.isValidLabel(param); } @@ -136,14 +135,16 @@ public class PulsarAdapterUtil { CONF_GATEGORY(String label) { this.label = label; } + + private static final Set LABELS = Stream.of(values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet()); + + public static boolean isValidLabel(String label) { + return LABELS.contains(label); + } } public static boolean isValidConfCategory(String item) { - return Arrays.stream(CONF_GATEGORY.values()).anyMatch(t -> t.label.equals(item)); + return CONF_GATEGORY.isValidLabel(item); } - public static String getValidConfCategoryList() { - return Arrays.stream(CONF_GATEGORY.values()).map(t -> t.label).collect(Collectors.joining(", ")); - } - /////// // Valid persistence type public enum PERSISTENT_TYPES { @@ -156,9 +157,6 @@ public 
class PulsarAdapterUtil { this.label = label; } } - public static boolean isValidPersistenceType(String type) { - return Arrays.stream(PERSISTENT_TYPES.values()).anyMatch(t -> t.label.equals(type)); - } /////// // Valid Pulsar client configuration (activity-level settings) @@ -194,9 +192,6 @@ public class PulsarAdapterUtil { this.label = label; } } - public static boolean isValidClientConfItem(String item) { - return Arrays.stream(CLNT_CONF_KEY.values()).anyMatch(t -> t.label.equals(item)); - } /////// // Standard producer configuration (activity-level settings) @@ -222,9 +217,6 @@ public class PulsarAdapterUtil { this.label = label; } } - public static boolean isStandardProducerConfItem(String item) { - return Arrays.stream(PRODUCER_CONF_STD_KEY.values()).anyMatch(t -> t.label.equals(item)); - } // compressionType public enum COMPRESSION_TYPE { @@ -239,12 +231,12 @@ public class PulsarAdapterUtil { COMPRESSION_TYPE(String label) { this.label = label; } + + private final static String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", ")); } - public static boolean isValidCompressionType(String item) { - return Arrays.stream(COMPRESSION_TYPE.values()).anyMatch(t -> t.label.equals(item)); - } + public static String getValidCompressionTypeList() { - return Arrays.stream(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", ")); + return COMPRESSION_TYPE.TYPE_LIST; } /////// @@ -284,9 +276,6 @@ public class PulsarAdapterUtil { this.label = label; } } - public static boolean isStandardConsumerConfItem(String item) { - return Arrays.stream(CONSUMER_CONF_STD_KEY.values()).anyMatch(t -> t.label.equals(item)); - } /////// // Custom consumer configuration (activity-level settings) @@ -301,9 +290,16 @@ public class PulsarAdapterUtil { CONSUMER_CONF_CUSTOM_KEY(String label) { this.label = label; } + + private static final Set LABELS = Stream.of(values()).map(v -> 
v.label).collect(Collectors.toUnmodifiableSet()); + + public static boolean isValidLabel(String label) { + return LABELS.contains(label); + } + } public static boolean isCustomConsumerConfItem(String item) { - return Arrays.stream(CONSUMER_CONF_CUSTOM_KEY.values()).anyMatch(t -> t.label.equals(item)); + return CONSUMER_CONF_CUSTOM_KEY.isValidLabel(item); } // subscriptionTyp @@ -318,12 +314,21 @@ public class PulsarAdapterUtil { SUBSCRIPTION_TYPE(String label) { this.label = label; } + + private static final Set LABELS = Stream.of(values()).map(v -> v.label) + .collect(Collectors.toUnmodifiableSet()); + + public static boolean isValidLabel(String label) { + return LABELS.contains(label); + } + + private final static String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", ")); } public static boolean isValidSubscriptionType(String item) { - return Arrays.stream(SUBSCRIPTION_TYPE.values()).anyMatch(t -> t.label.equals(item)); + return SUBSCRIPTION_TYPE.isValidLabel(item); } public static String getValidSubscriptionTypeList() { - return Arrays.stream(SUBSCRIPTION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", ")); + return SUBSCRIPTION_TYPE.TYPE_LIST; } // subscriptionInitialPosition @@ -336,12 +341,12 @@ public class PulsarAdapterUtil { SUBSCRIPTION_INITIAL_POSITION(String label) { this.label = label; } - } - public static boolean isValidSubscriptionInitialPosition(String item) { - return Arrays.stream(SUBSCRIPTION_INITIAL_POSITION.values()).anyMatch(t -> t.label.equals(item)); + + private final static String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", ")); + } public static String getValidSubscriptionInitialPositionList() { - return Arrays.stream(SUBSCRIPTION_INITIAL_POSITION.values()).map(t -> t.label).collect(Collectors.joining(", ")); + return SUBSCRIPTION_INITIAL_POSITION.TYPE_LIST; } // regexSubscriptionMode @@ -355,12 +360,12 @@ public class 
PulsarAdapterUtil { REGEX_SUBSCRIPTION_MODE(String label) { this.label = label; } + + private final static String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", ")); } - public static boolean isValidRegexSubscriptionMode(String item) { - return Arrays.stream(REGEX_SUBSCRIPTION_MODE.values()).anyMatch(t -> t.label.equals(item)); - } + public static String getValidRegexSubscriptionModeList() { - return Arrays.stream(REGEX_SUBSCRIPTION_MODE.values()).map(t -> t.label).collect(Collectors.joining(", ")); + return REGEX_SUBSCRIPTION_MODE.TYPE_LIST; } /////// @@ -383,9 +388,6 @@ public class PulsarAdapterUtil { this.label = label; } } - public static boolean isStandardReaderConfItem(String item) { - return Arrays.stream(READER_CONF_STD_KEY.values()).anyMatch(t -> t.label.equals(item)); - } /////// // Custom reader configuration (activity-level settings) @@ -400,9 +402,6 @@ public class PulsarAdapterUtil { this.label = label; } } - public static boolean isCustomReaderConfItem(String item) { - return Arrays.stream(READER_CONF_CUSTOM_KEY.values()).anyMatch(t -> t.label.equals(item)); - } /////// // Valid read positions for a Pulsar reader @@ -415,156 +414,84 @@ public class PulsarAdapterUtil { READER_MSG_POSITION_TYPE(String label) { this.label = label; } + + private static final Set LABELS = Stream.of(values()).map(v -> v.label) + .collect(Collectors.toUnmodifiableSet()); + + public static boolean isValidLabel(String label) { + return LABELS.contains(label); + } } public static boolean isValideReaderStartPosition(String item) { - return Arrays.stream(READER_MSG_POSITION_TYPE.values()).anyMatch(t -> t.label.equals(item)); + return READER_MSG_POSITION_TYPE.isValidLabel(item); } + private static final Map> PRIMITIVE_SCHEMA_TYPE_MAPPING = Stream.of(SchemaType.values()) + .filter(SchemaType::isPrimitive) + .collect(Collectors.toUnmodifiableMap(schemaType -> schemaType.name().toUpperCase(), + schemaType -> 
Schema.getSchema(SchemaInfo.builder().type(schemaType).build()))); + /////// // Primitive Schema type public static boolean isPrimitiveSchemaTypeStr(String typeStr) { - boolean isPrimitive = false; - - // Use "BYTES" as the default type if the type string is not explicitly specified - if (StringUtils.isBlank(typeStr)) { - typeStr = "BYTES"; - } - - if (typeStr.equalsIgnoreCase("BOOLEAN") || typeStr.equalsIgnoreCase("INT8") || - typeStr.equalsIgnoreCase("INT16") || typeStr.equalsIgnoreCase("INT32") || - typeStr.equalsIgnoreCase("INT64") || typeStr.equalsIgnoreCase("FLOAT") || - typeStr.equalsIgnoreCase("DOUBLE") || typeStr.equalsIgnoreCase("BYTES") || - typeStr.equalsIgnoreCase("DATE") || typeStr.equalsIgnoreCase("TIME") || - typeStr.equalsIgnoreCase("TIMESTAMP") || typeStr.equalsIgnoreCase("INSTANT") || - typeStr.equalsIgnoreCase("LOCAL_DATE") || typeStr.equalsIgnoreCase("LOCAL_TIME") || - typeStr.equalsIgnoreCase("LOCAL_DATE_TIME")) { - isPrimitive = true; - } - - return isPrimitive; + return StringUtils.isBlank(typeStr) || PRIMITIVE_SCHEMA_TYPE_MAPPING.containsKey(typeStr.toUpperCase()); } + public static Schema getPrimitiveTypeSchema(String typeStr) { - Schema schema; - - if (StringUtils.isBlank(typeStr)) { - typeStr = "BYTES"; + String lookupKey = StringUtils.isBlank(typeStr) ? 
"BYTES" : typeStr.toUpperCase(); + Schema schema = PRIMITIVE_SCHEMA_TYPE_MAPPING.get(lookupKey); + if (schema == null) { + throw new PulsarAdapterInvalidParamException("Invalid Pulsar primitive schema type string : " + typeStr); } - - switch (typeStr.toUpperCase()) { - case "BOOLEAN": - schema = Schema.BOOL; - break; - case "INT8": - schema = Schema.INT8; - break; - case "INT16": - schema = Schema.INT16; - break; - case "INT32": - schema = Schema.INT32; - break; - case "INT64": - schema = Schema.INT64; - break; - case "FLOAT": - schema = Schema.FLOAT; - break; - case "DOUBLE": - schema = Schema.DOUBLE; - break; - case "DATE": - schema = Schema.DATE; - break; - case "TIME": - schema = Schema.TIME; - break; - case "TIMESTAMP": - schema = Schema.TIMESTAMP; - break; - case "INSTANT": - schema = Schema.INSTANT; - break; - case "LOCAL_DATE": - schema = Schema.LOCAL_DATE; - break; - case "LOCAL_TIME": - schema = Schema.LOCAL_TIME; - break; - case "LOCAL_DATE_TIME": - schema = Schema.LOCAL_DATE_TIME; - break; - case "BYTES": - schema = Schema.BYTES; - break; - // Report an error if non-valid, non-empty schema type string is provided - default: - throw new PulsarAdapterInvalidParamException("Invalid Pulsar primitive schema type string : " + typeStr); - } - return schema; } /////// // Complex strut type: Avro or Json public static boolean isAvroSchemaTypeStr(String typeStr) { - return (StringUtils.isNotBlank(typeStr) && typeStr.equalsIgnoreCase("AVRO")); + return "AVRO".equalsIgnoreCase(typeStr); } // automatic decode the type from the Registry public static boolean isAutoConsumeSchemaTypeStr(String typeStr) { - return (StringUtils.isNotBlank(typeStr) && typeStr.equalsIgnoreCase("AUTO_CONSUME")); + return "AUTO_CONSUME".equalsIgnoreCase(typeStr); } - public static Schema getAvroSchema(String typeStr, String definitionStr) { - String schemaDefinitionStr = definitionStr; - String filePrefix = "file://"; - Schema schema; + private static final Map> AVRO_SCHEMA_CACHE = new 
ConcurrentHashMap<>(); + + public static Schema getAvroSchema(String typeStr, final String definitionStr) { // Check if payloadStr points to a file (e.g. "file:///path/to/a/file") if (isAvroSchemaTypeStr(typeStr)) { - if (StringUtils.isBlank(schemaDefinitionStr)) { - throw new PulsarAdapterInvalidParamException( - "Schema definition must be provided for \"Avro\" schema type!"); + if (StringUtils.isBlank(definitionStr)) { + throw new PulsarAdapterInvalidParamException("Schema definition must be provided for \"Avro\" schema type!"); } - else if (schemaDefinitionStr.startsWith(filePrefix)) { - try { - Path filePath = Paths.get(URI.create(schemaDefinitionStr)); - schemaDefinitionStr = Files.readString(filePath, StandardCharsets.US_ASCII); + return AVRO_SCHEMA_CACHE.computeIfAbsent(definitionStr, __ -> { + String schemaDefinitionStr = definitionStr; + if (schemaDefinitionStr.startsWith("file://")) { + try { + Path filePath = Paths.get(URI.create(schemaDefinitionStr)); + schemaDefinitionStr = Files.readString(filePath, StandardCharsets.UTF_8); + } catch (IOException ioe) { + throw new PulsarAdapterUnexpectedException("Error reading the specified \"Avro\" schema definition file: " + definitionStr + ": " + ioe.getMessage()); + } } - catch (IOException ioe) { - throw new PulsarAdapterUnexpectedException( - "Error reading the specified \"Avro\" schema definition file: " + definitionStr + ": " + ioe.getMessage()); - } - } - - schema = PulsarAvroSchemaUtil.GetSchema_PulsarAvro("NBAvro", schemaDefinitionStr); + return PulsarAvroSchemaUtil.GetSchema_PulsarAvro("NBAvro", schemaDefinitionStr); + }); + } else { + throw new PulsarAdapterInvalidParamException("Trying to create a \"Avro\" schema for a non-Avro schema type string: " + typeStr); } - else { - throw new PulsarAdapterInvalidParamException( - "Trying to create a \"Avro\" schema for a non-Avro schema type string: " + typeStr); - } - - return schema; - } - - /////// - // Generate effective key string - public static String 
buildCacheKey(String... keyParts) { - // Ignore blank keyPart - String joinedKeyStr = - Stream.of(keyParts) - .filter(s -> !StringUtils.isBlank(s)) - .collect(Collectors.joining(",")); - - return Base64.getEncoder().encodeToString(joinedKeyStr.getBytes()); } /////// // Convert JSON string to a key/value map - public static Map convertJsonToMap(String jsonStr) throws Exception { - ObjectMapper mapper = new ObjectMapper(); - return mapper.readValue(jsonStr, Map.class); + private static final ObjectMapper JACKSON_OBJECT_MAPPER = new ObjectMapper(); + private static final TypeReference> MAP_TYPE_REF = new TypeReference<>() {}; + + public static Map convertJsonToMap(String jsonStr) throws IOException { + return JACKSON_OBJECT_MAPPER.readValue(jsonStr, MAP_TYPE_REF); } + /////// // Get full namespace name (/) from a Pulsar topic URI public static String getFullNamespaceName(String topicUri) { diff --git a/adapter-pulsar/src/test/java/io/nosqlbench/adapter/pulsar/util/MessageSequenceNumberSendingHandlerTest.java b/adapter-pulsar/src/test/java/io/nosqlbench/adapter/pulsar/util/MessageSequenceNumberSendingHandlerTest.java new file mode 100644 index 000000000..9b16a7892 --- /dev/null +++ b/adapter-pulsar/src/test/java/io/nosqlbench/adapter/pulsar/util/MessageSequenceNumberSendingHandlerTest.java @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2022 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.nosqlbench.adapter.pulsar.util; + +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class MessageSequenceNumberSendingHandlerTest { + MessageSequenceNumberSendingHandler sequenceNumberSendingHandler = new MessageSequenceNumberSendingHandler(); + + @Test + void shouldAddMonotonicSequence() { + for (long l = 1; l <= 100; l++) { + assertEquals(l, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + } + } + + @Test + void shouldInjectMessageLoss() { + assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(3L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(PulsarAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE.MsgLoss), 100)); + } + + @Test + void shouldInjectMessageDuplication() { + assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(PulsarAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE.MsgDup), 100)); + } + + @Test + void shouldInjectMessageOutOfOrder() { + assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(4L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(PulsarAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE.OutOfOrder), 100)); + assertEquals(2L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(3L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(5L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + assertEquals(6, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + } + + @Test + void 
shouldInjectOneOfTheSimulatedErrorsRandomly() { + Set allErrorTypes = new HashSet<>(Arrays.asList(PulsarAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE.values())); + + assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet())); + long previousSequenceNumber = 1L; + int outOfSequenceInjectionCounter = 0; + int messageDupCounter = 0; + int messageLossCounter = 0; + int successCounter = 0; + for (int i = 0; i < 1000; i++) { + long nextSequenceNumber = sequenceNumberSendingHandler.getNextSequenceNumber(allErrorTypes); + if (nextSequenceNumber >= previousSequenceNumber + 3) { + outOfSequenceInjectionCounter++; + } else if (nextSequenceNumber <= previousSequenceNumber) { + messageDupCounter++; + } else if (nextSequenceNumber >= previousSequenceNumber + 2) { + messageLossCounter++; + } else if (nextSequenceNumber == previousSequenceNumber + 1) { + successCounter++; + } + previousSequenceNumber = nextSequenceNumber; + } + assertTrue(outOfSequenceInjectionCounter > 0); + assertTrue(messageDupCounter > 0); + assertTrue(messageLossCounter > 0); + assertEquals(1000, outOfSequenceInjectionCounter + messageDupCounter + messageLossCounter + successCounter); + } + +} diff --git a/adapter-pulsar/src/test/java/io/nosqlbench/adapter/pulsar/util/ReceivedMessageSequenceTrackerTest.java b/adapter-pulsar/src/test/java/io/nosqlbench/adapter/pulsar/util/ReceivedMessageSequenceTrackerTest.java new file mode 100644 index 000000000..6b66e5f4d --- /dev/null +++ b/adapter-pulsar/src/test/java/io/nosqlbench/adapter/pulsar/util/ReceivedMessageSequenceTrackerTest.java @@ -0,0 +1,247 @@ +/* + * Copyright (c) 2022 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.nosqlbench.adapter.pulsar.util; + +import com.codahale.metrics.Counter; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +class ReceivedMessageSequenceTrackerTest { + Counter msgErrOutOfSeqCounter = new Counter(); + Counter msgErrDuplicateCounter = new Counter(); + Counter msgErrLossCounter = new Counter(); + ReceivedMessageSequenceTracker messageSequenceTracker = new ReceivedMessageSequenceTracker(msgErrOutOfSeqCounter, msgErrDuplicateCounter, msgErrLossCounter, 20, 20); + + @Test + void shouldCountersBeZeroWhenSequenceDoesntContainGaps() { + // when + for (long l = 0; l < 100L; l++) { + messageSequenceTracker.sequenceNumberReceived(l); + } + messageSequenceTracker.close(); + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } + + @ParameterizedTest + @ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L}) + void shouldDetectMsgLossWhenEverySecondMessageIsLost(long totalMessages) { + doShouldDetectMsgLoss(totalMessages, 2); + } + + @ParameterizedTest + @ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L}) + void shouldDetectMsgLossWhenEveryThirdMessageIsLost(long totalMessages) { + doShouldDetectMsgLoss(totalMessages, 3); + } + + @ParameterizedTest + @ValueSource(longs = {20L, 21L, 40L, 41L, 42L, 43L, 100L}) + void 
shouldDetectMsgLossWhenEvery21stMessageIsLost(long totalMessages) { + doShouldDetectMsgLoss(totalMessages, 21); + } + + private void doShouldDetectMsgLoss(long totalMessages, int looseEveryNthMessage) { + int messagesLost = 0; + // when + boolean lastMessageWasLost = false; + for (long l = 0; l < totalMessages; l++) { + if (l % looseEveryNthMessage == 1) { + messagesLost++; + lastMessageWasLost = true; + continue; + } else { + lastMessageWasLost = false; + } + messageSequenceTracker.sequenceNumberReceived(l); + } + if (lastMessageWasLost) { + messageSequenceTracker.sequenceNumberReceived(totalMessages); + } + messageSequenceTracker.close(); + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(messagesLost, msgErrLossCounter.getCount()); + } + + @ParameterizedTest + @ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L}) + void shouldDetectMsgDuplication(long totalMessages) { + int messagesDuplicated = 0; + // when + for (long l = 0; l < totalMessages; l++) { + if (l % 2 == 1) { + messagesDuplicated++; + messageSequenceTracker.sequenceNumberReceived(l); + } + messageSequenceTracker.sequenceNumberReceived(l); + } + if (totalMessages % 2 == 0) { + messageSequenceTracker.sequenceNumberReceived(totalMessages); + } + if (totalMessages < 2 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers()) { + messageSequenceTracker.close(); + } + + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(messagesDuplicated, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectSingleMessageOutOfSequence() { + // when + for (long l = 0; l < 10L; l++) { + messageSequenceTracker.sequenceNumberReceived(l); + } + messageSequenceTracker.sequenceNumberReceived(10L); + messageSequenceTracker.sequenceNumberReceived(12L); + messageSequenceTracker.sequenceNumberReceived(11L); + for (long l = 13L; l < 100L; l++) { + 
messageSequenceTracker.sequenceNumberReceived(l); + } + + // then + assertEquals(1, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectMultipleMessagesOutOfSequence() { + // when + for (long l = 0; l < 10L; l++) { + messageSequenceTracker.sequenceNumberReceived(l); + } + messageSequenceTracker.sequenceNumberReceived(10L); + messageSequenceTracker.sequenceNumberReceived(14L); + messageSequenceTracker.sequenceNumberReceived(13L); + messageSequenceTracker.sequenceNumberReceived(11L); + messageSequenceTracker.sequenceNumberReceived(12L); + for (long l = 15L; l < 100L; l++) { + messageSequenceTracker.sequenceNumberReceived(l); + } + + // then + assertEquals(2, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectIndividualMessageLoss() { + // when + for (long l = 0; l < 100L; l++) { + if (l != 11L) { + messageSequenceTracker.sequenceNumberReceived(l); + } + } + messageSequenceTracker.close(); + + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(1, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectGapAndMessageDuplication() { + // when + for (long l = 0; l < 100L; l++) { + if (l != 11L) { + messageSequenceTracker.sequenceNumberReceived(l); + } + if (l == 12L) { + messageSequenceTracker.sequenceNumberReceived(l); + } + } + messageSequenceTracker.close(); + + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(1, msgErrDuplicateCounter.getCount()); + assertEquals(1, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectGapAndMessageDuplicationTimes2() { + // when + for (long l = 0; l < 100L; l++) { + if (l != 11L) { + messageSequenceTracker.sequenceNumberReceived(l); + } + if (l == 12L) { + 
messageSequenceTracker.sequenceNumberReceived(l); + messageSequenceTracker.sequenceNumberReceived(l); + } + } + messageSequenceTracker.close(); + + // then + assertEquals(0, msgErrOutOfSeqCounter.getCount()); + assertEquals(2, msgErrDuplicateCounter.getCount()); + assertEquals(1, msgErrLossCounter.getCount()); + } + + + @Test + void shouldDetectDelayedOutOfOrderDelivery() { + // when + for (long l = 0; l < 5 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers(); l++) { + if (l != 10) { + messageSequenceTracker.sequenceNumberReceived(l); + } + if (l == messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers() * 2) { + messageSequenceTracker.sequenceNumberReceived(10); + } + } + messageSequenceTracker.close(); + + // then + assertEquals(1, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } + + @Test + void shouldDetectDelayedOutOfOrderDeliveryOf2ConsecutiveSequenceNumbers() { + // when + for (long l = 0; l < 5 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers(); l++) { + if (l != 10 && l != 11) { + messageSequenceTracker.sequenceNumberReceived(l); + } + if (l == messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers() * 2) { + messageSequenceTracker.sequenceNumberReceived(10); + messageSequenceTracker.sequenceNumberReceived(11); + } + } + messageSequenceTracker.close(); + + // then + assertEquals(2, msgErrOutOfSeqCounter.getCount()); + assertEquals(0, msgErrDuplicateCounter.getCount()); + assertEquals(0, msgErrLossCounter.getCount()); + } +} diff --git a/adapter-tcp/src/main/resources/tcpclient.md b/adapter-tcp/src/main/resources/tcpclient.md index 641b66ca3..a2dbbbc60 100644 --- a/adapter-tcp/src/main/resources/tcpclient.md +++ b/adapter-tcp/src/main/resources/tcpclient.md @@ -33,7 +33,7 @@ Run a stdout activity named 'stdout-test', with definitions from activities/stdo To enable, specifies the type of the SSL implementation with either `jdk` or 
`openssl`. - [Additional parameters may need to be provided](../../../../driver-cql/src/main/resources/ssl.md). + See the ssl help topic with `nb5 help ssl` for more details. - **host** - this is the name to bind to (local interface address) - default: localhost diff --git a/adapter-tcp/src/main/resources/tcpserver.md b/adapter-tcp/src/main/resources/tcpserver.md index 73a4c0222..7b77dda78 100644 --- a/adapter-tcp/src/main/resources/tcpserver.md +++ b/adapter-tcp/src/main/resources/tcpserver.md @@ -45,7 +45,7 @@ Run a stdout activity named 'stdout-test', with definitions from activities/stdo To enable, specifies the type of the SSL implementation with either `jdk` or `openssl`. - [Additional parameters may need to be provided](../../../../driver-cql/src/main/resources/ssl.md). + See the ssl help topic with `nb5 help ssl` for more details. - **host** - this is the name to bind to (local interface address) - default: localhost diff --git a/adapters-api/src/test/java/io/nosqlbench/engine/api/activityconfig/rawyaml/RawYamlTemplateLoaderTest.java b/adapters-api/src/test/java/io/nosqlbench/engine/api/activityconfig/rawyaml/RawYamlTemplateLoaderTest.java index d1b7b98fa..9edddf52d 100644 --- a/adapters-api/src/test/java/io/nosqlbench/engine/api/activityconfig/rawyaml/RawYamlTemplateLoaderTest.java +++ b/adapters-api/src/test/java/io/nosqlbench/engine/api/activityconfig/rawyaml/RawYamlTemplateLoaderTest.java @@ -77,7 +77,7 @@ public class RawYamlTemplateLoaderTest { assertThat(schemaOnlyScenario.keySet()) .containsExactly("000"); assertThat(schemaOnlyScenario.values()) - .containsExactly("run driver=blah tags=block:'schema.*'"); + .containsExactly("run driver=blah tags=block:\"schema.*\""); assertThat(rawOpsDoc.getName()).isEqualTo("doc1"); assertThat(blocks).hasSize(1); diff --git a/adapters-api/src/test/resources/testdocs/docs_blocks_ops.yaml b/adapters-api/src/test/resources/testdocs/docs_blocks_ops.yaml index 4e76e12e7..c73305222
100644 --- a/adapters-api/src/test/resources/testdocs/docs_blocks_ops.yaml +++ b/adapters-api/src/test/resources/testdocs/docs_blocks_ops.yaml @@ -7,7 +7,7 @@ scenarios: - run driver=stdout alias=step1 - run driver=stdout alias=step2 schema-only: - - run driver=blah tags=block:'schema.*' + - run driver=blah tags=block:"schema.*" tags: atagname: atagvalue diff --git a/engine-api/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/StandardActivity.java b/engine-api/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/StandardActivity.java index f21c8a468..b2377b9f1 100644 --- a/engine-api/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/StandardActivity.java +++ b/engine-api/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/StandardActivity.java @@ -80,6 +80,7 @@ public class StandardActivity extends SimpleActivity implements for (OpTemplate ot : opTemplates) { ParsedOp incompleteOpDef = new ParsedOp(ot, NBConfiguration.empty(), List.of()); String driverName = incompleteOpDef.takeOptionalStaticValue("driver", String.class) + .or(() -> incompleteOpDef.takeOptionalStaticValue("type",String.class)) .or(() -> activityDef.getParams().getOptionalString("driver")) .orElseThrow(() -> new OpConfigError("Unable to identify driver name for op template:\n" + ot)); diff --git a/engine-api/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/StandardActivityType.java b/engine-api/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/StandardActivityType.java index c319852e6..b45b91de6 100644 --- a/engine-api/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/StandardActivityType.java +++ b/engine-api/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/StandardActivityType.java @@ -16,15 +16,15 @@ package io.nosqlbench.engine.api.activityimpl.uniform; +import io.nosqlbench.api.config.standard.NBConfigModel; +import io.nosqlbench.api.config.standard.NBConfiguration; +import 
io.nosqlbench.api.config.standard.NBReconfigurable; +import io.nosqlbench.api.engine.activityimpl.ActivityDef; import io.nosqlbench.engine.api.activityapi.core.ActionDispenser; import io.nosqlbench.engine.api.activityapi.core.ActivityType; import io.nosqlbench.engine.api.activityconfig.OpsLoader; import io.nosqlbench.engine.api.activityconfig.yaml.OpsDocList; -import io.nosqlbench.api.engine.activityimpl.ActivityDef; import io.nosqlbench.engine.api.activityimpl.SimpleActivity; -import io.nosqlbench.api.config.standard.NBConfigModel; -import io.nosqlbench.api.config.standard.NBConfiguration; -import io.nosqlbench.api.config.standard.NBReconfigurable; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -39,7 +39,10 @@ public class StandardActivityType> extends Simpl private final Map adapters = new HashMap<>(); public StandardActivityType(DriverAdapter adapter, ActivityDef activityDef) { - super(activityDef); + super(activityDef + .deprecate("type","driver") + .deprecate("yaml", "workload") + ); this.adapters.put(adapter.getAdapterName(),adapter); if (adapter instanceof ActivityDefAware) { ((ActivityDefAware) adapter).setActivityDef(activityDef); diff --git a/engine-api/src/main/java/io/nosqlbench/engine/api/scenarios/NBCLIScenarioParser.java b/engine-api/src/main/java/io/nosqlbench/engine/api/scenarios/NBCLIScenarioParser.java index ae5f83050..16ca54d9d 100644 --- a/engine-api/src/main/java/io/nosqlbench/engine/api/scenarios/NBCLIScenarioParser.java +++ b/engine-api/src/main/java/io/nosqlbench/engine/api/scenarios/NBCLIScenarioParser.java @@ -139,7 +139,7 @@ public class NBCLIScenarioParser { if (selectedScenario.containsKey(stepname)) { namedSteps.put(stepname,selectedScenario.get(stepname)); } else { - throw new BasicError("Unable to find named scenario.step'" + scenarioName + "' in workload '" + workloadName + throw new BasicError("Unable to find named scenario.step '" + scenarioName + "' in workload '" + workloadName + "', but 
you can pick from one of: " + selectedScenario.keySet().stream().map(n -> nameparts[0].concat(".").concat(n)).collect(Collectors.joining(", "))); } } diff --git a/engine-cli/src/main/resources/scripts/auto/fm.js b/engine-cli/src/main/resources/scripts/auto/fm.js index d720b47b0..87bdf5cb7 100644 --- a/engine-cli/src/main/resources/scripts/auto/fm.js +++ b/engine-cli/src/main/resources/scripts/auto/fm.js @@ -48,7 +48,7 @@ schema_activitydef = params.withDefaults({ }); schema_activitydef.alias="findmax_schema"; schema_activitydef.threads="1"; -schema_activitydef.tags="TEMPLATE(schematags,block:'schema.*')"; +schema_activitydef.tags="TEMPLATE(schematags,block:\"schema.*\")"; print("Creating schema with schematags:" + schema_activitydef.tags); scenario.run(schema_activitydef); diff --git a/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioParserTest.java b/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioParserTest.java index 9ba47e884..d3aff8cc1 100644 --- a/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioParserTest.java +++ b/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioParserTest.java @@ -120,7 +120,7 @@ public class NBCLIScenarioParserTest { "alias", "scenariotest_schemaonly_schema", "cycles-test", "20", "driver", "stdout", - "tags", "block:'schema.*'", + "tags", "block:\"schema.*\"", "workload", "scenario-test" )); NBCLIOptions opts1 = new NBCLIOptions(new String[]{"scenario-test", "schema-only", "doundef=20"}); @@ -170,7 +170,7 @@ public class NBCLIScenarioParserTest { "alias", "scenariotest_schemaonly_schema", "cycles-test", "20", "driver", "stdout", - "tags", "block:'schema.*'", + "tags", "block:\"schema.*\"", "workload", "scenario-test" )); NBCLIOptions opts1 = new NBCLIOptions(new String[]{"local/example-scenarios", "namedsteps.one", "testparam1=testvalue2"}); diff --git a/engine-cli/src/test/resources/activities/scenario-formats-test.yaml 
b/engine-cli/src/test/resources/activities/scenario-formats-test.yaml index 9e9e4afcf..0af761189 100644 --- a/engine-cli/src/test/resources/activities/scenario-formats-test.yaml +++ b/engine-cli/src/test/resources/activities/scenario-formats-test.yaml @@ -1,6 +1,6 @@ name: alternate-format-test scenarios: default: - schema: run driver=cql protocol_version=v4 tags=block:'schema.*' threads==1 cycles=UNDEF + schema: run driver=cql protocol_version=v4 tags=block:"schema.*" threads==1 cycles=UNDEF rampup: run driver=cql protocol_version=v4 tags=block:rampup cycles=10000 main: run driver=cql protocol_version=v4 tags=block:main_mixed cycles=10000 diff --git a/engine-cli/src/test/resources/activities/scenario-test.yaml b/engine-cli/src/test/resources/activities/scenario-test.yaml index fd7eeb28d..31ab4f815 100644 --- a/engine-cli/src/test/resources/activities/scenario-test.yaml +++ b/engine-cli/src/test/resources/activities/scenario-test.yaml @@ -2,11 +2,11 @@ min_version: "5.17.1" scenarios: default: - schema: run driver==stdout workload===scenario-test tags=block:'schema.*' + schema: run driver==stdout workload===scenario-test tags=block:"schema.*" rampup: run driver=stdout workload===scenario-test tags=block:rampup cycles=TEMPLATE(cycles1,10) - main: run driver=stdout workload===scenario-test tags=block:'main.*' cycles=TEMPLATE(cycles2,10) + main: run driver=stdout workload===scenario-test tags=block:"main.*" cycles=TEMPLATE(cycles2,10) schema-only: - schema: run driver=stdout workload==scenario-test tags=block:'schema.*' doundef==undef + schema: run driver=stdout workload==scenario-test tags=block:"schema.*" doundef==undef template-test: with-template: run driver=stdout cycles=TEMPLATE(cycles-test,10) diff --git a/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/activity/ActivityLoader.java b/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/activity/ActivityLoader.java index fb0f9bd95..97634ced7 100644 --- 
a/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/activity/ActivityLoader.java +++ b/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/activity/ActivityLoader.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022 nosqlbench + * Copyright (c) 2022-2023 nosqlbench * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ public class ActivityLoader { } public synchronized Activity loadActivity(ActivityDef activityDef) { + activityDef= activityDef.deprecate("yaml","workload").deprecate("type","driver"); Activity activity = new StandardActivityType(activityDef).getAssembledActivity(activityDef, activityMap); activityMap.put(activity.getAlias(),activity); logger.debug("Resolved activity for alias '" + activityDef.getAlias() + "'"); diff --git a/engine-docs/src/main/resources/docs-for-nb/getting_started/01_example_commands.md b/engine-docs/src/main/resources/docs-for-nb/getting_started/01_example_commands.md index fe51887d0..270635dc4 100644 --- a/engine-docs/src/main/resources/docs-for-nb/getting_started/01_example_commands.md +++ b/engine-docs/src/main/resources/docs-for-nb/getting_started/01_example_commands.md @@ -15,7 +15,7 @@ command line, go ahead and execute the following command, replacing the `host=` with that of one of your database nodes. 
```text -./nb run driver=cql workload=cql-keyvalue tags=block:'schema.*' host= +./nb run driver=cql workload=cql-keyvalue tags=block:"schema.*" host= ``` This command is creating the following schema in your database: diff --git a/mvn-defaults/pom.xml b/mvn-defaults/pom.xml index 136cd7921..1043a94f9 100644 --- a/mvn-defaults/pom.xml +++ b/mvn-defaults/pom.xml @@ -501,7 +501,6 @@ org.apache.maven.plugins maven-resources-plugin - 3.2.0 ISO-8859-1 @@ -535,7 +534,6 @@ org.apache.maven.plugins maven-compiler-plugin - 3.10.1 true 17 @@ -551,7 +549,6 @@ org.apache.maven.plugins maven-surefire-plugin - 3.0.0-M6 -ea ${argLine} @@ -576,7 +573,6 @@ org.jacoco jacoco-maven-plugin - 0.8.8 prepare-agent @@ -627,7 +623,6 @@ org.apache.maven.plugins maven-failsafe-plugin - 3.0.0-M6 run-tests @@ -655,7 +650,6 @@ org.apache.maven.plugins maven-javadoc-plugin - 3.4.1 17 ${javadoc.name} @@ -686,7 +680,6 @@ org.apache.maven.plugins maven-source-plugin - 3.2.1 attach-sources @@ -702,7 +695,6 @@ org.sonatype.plugins nexus-staging-maven-plugin - 1.6.13 true ossrh @@ -714,7 +706,6 @@ org.apache.maven.plugins maven-gpg-plugin - 3.0.1 @@ -744,7 +735,6 @@ org.apache.maven.plugins maven-enforcer-plugin - 3.0.0-M3 enforce-java @@ -768,18 +758,18 @@ org.apache.maven.plugins maven-assembly-plugin - 3.3.0 + 3.4.2 org.apache.maven.plugins maven-release-plugin - 3.0.0-M6 + 3.0.0-M7 org.apache.maven.plugins maven-gpg-plugin - 1.6 + 3.0.1 org.apache.maven.plugins @@ -789,27 +779,27 @@ org.apache.maven.plugins maven-surefire-plugin - 3.0.0-M4 + 3.0.0-M8 org.apache.maven.plugins maven-failsafe-plugin - 3.0.0-M4 + 3.0.0-M8 org.apache.maven.plugins maven-javadoc-plugin - 3.1.1 + 3.4.1 org.apache.maven.plugins maven-source-plugin - 3.0.1 + 3.2.1 org.sonatype.plugins nexus-staging-maven-plugin - 1.6.8 + 1.6.13 org.antlr @@ -819,43 +809,43 @@ org.codehaus.mojo exec-maven-plugin - 1.6.0 + 3.1.0 org.apache.maven.plugins maven-enforcer-plugin - 3.0.0-M3 + 3.2.1 org.apache.maven.plugins maven-clean-plugin - 
3.1.0 + 3.2.0 org.apache.maven.plugins maven-resources-plugin - 3.2.0 + 3.3.0 org.jacoco - org.jacoco.ant + jacoco-maven-plugin ${jacoco.version} org.apache.maven.plugins maven-jar-plugin - 3.1.1 + 3.3.0 org.apache.maven.plugins maven-install-plugin - 3.0.0-M1 + 3.1.0 org.apache.maven.plugins maven-deploy-plugin - 3.0.0-M1 + 3.0.0 diff --git a/nb-api/src/main/java/io/nosqlbench/api/engine/activityimpl/ActivityDef.java b/nb-api/src/main/java/io/nosqlbench/api/engine/activityimpl/ActivityDef.java index 1ab2d6ebc..26bca1168 100644 --- a/nb-api/src/main/java/io/nosqlbench/api/engine/activityimpl/ActivityDef.java +++ b/nb-api/src/main/java/io/nosqlbench/api/engine/activityimpl/ActivityDef.java @@ -18,6 +18,7 @@ package io.nosqlbench.api.engine.activityimpl; import io.nosqlbench.api.config.NBNamedElement; import io.nosqlbench.api.engine.util.Unit; +import io.nosqlbench.api.errors.BasicError; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -213,4 +214,22 @@ public class ActivityDef implements NBNamedElement { public String getName() { return getAlias(); } + + public ActivityDef deprecate(String deprecatedName, String newName) { + Object deprecatedParam = this.parameterMap.get(deprecatedName); + if (deprecatedParam==null) { + return this; + } + if (deprecatedParam instanceof CharSequence chars) { + if (this.parameterMap.containsKey(newName)) { + throw new BasicError("You have specified activity param '" + deprecatedName + "' in addition to the valid name '" + newName +"'. 
Remove '" + deprecatedName + "'."); + } else { + logger.warn("Auto replacing deprecated activity param '" + deprecatedName + "="+ chars +"' with new '" + newName +"="+ chars +"'."); + parameterMap.put(newName,parameterMap.remove(deprecatedName)); + } + } else { + throw new BasicError("Can't replace deprecated name with value of type " + deprecatedParam.getClass().getCanonicalName()); + } + return this; + } } diff --git a/nbr/src/main/java/io/nosqlbench/api/docsapi/docexporter/BundledMarkdownZipExporter.java b/nbr/src/main/java/io/nosqlbench/api/docsapi/docexporter/BundledMarkdownZipExporter.java index 5c2d752a2..9ab351ce3 100644 --- a/nbr/src/main/java/io/nosqlbench/api/docsapi/docexporter/BundledMarkdownZipExporter.java +++ b/nbr/src/main/java/io/nosqlbench/api/docsapi/docexporter/BundledMarkdownZipExporter.java @@ -33,9 +33,6 @@ import java.nio.file.StandardOpenOption; import java.util.Date; import java.util.Locale; import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; import java.util.function.Function; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; @@ -65,10 +62,7 @@ public class BundledMarkdownZipExporter { } } - ExecutorService executorService = Executors.newSingleThreadExecutor(); - Future> future = executorService.submit(new VirtDataGenDocsApp(null)); - Map builderMap = future.get(); - executorService.shutdown(); + Map builderMap= new VirtDataGenDocsApp(null).call(); String bindingsPrefix ="bindings/"; for(Map.Entry entry : builderMap.entrySet()) { diff --git a/nbr/src/main/resources/examples/bindings-bigdecimal.yaml b/nbr/src/main/resources/examples/bindings-bigdecimal.yaml index 2d05000b9..47f388678 100644 --- a/nbr/src/main/resources/examples/bindings-bigdecimal.yaml +++ b/nbr/src/main/resources/examples/bindings-bigdecimal.yaml @@ -10,13 +10,16 @@ scenarios: # a scale parameter or a custom MathContext, but not both.
The scale parameter # is not supported for String or Double input forms. +# As of Java 17, the roundingMode enums are UP DOWN CEILING FLOOR HALF_UP HALF_DOWN HALF_EVEN UNNECESSARY +# and the precision must be zero or greater + bindings: # convert an example double with some fractional values, then convert it to BigDecimal from_double: ToDouble(); Div(100.0d); ToBigDecimal(); # convert doubles to BigDecimal, with custom precision and rounding - from_double_custom5: ToDouble(); Div(100.0d); ToBigDecimal('precision=5 roundingMode=HALF'); + from_double_custom5: ToDouble(); Div(100.0d); ToBigDecimal('precision=5 roundingMode=HALF_EVEN'); # convert directly to BigDecimal from long as whole numbers from_long: ToBigDecimal(); diff --git a/nbr/src/main/resources/examples/bindings-bytebuffers.yaml b/nbr/src/main/resources/examples/bindings-bytebuffers.yaml index 19e3f51a0..510809000 100644 --- a/nbr/src/main/resources/examples/bindings-bytebuffers.yaml +++ b/nbr/src/main/resources/examples/bindings-bytebuffers.yaml @@ -21,9 +21,6 @@ bindings: # control the size of the extracted sample with a hash range hashed_bb_sizerange: ByteBufferSizedHashed(HashRange(1,10)); ToHexString(); - # control the size of the extracted sample based on a continuous function - hashed_bb_size_normal: ByteBufferSizedHashed(Normal(5.0, 1.0)); ToHexString(); - # control the size of the extracted sample based on a discrete function hashed_bb_size_binomial: ByteBufferSizedHashed(Binomial(8,0.5)); ToHexString(); diff --git a/nbr/src/main/resources/examples/bindings-premade.yaml b/nbr/src/main/resources/examples/bindings-premade.yaml index ec16c5271..8343a1f85 100644 --- a/nbr/src/main/resources/examples/bindings-premade.yaml +++ b/nbr/src/main/resources/examples/bindings-premade.yaml @@ -5,12 +5,12 @@ description: | scenarios: default: run driver===stdout format=readout - names: run driver===stdout format=readout bindings=names - cities: run driver===stdout format=readout bindings=cities - states: run 
driver===stdout format=readout bindings=states - zips: run driver===stdout format=readout bindings=zips - counties: run driver===stdout format=readout bindings=counties - countries: run driver===stdout format=readout bindings=country + names: run driver===stdout format=readout bindings='names.*' + cities: run driver===stdout format=readout bindings='cities.*' + states: run driver===stdout format=readout bindings='states.*' + zips: run driver===stdout format=readout bindings='zips.*' + counties: run driver===stdout format=readout bindings='counties.*' + countries: run driver===stdout format=readout bindings='country.*' # Each binding is named, so the bindings is a map of names to diff --git a/nbr/src/main/resources/examples/bindings-utilities.yaml b/nbr/src/main/resources/examples/bindings-utilities.yaml index 3f78c04cd..adfed6198 100644 --- a/nbr/src/main/resources/examples/bindings-utilities.yaml +++ b/nbr/src/main/resources/examples/bindings-utilities.yaml @@ -21,10 +21,10 @@ bindings: # The TypeOf() function tells you the java class of its input - typeof: TypeOf(); + typeof: ToJavaInstant(); TypeOf(); # The Show() function provides a snapshot of what is in the thread-local # variable map as a String - show: Show(); + show: SaveLong('var42'); Show(); diff --git a/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/util/MathContextReader.java b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/util/MathContextReader.java index df2c6bcbe..7e124231b 100644 --- a/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/util/MathContextReader.java +++ b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/util/MathContextReader.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022 nosqlbench + * Copyright (c) 2022-2023 nosqlbench * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -39,7 +39,7 @@ public class MathContextReader { } } catch (IllegalArgumentException iae) { throw new BasicError("'" + name + "' was not a valid format for a new MathContext(String), try something " + - "like 'precision=17 roundingMode=UP"); + "like 'precision=17 roundingMode=UP', original exception:" + iae); } } diff --git a/virtdata-lib-realer/src/main/java/io/nosqlbench/virtdata/library/realer/CountryNames.java b/virtdata-lib-realer/src/main/java/io/nosqlbench/virtdata/library/realer/CountryNames.java index aacba6d47..295e4b56b 100644 --- a/virtdata-lib-realer/src/main/java/io/nosqlbench/virtdata/library/realer/CountryNames.java +++ b/virtdata-lib-realer/src/main/java/io/nosqlbench/virtdata/library/realer/CountryNames.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022 nosqlbench + * Copyright (c) 2022-2023 nosqlbench * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,7 +33,7 @@ public class CountryNames extends CSVSampler implements LongFunction { @Example("CountryNames()") public CountryNames() { - super("COUNTRY_NAME","n/a","name","countries.csv"); + super("COUNTRY_NAME","n/a","name","data/countries.csv"); } } diff --git a/virtdata-userlibs/src/main/java/io/nosqlbench/virtdata/userlibs/apps/docsapp/VirtDataGenDocsApp.java b/virtdata-userlibs/src/main/java/io/nosqlbench/virtdata/userlibs/apps/docsapp/VirtDataGenDocsApp.java index dc872c7f4..2740e2410 100644 --- a/virtdata-userlibs/src/main/java/io/nosqlbench/virtdata/userlibs/apps/docsapp/VirtDataGenDocsApp.java +++ b/virtdata-userlibs/src/main/java/io/nosqlbench/virtdata/userlibs/apps/docsapp/VirtDataGenDocsApp.java @@ -29,7 +29,6 @@ import io.nosqlbench.virtdata.userlibs.apps.docsapp.fdocs.FDocFuncs; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.nio.charset.StandardCharsets; @@ -73,48 +72,6 @@ 
public class VirtDataGenDocsApp implements Callable> public Map call() { - /*LinkedList largs = new LinkedList<>(Arrays.asList(args)); - if (args.length > 0 && args[0].contains("help")) { - System.out.println( - "usage:\n" + - "[basefile ] [basedir ] [categories combined|split] [format json|markdown] " + - "[blurbsdirs [:...]]\n\n" - ); - return result; - } - while (largs.peekFirst() != null) { - String argtype = largs.removeFirst(); - if (largs.peekFirst() == null) { - throw new RuntimeException(VirtDataGenDocsApp.class + " expects args in param value couplets."); - } - - String argval = largs.removeFirst().toLowerCase(); - switch (argtype) { - case "basefile": - this.baseFileName = argval; - break; - case "basedir": - this.basedir = argval; - break; - case BLURBS_DIRS: - this.blurbsDirs = argval; - break; - case CATEGORIES: - if (!argval.equals(CATEGORIES_SPLIT) && !argval.equals(CATEGORIES_COMBINED)) { - throw new RuntimeException("categories must either be " + CATEGORIES_SPLIT + ", or " + CATEGORIES_COMBINED + "."); - } - this.categories = argval; - break; - case FORMAT: - if (!argval.equals(FORMAT_MARKDOWN) && !argval.equals(FORMAT_JSON)) { - throw new RuntimeException("format must either be " + FORMAT_MARKDOWN + ", or " + FORMAT_JSON + "."); - } - this.format = argval; - break; - default: - } - }*/ - Optional docsinfo = loadAllDocs(); if (!docsinfo.isPresent()) { @@ -180,6 +137,7 @@ public class VirtDataGenDocsApp implements Callable> logger.debug("writing blurb to " + outputname); builder.append(blurb); + break; } } }