post-merge fixups

This commit is contained in:
Jonathan Shook 2021-11-03 13:58:51 -05:00
commit 53ab5832e2
105 changed files with 2964 additions and 1522 deletions

View File

@ -2,6 +2,8 @@ name: release
on:
push:
branches:
- main
paths:
- RELEASENOTES.**
branches:

View File

@ -1,3 +1,8 @@
- fa78ca54 (HEAD -> main, origin/main) Merge pull request #372 from lhotari/lh-detect-duplicates-after-gap
- 71c3b190 Detect delayed out-of-order delivery
- e694eaec Merge pull request #373 from lhotari/lh-upgrade-pulsar-2.8.1
- 85e1f8a6 Upgrade Pulsar client to 2.8.1 version
- 6b50060a Detect duplicates after a gap
- 3b674983 (HEAD -> main, origin/main) Merge pull request #362 from yabinmeng/main
- bf98d644 Merge branch 'nosqlbench:main' into main
- 793af965 Ignore abnormal message processing error for Shared and Key_Shared subscription type.

View File

@ -1,10 +1,12 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,13 +25,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-basics</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -25,20 +25,14 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-userlibs</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
</dependencies>

View File

@ -1 +0,0 @@
c1

View File

@ -12,7 +12,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -21,7 +21,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
@ -117,7 +117,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -21,7 +21,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-jdbc</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,13 +23,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
@ -81,15 +81,13 @@
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-codec-haproxy</artifactId>
<version>4.1.54.Final</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>io.netty</groupId>-->
<!-- <artifactId>netty-transport-native-epoll</artifactId>-->
<!-- <version>4.1.47.Final</version>-->
<!-- <classifier>linux-x86_64</classifier>-->
<!-- </dependency>-->
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-transport-native-epoll</artifactId>
<classifier>linux-x86_64</classifier>
</dependency>
<!-- test only scope -->
@ -185,21 +183,11 @@
<createDependencyReducedPom>false</createDependencyReducedPom>
<promoteTransitiveDependencies>true</promoteTransitiveDependencies>
<createSourcesJar>true</createSourcesJar>
<!-- <shadedArtifactAttached>true</shadedArtifactAttached>-->
<!-- <shadedClassifierName>shaded</shadedClassifierName>-->
<relocations>
<relocation>
<pattern>com.google.common</pattern>
<shadedPattern>com.datastax.internal.com_google_common</shadedPattern>
</relocation>
<!-- <relocation>-->
<!-- <pattern>com.datastax</pattern>-->
<!-- <shadedPattern>dse19.com.datastax</shadedPattern>-->
<!-- </relocation>-->
<!-- <relocation>-->
<!-- <pattern>io.netty</pattern>-->
<!-- <shadedPattern>dse19.io.netty</shadedPattern>-->
<!-- </relocation>-->
</relocations>
<artifactSet>
<includes>
@ -212,7 +200,6 @@
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
</transformers>
<!-- <finalName>${project.artifactId}</finalName>-->
<filters>
<filter>
<artifact>*:*</artifact>

View File

@ -94,7 +94,8 @@ blocks:
create table if not exists <<keyspace:baselines>>.<<table:tabular>> (
part text,
clust text,
data text,
data0 text, data1 text, data2 text, data3 text,
data4 text, data5 text, data6 text, data7 text,
PRIMARY KEY (part,clust)
);
tags:

View File

@ -4,7 +4,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -24,13 +24,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
@ -71,15 +71,14 @@
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-codec-haproxy</artifactId>
<version>4.1.54.Final</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>io.netty</groupId>-->
<!-- <artifactId>netty-transport-native-epoll</artifactId>-->
<!-- <version>4.1.47.Final</version>-->
<!-- <classifier>linux-x86_64</classifier>-->
<!-- </dependency>-->
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-transport-native-epoll</artifactId>
<classifier>linux-x86_64</classifier>
</dependency>
<!-- test only scope -->
@ -198,10 +197,10 @@
<pattern>io.nosqlbench.generators.cql</pattern>
<shadedPattern>io.nosqlbench.generators.cql3.shaded</shadedPattern>
</relocation>
<!-- <relocation>-->
<!-- <pattern>io.netty</pattern>-->
<!-- <shadedPattern>dse19.io.netty</shadedPattern>-->
<!-- </relocation>-->
<relocation>
<pattern>io.netty</pattern>
<shadedPattern>dse19.io.netty</shadedPattern>
</relocation>
</relocations>
<artifactSet>
<includes>

View File

@ -1,38 +1,38 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.58-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<artifactId>driver-cqlverify</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<artifactId>driver-cqlverify</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<description>
A CQL content verifier ActivityType, based on the CQL ActivityType
built on http://nosqlbench.io/
</description>
<description>
A CQL content verifier ActivityType, based on the CQL ActivityType
built on http://nosqlbench.io/
</description>
<dependencies>
<dependencies>
<!-- core dependencies -->
<!-- core dependencies -->
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cql-shaded</artifactId>
<version>4.15.58-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cql-shaded</artifactId>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>
</dependencies>
</project>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -21,14 +21,14 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>

View File

@ -1,263 +1,263 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.58-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<artifactId>driver-dsegraph-shaded</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<artifactId>driver-dsegraph-shaded</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<description>
A DSE Graph ActivityType driver for nosqlbench, based on http://nosqlbench.io/
</description>
<description>
A DSE Graph ActivityType driver for nosqlbench, based on http://nosqlbench.io/
</description>
<dependencies>
<dependencies>
<!-- core dependencies -->
<!-- core dependencies -->
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-graph</artifactId>
<version>1.9.0</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-graph</artifactId>
<version>1.9.0</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-core</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-core</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-extras</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-mapping</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-extras</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-mapping</artifactId>
<version>1.9.0</version>
</dependency>
<!-- For CQL compression option -->
<dependency>
<groupId>org.lz4</groupId>
<artifactId>lz4-java</artifactId>
</dependency>
<!-- For CQL compression option -->
<dependency>
<groupId>org.lz4</groupId>
<artifactId>lz4-java</artifactId>
</dependency>
<!-- For CQL compression option -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</dependency>
<!-- For CQL compression option -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr4-runtime</artifactId>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr4-runtime</artifactId>
</dependency>
<!-- <dependency>-->
<!-- <groupId>io.netty</groupId>-->
<!-- <artifactId>netty-transport-native-epoll</artifactId>-->
<!-- <version>4.1.47.Final</version>-->
<!-- <classifier>linux-x86_64</classifier>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>io.netty</groupId>-->
<!-- <artifactId>netty-transport-native-epoll</artifactId>-->
<!-- <version>4.1.47.Final</version>-->
<!-- <classifier>linux-x86_64</classifier>-->
<!-- </dependency>-->
<!-- test only scope -->
<!-- test only scope -->
<!-- This is added as shaded to satisfy old jmx reporting dependencies-->
<!-- This is added as shaded to satisfy old jmx reporting dependencies-->
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>3.2.2</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.apache.commons</groupId>-->
<!-- <artifactId>commons-lang3</artifactId>-->
<!-- <version>3.7</version>-->
<!-- </dependency>-->
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>3.2.2</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.apache.commons</groupId>-->
<!-- <artifactId>commons-lang3</artifactId>-->
<!-- <version>3.7</version>-->
<!-- </dependency>-->
<!-- test only scope -->
<!-- test only scope -->
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.13.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.13.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core-java8</artifactId>
<version>1.0.0m1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core-java8</artifactId>
<version>1.0.0m1</version>
<scope>test</scope>
</dependency>
<!-- compile only scope -->
<!-- compile only scope -->
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.23</version>
</dependency>
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.23</version>
</dependency>
</dependencies>
</dependencies>
<build>
<plugins>
<build>
<plugins>
<!--
If this plugin is re-enabled, the local CQL grammar will
be overwritten. The grammar has some syntax issues, so
fixes will be made to it before it is submitted back.
<!--
If this plugin is re-enabled, the local CQL grammar will
be overwritten. The grammar has some syntax issues, so
fixes will be made to it before it is submitted back.
(lack of composite key syntax, nested type syntax, etc)
-->
<!-- <plugin>-->
<!-- <groupId>com.googlecode.maven-download-plugin</groupId>-->
<!-- <artifactId>download-maven-plugin</artifactId>-->
<!-- <version>1.4.0</version>-->
<!-- <executions>-->
<!-- <execution>-->
<!-- <id>get-cql-lexer</id>-->
<!-- <phase>generate-sources</phase>-->
<!-- <goals>-->
<!-- <goal>wget</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- <url>-->
<!-- https://raw.githubusercontent.com/antlr/grammars-v4/master/cql3/CqlLexer.g4-->
<!-- </url>-->
<!-- <outputFileName>CqlLexer.g4</outputFileName>-->
<!-- <outputDirectory>src/main/grammars/cql3/-->
<!-- </outputDirectory>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- <execution>-->
<!-- <id>get-cql-parser</id>-->
<!-- <phase>generate-sources</phase>-->
<!-- <goals>-->
<!-- <goal>wget</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- <url>-->
<!-- https://raw.githubusercontent.com/antlr/grammars-v4/master/cql3/CqlParser.g4-->
<!-- </url>-->
<!-- <outputFileName>CqlParser.g4</outputFileName>-->
<!-- <outputDirectory>src/main/grammars/cql3/-->
<!-- </outputDirectory>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- </executions>-->
<!-- </plugin>-->
(lack of composite key syntax, nested type syntax, etc)
-->
<!-- <plugin>-->
<!-- <groupId>com.googlecode.maven-download-plugin</groupId>-->
<!-- <artifactId>download-maven-plugin</artifactId>-->
<!-- <version>1.4.0</version>-->
<!-- <executions>-->
<!-- <execution>-->
<!-- <id>get-cql-lexer</id>-->
<!-- <phase>generate-sources</phase>-->
<!-- <goals>-->
<!-- <goal>wget</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- <url>-->
<!-- https://raw.githubusercontent.com/antlr/grammars-v4/master/cql3/CqlLexer.g4-->
<!-- </url>-->
<!-- <outputFileName>CqlLexer.g4</outputFileName>-->
<!-- <outputDirectory>src/main/grammars/cql3/-->
<!-- </outputDirectory>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- <execution>-->
<!-- <id>get-cql-parser</id>-->
<!-- <phase>generate-sources</phase>-->
<!-- <goals>-->
<!-- <goal>wget</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- <url>-->
<!-- https://raw.githubusercontent.com/antlr/grammars-v4/master/cql3/CqlParser.g4-->
<!-- </url>-->
<!-- <outputFileName>CqlParser.g4</outputFileName>-->
<!-- <outputDirectory>src/main/grammars/cql3/-->
<!-- </outputDirectory>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- </executions>-->
<!-- </plugin>-->
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr4-maven-plugin</artifactId>
<configuration>
<sourceDirectory>src/main/grammars/cql3
</sourceDirectory>
<arguments>
<argument>-package</argument>
<argument>io.nosqlbench.generators.cql.generated
</argument>
</arguments>
<outputDirectory>
src/main/java/io/nosqlbench/generators/cql/generated
</outputDirectory>
</configuration>
<executions>
<execution>
<id>antlr</id>
<goals>
<goal>antlr4</goal>
</goals>
<phase>generate-sources</phase>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr4-maven-plugin</artifactId>
<configuration>
<sourceDirectory>src/main/grammars/cql3
</sourceDirectory>
<arguments>
<argument>-package</argument>
<argument>io.nosqlbench.generators.cql.generated
</argument>
</arguments>
<outputDirectory>
src/main/java/io/nosqlbench/generators/cql/generated
</outputDirectory>
</configuration>
<executions>
<execution>
<id>antlr</id>
<goals>
<goal>antlr4</goal>
</goals>
<phase>generate-sources</phase>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.3</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
<promoteTransitiveDependencies>true</promoteTransitiveDependencies>
<createSourcesJar>true</createSourcesJar>
<!-- <shadedArtifactAttached>true</shadedArtifactAttached>-->
<!-- <shadedClassifierName>shaded</shadedClassifierName>-->
<relocations>
<relocation>
<pattern>com.google.common</pattern>
<shadedPattern>com.datastax.internal.com_google_common</shadedPattern>
</relocation>
<!-- <relocation>-->
<!-- <pattern>com.datastax</pattern>-->
<!-- <shadedPattern>dse19.com.datastax</shadedPattern>-->
<!-- </relocation>-->
<!-- <relocation>-->
<!-- <pattern>io.netty</pattern>-->
<!-- <shadedPattern>dse19.io.netty</shadedPattern>-->
<!-- </relocation>-->
</relocations>
<artifactSet>
<includes>
<include>*:*</include>
</includes>
</artifactSet>
<transformers combine.children="append">
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>io.nosqlbench.engine.cli.NBCLI</mainClass>
</transformer>
</transformers>
<!-- <finalName>${project.artifactId}</finalName>-->
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</plugin>
</plugins>
</build>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.3</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
<promoteTransitiveDependencies>true</promoteTransitiveDependencies>
<createSourcesJar>true</createSourcesJar>
<!-- <shadedArtifactAttached>true</shadedArtifactAttached>-->
<!-- <shadedClassifierName>shaded</shadedClassifierName>-->
<relocations>
<relocation>
<pattern>com.google.common</pattern>
<shadedPattern>com.datastax.internal.com_google_common</shadedPattern>
</relocation>
<!-- <relocation>-->
<!-- <pattern>com.datastax</pattern>-->
<!-- <shadedPattern>dse19.com.datastax</shadedPattern>-->
<!-- </relocation>-->
<!-- <relocation>-->
<!-- <pattern>io.netty</pattern>-->
<!-- <shadedPattern>dse19.io.netty</shadedPattern>-->
<!-- </relocation>-->
</relocations>
<artifactSet>
<includes>
<include>*:*</include>
</includes>
</artifactSet>
<transformers combine.children="append">
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>io.nosqlbench.engine.cli.NBCLI</mainClass>
</transformer>
</transformers>
<!-- <finalName>${project.artifactId}</finalName>-->
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults/pom.xml</relativePath>
</parent>
@ -21,7 +21,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,14 +22,14 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>

View File

@ -3,7 +3,7 @@
<parent>
<artifactId>nosqlbench</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
@ -19,7 +19,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -17,36 +17,36 @@
into a pulsar system via JMS 2.0 compatibile APIs.
NOTE: this is JMS compatible driver from DataStax that allows using a Pulsar cluster
as the potential JMS Destination
as the potential JMS Destination
</description>
<!-- <repositories>-->
<!-- &lt;!&ndash; Tempoarily needed for Pulsar JMS Java library &ndash;&gt;-->
<!-- <repository>-->
<!-- <id>datastax-releases-local</id>-->
<!-- <name>DataStax Local Releases</name>-->
<!-- <url>https://repo.sjc.dsinternal.org/artifactory/datastax-snapshots-local/</url>-->
<!-- <releases>-->
<!-- <enabled>false</enabled>-->
<!-- </releases>-->
<!-- <snapshots>-->
<!-- <enabled>true</enabled>-->
<!-- </snapshots>-->
<!-- </repository>-->
<!-- </repositories>-->
<!-- <repositories>-->
<!-- &lt;!&ndash; Tempoarily needed for Pulsar JMS Java library &ndash;&gt;-->
<!-- <repository>-->
<!-- <id>datastax-releases-local</id>-->
<!-- <name>DataStax Local Releases</name>-->
<!-- <url>https://repo.sjc.dsinternal.org/artifactory/datastax-snapshots-local/</url>-->
<!-- <releases>-->
<!-- <enabled>false</enabled>-->
<!-- </releases>-->
<!-- <snapshots>-->
<!-- <enabled>true</enabled>-->
<!-- </snapshots>-->
<!-- </repository>-->
<!-- </repositories>-->
<dependencies>
<!-- core dependencies -->
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -37,27 +37,27 @@
<!-- https://mvnrepository.com/artifact/io.confluent/kafka-avro-serializer -->
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-avro-serializer</artifactId>
<version>5.5.1</version>
<artifactId>kafka-avro-serializer</artifactId>
<version>5.5.1</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.58-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.slf4j</groupId>-->
<!-- <artifactId>slf4j-api</artifactId>-->
<!-- <version>1.7.25</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.slf4j</groupId>-->
<!-- <artifactId>slf4j-api</artifactId>-->
<!-- <version>1.7.25</version>-->
<!-- </dependency>-->
</dependencies>
<repositories>
@ -71,23 +71,23 @@
</snapshots>
</repository>
</repositories>
<!-- <profiles>-->
<!-- <profile>-->
<!-- <id>shade</id>-->
<!-- <activation>-->
<!-- <activeByDefault>true</activeByDefault>-->
<!-- </activation>-->
<!-- <build>-->
<!-- <plugins>-->
<!-- <plugin>-->
<!-- <artifactId>maven-shade-plugin</artifactId>-->
<!-- <configuration>-->
<!-- <finalName>${project.artifactId}</finalName>-->
<!-- </configuration>-->
<!-- </plugin>-->
<!-- </plugins>-->
<!-- </build>-->
<!-- </profile>-->
<!-- </profiles>-->
<!-- <profiles>-->
<!-- <profile>-->
<!-- <id>shade</id>-->
<!-- <activation>-->
<!-- <activeByDefault>true</activeByDefault>-->
<!-- </activation>-->
<!-- <build>-->
<!-- <plugins>-->
<!-- <plugin>-->
<!-- <artifactId>maven-shade-plugin</artifactId>-->
<!-- <configuration>-->
<!-- <finalName>${project.artifactId}</finalName>-->
<!-- </configuration>-->
<!-- </plugin>-->
<!-- </plugins>-->
<!-- </build>-->
<!-- </profile>-->
<!-- </profiles>-->
</project>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -21,13 +21,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -18,7 +18,7 @@
</description>
<properties>
<pulsar.version>2.8.0</pulsar.version>
<pulsar.version>2.8.1</pulsar.version>
</properties>
<dependencies>
@ -40,13 +40,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-beanutils/commons-beanutils -->

View File

@ -10,6 +10,8 @@ import io.nosqlbench.driver.pulsar.util.PulsarNBClientConf;
import io.nosqlbench.engine.api.activityapi.core.ActivityDefObserver;
import io.nosqlbench.engine.api.activityapi.errorhandling.modular.NBErrorHandler;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiter;
import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiters;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.activityimpl.OpDispenser;
import io.nosqlbench.engine.api.activityimpl.SimpleActivity;
@ -38,6 +40,12 @@ public class PulsarActivity extends SimpleActivity implements ActivityDefObserve
// Metrics for NB Pulsar driver milestone: https://github.com/nosqlbench/nosqlbench/milestone/11
// - end-to-end latency
private Histogram e2eMsgProcLatencyHistogram;
// - message out of sequence error counter
private Counter msgErrOutOfSeqCounter;
// - message loss counter
private Counter msgErrLossCounter;
// - message duplicate (when dedup is enabled) error counter
private Counter msgErrDuplicateCounter;
private PulsarSpaceCache pulsarCache;
@ -51,6 +59,7 @@ public class PulsarActivity extends SimpleActivity implements ActivityDefObserve
private NBErrorHandler errorHandler;
private OpSequence<OpDispenser<PulsarOp>> sequencer;
private volatile Throwable asyncOperationFailure;
private boolean cycleratePerThread;
public PulsarActivity(ActivityDef activityDef) {
super(activityDef);
@ -76,6 +85,9 @@ public class PulsarActivity extends SimpleActivity implements ActivityDefObserve
commitTransactionTimer = ActivityMetrics.timer(activityDef, "commit_transaction");
e2eMsgProcLatencyHistogram = ActivityMetrics.histogram(activityDef, "e2e_msg_latency");
msgErrOutOfSeqCounter = ActivityMetrics.counter(activityDef, "err_msg_oos");
msgErrLossCounter = ActivityMetrics.counter(activityDef, "err_msg_loss");
msgErrDuplicateCounter = ActivityMetrics.counter(activityDef, "err_msg_dup");
String pulsarClntConfFile =
activityDef.getParams().getOptionalString("config").orElse("config.properties");
@ -99,11 +111,26 @@ public class PulsarActivity extends SimpleActivity implements ActivityDefObserve
() -> activityDef.getParams().getOptionalString("errors").orElse("stop"),
this::getExceptionMetrics
);
cycleratePerThread = activityDef.getParams().takeBoolOrDefault("cyclerate_per_thread", false);
}
private final ThreadLocal<RateLimiter> cycleLimiterThreadLocal = ThreadLocal.withInitial(() -> {
if (super.getCycleLimiter() != null) {
return RateLimiters.createOrUpdate(this.getActivityDef(), "cycles", null,
super.getCycleLimiter().getRateSpec());
} else {
return null;
}
});
@Override
public synchronized void onActivityDefUpdate(ActivityDef activityDef) {
super.onActivityDefUpdate(activityDef);
public RateLimiter getCycleLimiter() {
if (cycleratePerThread) {
return cycleLimiterThreadLocal.get();
} else {
return super.getCycleLimiter();
}
}
public NBErrorHandler getErrorHandler() { return errorHandler; }
@ -231,4 +258,7 @@ public class PulsarActivity extends SimpleActivity implements ActivityDefObserve
public Timer getCommitTransactionTimer() { return commitTransactionTimer; }
public Histogram getE2eMsgProcLatencyHistogram() { return e2eMsgProcLatencyHistogram; }
public Counter getMsgErrOutOfSeqCounter() { return msgErrOutOfSeqCounter; }
public Counter getMsgErrLossCounter() { return msgErrLossCounter; }
public Counter getMsgErrDuplicateCounter() { return msgErrDuplicateCounter; }
}

View File

@ -1,10 +0,0 @@
package io.nosqlbench.driver.pulsar.exception;
/**
 * Thrown when a consumer observes the same message sequence id twice while
 * broker-side message deduplication is expected to be enabled.
 */
public class PulsarMsgDuplicateException extends RuntimeException {

    /**
     * @param asyncPulsarOp whether the detection happened on the async API path
     * @param nbCycleNum    the NoSQLBench cycle number at detection time
     * @param curMsgSeqId   sequence id of the message just received
     * @param prevMsgSeqId  sequence id of the previously received message
     */
    public PulsarMsgDuplicateException(boolean asyncPulsarOp, long nbCycleNum, long curMsgSeqId, long prevMsgSeqId) {
        super(String.format(
            "%s Detected duplicate message when message deduplication is enabled (curCycleNum=%d, curMsgSeqId=%d, prevMsgSeqId=%d).",
            asyncPulsarOp ? "[AsyncAPI]" : "[SyncAPI]",
            nbCycleNum, curMsgSeqId, prevMsgSeqId));
    }
}

View File

@ -1,11 +0,0 @@
package io.nosqlbench.driver.pulsar.exception;
/**
 * Thrown when a gap in received message sequence ids indicates that some
 * published messages were never delivered to the consumer.
 */
public class PulsarMsgLossException extends RuntimeException {

    /**
     * @param asyncPulsarOp whether the detection happened on the async API path
     * @param nbCycleNum    the NoSQLBench cycle number at detection time
     * @param curMsgSeqId   sequence id of the message just received
     * @param prevMsgSeqId  sequence id of the previously received message
     */
    public PulsarMsgLossException(boolean asyncPulsarOp, long nbCycleNum, long curMsgSeqId, long prevMsgSeqId) {
        super(String.format(
            "%s Detected message sequence id gap (curCycleNum=%d, curMsgSeqId=%d, prevMsgSeqId=%d). Some published messages are not received!",
            asyncPulsarOp ? "[AsyncAPI]" : "[SyncAPI]",
            nbCycleNum, curMsgSeqId, prevMsgSeqId));
    }
}

View File

@ -1,11 +0,0 @@
package io.nosqlbench.driver.pulsar.exception;
/**
 * Thrown when a received message carries a lower sequence id than a previously
 * received message, i.e. message ordering was not preserved.
 */
public class PulsarMsgOutOfOrderException extends RuntimeException {

    /**
     * @param asyncPulsarOp whether the detection happened on the async API path
     * @param nbCycleNum    the NoSQLBench cycle number at detection time
     * @param curMsgSeqId   sequence id of the message just received
     * @param prevMsgSeqId  sequence id of the previously received message
     */
    public PulsarMsgOutOfOrderException(boolean asyncPulsarOp, long nbCycleNum, long curMsgSeqId, long prevMsgSeqId) {
        super(String.format(
            "%s Detected message ordering is not guaranteed (curCycleNum=%d, curMsgSeqId=%d, prevMsgSeqId=%d). Older messages are received earlier!",
            asyncPulsarOp ? "[AsyncAPI]" : "[SyncAPI]",
            nbCycleNum, curMsgSeqId, prevMsgSeqId));
    }
}

View File

@ -0,0 +1,87 @@
package io.nosqlbench.driver.pulsar.ops;
import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil;
import java.util.*;
import org.apache.commons.lang3.RandomUtils;
/**
 * Handles adding a monotonic sequence number to message properties of sent messages.
 * <p>
 * Numbers normally advance by 1 starting from 1. When error simulation is requested,
 * the handler deliberately corrupts the outgoing sequence (out-of-order, duplicate,
 * or gap) so that consumer-side sequence tracking can be exercised.
 * NOTE(review): instances appear to be used per-topic per-thread (thread-local map in
 * the producer mapper) — this class itself is not synchronized; confirm single-threaded use.
 */
class MessageSequenceNumberSendingHandler {
    // Probability (percent) that an error is injected per call when simulation is enabled.
    static final int SIMULATED_ERROR_PROBABILITY_PERCENTAGE = 10;
    // Next sequence number to hand out in the normal (no pending out-of-order) case.
    long number = 1;
    // When non-null, holds pre-computed numbers to emit next (drives out-of-order simulation);
    // reset to null once drained.
    Queue<Long> outOfOrderNumbers;

    /**
     * Returns the next sequence number, possibly injecting a simulated error with the
     * default probability of {@link #SIMULATED_ERROR_PROBABILITY_PERCENTAGE} percent.
     *
     * @param simulatedErrorTypes error types eligible for injection; an empty set disables simulation
     * @return the next sequence number to attach to the outgoing message
     */
    public long getNextSequenceNumber(Set<PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE> simulatedErrorTypes) {
        return getNextSequenceNumber(simulatedErrorTypes, SIMULATED_ERROR_PROBABILITY_PERCENTAGE);
    }

    /**
     * Returns the next sequence number with an explicit error-injection probability.
     * Package-visible for unit testing with a deterministic probability (0 or 100).
     *
     * @param simulatedErrorTypes        error types eligible for injection
     * @param errorProbabilityPercentage probability (0-100) of injecting one error
     * @return the next sequence number
     */
    long getNextSequenceNumber(Set<PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE> simulatedErrorTypes, int errorProbabilityPercentage) {
        simulateError(simulatedErrorTypes, errorProbabilityPercentage);
        return nextNumber();
    }

    // Possibly mutates internal state so the next emitted number(s) exhibit one
    // randomly chosen error type from the given set.
    private void simulateError(Set<PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE> simulatedErrorTypes, int errorProbabilityPercentage) {
        if (!simulatedErrorTypes.isEmpty() && shouldSimulateError(errorProbabilityPercentage)) {
            int selectIndex = 0;
            int numberOfErrorTypes = simulatedErrorTypes.size();
            if (numberOfErrorTypes > 1) {
                // pick one of the simulated error type randomly
                selectIndex = RandomUtils.nextInt(0, numberOfErrorTypes);
            }
            // select the chosen element by skipping in the set's iteration order
            PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE errorType = simulatedErrorTypes.stream()
                .skip(selectIndex)
                .findFirst()
                .get();
            switch (errorType) {
                case OutOfOrder:
                    // simulate message out of order
                    injectMessagesOutOfOrder();
                    break;
                case MsgDup:
                    // simulate message duplication
                    injectMessageDuplication();
                    break;
                case MsgLoss:
                    // simulate message loss
                    injectMessageLoss();
                    break;
            }
        }
    }

    private boolean shouldSimulateError(int errorProbabilityPercentage) {
        // Simulate error with the specified probability
        return RandomUtils.nextInt(0, 100) < errorProbabilityPercentage;
    }

    /**
     * Produces the next number: drains any queued out-of-order numbers first,
     * otherwise returns the monotonic counter and advances it.
     */
    long nextNumber() {
        if (outOfOrderNumbers != null) {
            long nextNumber = outOfOrderNumbers.poll();
            if (outOfOrderNumbers.isEmpty()) {
                outOfOrderNumbers = null;
            }
            return nextNumber;
        }
        return number++;
    }

    // Queue the next three numbers in the order (n+2, n, n+1) so deliveries appear reordered.
    // No-op while a previous out-of-order batch is still being drained.
    void injectMessagesOutOfOrder() {
        if (outOfOrderNumbers == null) {
            outOfOrderNumbers = new ArrayDeque<>(Arrays.asList(number + 2, number, number + 1));
            number += 3;
        }
    }

    // Step the counter back by one so the previously sent number is emitted again.
    void injectMessageDuplication() {
        if (outOfOrderNumbers == null) {
            number--;
        }
    }

    // Skip one number so the receiver observes a gap (simulated loss).
    void injectMessageLoss() {
        if (outOfOrderNumbers == null) {
            number++;
        }
    }
}

View File

@ -1,15 +1,13 @@
package io.nosqlbench.driver.pulsar.ops;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Timer;
import io.nosqlbench.driver.pulsar.PulsarActivity;
import io.nosqlbench.driver.pulsar.PulsarSpace;
import io.nosqlbench.engine.api.templating.CommandTemplate;
import java.util.HashMap;
import java.util.Map;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.pulsar.client.api.Consumer;
import org.apache.pulsar.client.api.Schema;
import org.apache.pulsar.client.api.transaction.Transaction;
import java.util.function.LongFunction;
@ -54,15 +52,16 @@ public class PulsarConsumerMapper extends PulsarTransactOpMapper {
@Override
public PulsarOp apply(long value) {
boolean seqTracking = seqTrackingFunc.apply(value);
Consumer<?> consumer = consumerFunc.apply(value);
boolean asyncApi = asyncApiFunc.apply(value);
boolean useTransaction = useTransactionFunc.apply(value);
boolean seqTracking = seqTrackingFunc.apply(value);
Supplier<Transaction> transactionSupplier = transactionSupplierFunc.apply(value);
boolean topicMsgDedup = topicMsgDedupFunc.apply(value);
String subscriptionType = subscriptionTypeFunc.apply(value);
return new PulsarConsumerOp(
this,
pulsarActivity,
asyncApi,
useTransaction,
@ -74,6 +73,23 @@ public class PulsarConsumerMapper extends PulsarTransactOpMapper {
clientSpace.getPulsarSchema(),
clientSpace.getPulsarClientConf().getConsumerTimeoutSeconds(),
value,
e2eMsProc);
e2eMsProc,
this::getReceivedMessageSequenceTracker);
}
private ReceivedMessageSequenceTracker getReceivedMessageSequenceTracker(String topicName) {
return receivedMessageSequenceTrackersForTopicThreadLocal.get()
.computeIfAbsent(topicName, k -> createReceivedMessageSequenceTracker());
}
private ReceivedMessageSequenceTracker createReceivedMessageSequenceTracker() {
return new ReceivedMessageSequenceTracker(pulsarActivity.getMsgErrOutOfSeqCounter(),
pulsarActivity.getMsgErrDuplicateCounter(),
pulsarActivity.getMsgErrLossCounter());
}
private final ThreadLocal<Map<String, ReceivedMessageSequenceTracker>> receivedMessageSequenceTrackersForTopicThreadLocal =
ThreadLocal.withInitial(HashMap::new);
}

View File

@ -7,6 +7,7 @@ import io.nosqlbench.driver.pulsar.PulsarActivity;
import io.nosqlbench.driver.pulsar.exception.*;
import io.nosqlbench.driver.pulsar.util.AvroUtil;
import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil;
import java.util.function.Function;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -22,6 +23,7 @@ public class PulsarConsumerOp implements PulsarOp {
private final static Logger logger = LogManager.getLogger(PulsarConsumerOp.class);
private final PulsarConsumerMapper consumerMapper;
private final PulsarActivity pulsarActivity;
private final boolean asyncPulsarOp;
@ -37,17 +39,16 @@ public class PulsarConsumerOp implements PulsarOp {
private final boolean e2eMsgProc;
private final long curCycleNum;
private long curMsgSeqId;
private long prevMsgSeqId;
private final Counter bytesCounter;
private final Histogram messageSizeHistogram;
private final Timer transactionCommitTimer;
// keep track of end-to-end message latency
private final Histogram e2eMsgProcLatencyHistogram;
private final Function<String, ReceivedMessageSequenceTracker> receivedMessageSequenceTrackerForTopic;
public PulsarConsumerOp(
PulsarConsumerMapper consumerMapper,
PulsarActivity pulsarActivity,
boolean asyncPulsarOp,
boolean useTransaction,
@ -59,8 +60,10 @@ public class PulsarConsumerOp implements PulsarOp {
Schema<?> schema,
int timeoutSeconds,
long curCycleNum,
boolean e2eMsgProc)
boolean e2eMsgProc,
Function<String, ReceivedMessageSequenceTracker> receivedMessageSequenceTrackerForTopic)
{
this.consumerMapper = consumerMapper;
this.pulsarActivity = pulsarActivity;
this.asyncPulsarOp = asyncPulsarOp;
@ -76,14 +79,22 @@ public class PulsarConsumerOp implements PulsarOp {
this.curCycleNum = curCycleNum;
this.e2eMsgProc = e2eMsgProc;
this.curMsgSeqId = 0;
this.prevMsgSeqId = (curCycleNum - 1);
this.bytesCounter = pulsarActivity.getBytesCounter();
this.messageSizeHistogram = pulsarActivity.getMessageSizeHistogram();
this.transactionCommitTimer = pulsarActivity.getCommitTransactionTimer();
this.e2eMsgProcLatencyHistogram = pulsarActivity.getE2eMsgProcLatencyHistogram();
this.receivedMessageSequenceTrackerForTopic = receivedMessageSequenceTrackerForTopic;
}
private void checkAndUpdateMessageErrorCounter(Message message) {
String msgSeqIdStr = message.getProperty(PulsarActivityUtil.MSG_SEQUENCE_NUMBER);
if ( !StringUtils.isBlank(msgSeqIdStr) ) {
long sequenceNumber = Long.parseLong(msgSeqIdStr);
ReceivedMessageSequenceTracker receivedMessageSequenceTracker = receivedMessageSequenceTrackerForTopic.apply(message.getTopicName());
receivedMessageSequenceTracker.sequenceNumberReceived(sequenceNumber);
}
}
@Override
@ -124,13 +135,15 @@ public class PulsarConsumerOp implements PulsarOp {
org.apache.avro.generic.GenericRecord avroGenericRecord =
AvroUtil.GetGenericRecord_ApacheAvro(avroSchema, message.getData());
logger.debug("Sync message received: msg-key={}; msg-properties={}; msg-payload={}",
logger.debug("({}) Sync message received: msg-key={}; msg-properties={}; msg-payload={}",
consumer.getConsumerName(),
message.getKey(),
message.getProperties(),
avroGenericRecord.toString());
}
else {
logger.debug("Sync message received: msg-key={}; msg-properties={}; msg-payload={}",
logger.debug("({}) Sync message received: msg-key={}; msg-properties={}; msg-payload={}",
consumer.getConsumerName(),
message.getKey(),
message.getProperties(),
new String(message.getData()));
@ -143,47 +156,17 @@ public class PulsarConsumerOp implements PulsarOp {
e2eMsgProcLatencyHistogram.update(e2eMsgLatency);
}
// keep track of message ordering and message loss
String msgSeqIdStr = message.getProperties().get(PulsarActivityUtil.MSG_SEQUENCE_ID);
if ( (seqTracking) && !StringUtils.isBlank(msgSeqIdStr) ) {
curMsgSeqId = Long.parseLong(msgSeqIdStr);
if ( prevMsgSeqId > -1) {
// normal case: message sequence id is monotonically increasing by 1
if ((curMsgSeqId - prevMsgSeqId) != 1) {
// abnormal case: out of ordering
// - for any subscription type, this check should always hold
if (curMsgSeqId < prevMsgSeqId) {
throw new PulsarMsgOutOfOrderException(
false, curCycleNum, curMsgSeqId, prevMsgSeqId);
}
// - this sequence based message loss and message duplicate check can't be used for
// "Shared" subscription (ignore this check)
// - TODO: for Key_Shared subscription type, this logic needs to be improved on
// per-key basis
else {
if ( !StringUtils.equalsAnyIgnoreCase(subscriptionType,
PulsarActivityUtil.SUBSCRIPTION_TYPE.Shared.label,
PulsarActivityUtil.SUBSCRIPTION_TYPE.Key_Shared.label)) {
// abnormal case: message loss
if ((curMsgSeqId - prevMsgSeqId) > 1) {
throw new PulsarMsgLossException(
false, curCycleNum, curMsgSeqId, prevMsgSeqId);
} else if (topicMsgDedup && (curMsgSeqId == prevMsgSeqId)) {
throw new PulsarMsgDuplicateException(
false, curCycleNum, curMsgSeqId, prevMsgSeqId);
}
}
}
}
}
}
// keep track of message errors and update error counters
if (seqTracking) checkAndUpdateMessageErrorCounter(message);
int messageSize = message.getData().length;
bytesCounter.inc(messageSize);
messageSizeHistogram.update(messageSize);
if (useTransaction) {
if (!useTransaction) {
consumer.acknowledge(message.getMessageId());
}
else {
consumer.acknowledgeAsync(message.getMessageId(), transaction).get();
// little problem: here we are counting the "commit" time
@ -194,14 +177,12 @@ public class PulsarConsumerOp implements PulsarOp {
transaction.commit().get();
}
}
else {
consumer.acknowledge(message.getMessageId());
}
}
catch (Exception e) {
logger.error(
"Sync message receiving failed - timeout value: {} seconds ", timeoutSeconds);
e.printStackTrace();
throw new PulsarDriverUnexpectedException("" +
"Sync message receiving failed - timeout value: " + timeoutSeconds + " seconds ");
}
@ -236,13 +217,15 @@ public class PulsarConsumerOp implements PulsarOp {
org.apache.avro.generic.GenericRecord avroGenericRecord =
AvroUtil.GetGenericRecord_ApacheAvro(avroSchema, message.getData());
logger.debug("Async message received: msg-key={}; msg-properties={}; msg-payload={})",
logger.debug("({}) Async message received: msg-key={}; msg-properties={}; msg-payload={})",
consumer.getConsumerName(),
message.getKey(),
message.getProperties(),
avroGenericRecord.toString());
}
else {
logger.debug("Async message received: msg-key={}; msg-properties={}; msg-payload={})",
logger.debug("({}) Async message received: msg-key={}; msg-properties={}; msg-payload={})",
consumer.getConsumerName(),
message.getKey(),
message.getProperties(),
new String(message.getData()));
@ -254,47 +237,14 @@ public class PulsarConsumerOp implements PulsarOp {
e2eMsgProcLatencyHistogram.update(e2eMsgLatency);
}
// keep track of message ordering, message loss, and message duplication
String msgSeqIdStr = message.getProperties().get(PulsarActivityUtil.MSG_SEQUENCE_ID);
if ( (seqTracking) && !StringUtils.isBlank(msgSeqIdStr) ) {
curMsgSeqId = Long.parseLong(msgSeqIdStr);
// keep track of message errors and update error counters
if (seqTracking) checkAndUpdateMessageErrorCounter(message);
if (prevMsgSeqId > -1) {
// normal case: message sequence id is monotonically increasing by 1
if ((curMsgSeqId - prevMsgSeqId) != 1) {
// abnormal case: out of ordering
// - for any subscription type, this check should always hold
if (curMsgSeqId < prevMsgSeqId) {
throw new PulsarMsgOutOfOrderException(
false, curCycleNum, curMsgSeqId, prevMsgSeqId);
}
// - this sequence based message loss and message duplicate check can't be used for
// "Shared" subscription (ignore this check)
// - TODO: for Key_Shared subscription type, this logic needs to be improved on
// per-key basis
else {
if ( !StringUtils.equalsAnyIgnoreCase(subscriptionType,
PulsarActivityUtil.SUBSCRIPTION_TYPE.Shared.label,
PulsarActivityUtil.SUBSCRIPTION_TYPE.Key_Shared.label)) {
// abnormal case: message loss
if ((curMsgSeqId - prevMsgSeqId) > 1) {
throw new PulsarMsgLossException(
false, curCycleNum, curMsgSeqId, prevMsgSeqId);
} else if (topicMsgDedup && (curMsgSeqId == prevMsgSeqId)) {
throw new PulsarMsgDuplicateException(
false, curCycleNum, curMsgSeqId, prevMsgSeqId);
}
}
}
}
}
}
if (useTransaction) {
consumer.acknowledgeAsync(message.getMessageId(), transaction);
if (!useTransaction) {
consumer.acknowledgeAsync(message);
}
else {
consumer.acknowledgeAsync(message);
consumer.acknowledgeAsync(message.getMessageId(), transaction);
}
timeTracker.run();
@ -304,8 +254,9 @@ public class PulsarConsumerOp implements PulsarOp {
});
}
catch (Exception e) {
throw new PulsarDriverUnexpectedException("Async message receiving failed");
throw new PulsarDriverUnexpectedException(e);
}
}
}
}

View File

@ -4,18 +4,17 @@ import io.nosqlbench.driver.pulsar.PulsarActivity;
import io.nosqlbench.driver.pulsar.PulsarSpace;
import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil;
import io.nosqlbench.engine.api.templating.CommandTemplate;
import org.apache.commons.lang3.RandomUtils;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.LongFunction;
import java.util.function.Supplier;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.transaction.Transaction;
import java.util.HashMap;
import java.util.Map;
import java.util.function.LongFunction;
import java.util.function.Supplier;
/**
* This maps a set of specifier functions to a pulsar operation. The pulsar operation contains
* enough state to define a pulsar operation such that it can be executed, measured, and possibly
@ -31,7 +30,7 @@ public class PulsarProducerMapper extends PulsarTransactOpMapper {
private final static Logger logger = LogManager.getLogger(PulsarProducerMapper.class);
private final LongFunction<Producer<?>> producerFunc;
private final LongFunction<String> seqErrSimuTypeFunc;
private final Set<PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE> seqErrSimuTypes;
private final LongFunction<String> keyFunc;
private final LongFunction<String> propFunc;
private final LongFunction<String> payloadFunc;
@ -44,14 +43,14 @@ public class PulsarProducerMapper extends PulsarTransactOpMapper {
LongFunction<Boolean> seqTrackingFunc,
LongFunction<Supplier<Transaction>> transactionSupplierFunc,
LongFunction<Producer<?>> producerFunc,
LongFunction<String> seqErrSimuTypeFunc,
Set<PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE> seqErrSimuTypes,
LongFunction<String> keyFunc,
LongFunction<String> propFunc,
LongFunction<String> payloadFunc) {
super(cmdTpl, clientSpace, pulsarActivity, asyncApiFunc, useTransactionFunc, seqTrackingFunc, transactionSupplierFunc);
this.producerFunc = producerFunc;
this.seqErrSimuTypeFunc = seqErrSimuTypeFunc;
this.seqErrSimuTypes = seqErrSimuTypes;
this.keyFunc = keyFunc;
this.propFunc = propFunc;
this.payloadFunc = payloadFunc;
@ -61,25 +60,10 @@ public class PulsarProducerMapper extends PulsarTransactOpMapper {
public PulsarOp apply(long value) {
boolean asyncApi = asyncApiFunc.apply(value);
boolean useTransaction = useTransactionFunc.apply(value);
boolean seqTracking = seqTrackingFunc.apply(value);
Supplier<Transaction> transactionSupplier = transactionSupplierFunc.apply(value);
Producer<?> producer = producerFunc.apply(value);
// Simulate error 10% of the time
float rndVal = RandomUtils.nextFloat(0, 1.0f);
boolean simulationError = (rndVal >= 0) && (rndVal < 0.1f);
String seqErrSimuType = seqErrSimuTypeFunc.apply(value);
boolean simulateMsgOutofOrder = simulationError &&
!StringUtils.isBlank(seqErrSimuType) &&
StringUtils.equalsIgnoreCase(seqErrSimuType, PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.OutOfOrder.label);
boolean simulateMsgLoss = simulationError &&
!StringUtils.isBlank(seqErrSimuType) &&
StringUtils.equalsIgnoreCase(seqErrSimuType, PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.MsgLoss.label);
boolean simulateMsgDup = simulationError &&
!StringUtils.isBlank(seqErrSimuType) &&
StringUtils.equalsIgnoreCase(seqErrSimuType, PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.MsgDup.label);
String msgKey = keyFunc.apply(value);
String msgPayload = payloadFunc.apply(value);
@ -99,24 +83,11 @@ public class PulsarProducerMapper extends PulsarTransactOpMapper {
}
}
// Set message sequence tracking property
if (seqTracking) {
// normal case
if (!simulateMsgOutofOrder && !simulateMsgDup) {
msgProperties.put(PulsarActivityUtil.MSG_SEQUENCE_ID, String.valueOf(value));
}
// simulate message out of order
else if ( simulateMsgOutofOrder ) {
int rndmOffset = 2;
msgProperties.put(PulsarActivityUtil.MSG_SEQUENCE_ID,
String.valueOf((value > rndmOffset) ? (value-rndmOffset) : value));
}
// simulate message duplication
else {
msgProperties.put(PulsarActivityUtil.MSG_SEQUENCE_ID, String.valueOf(value-1));
}
// message loss simulation is not done by message property
// we simply skip sending message in the current NB cycle
boolean sequenceTrackingEnabled = seqTrackingFunc.apply(value);
if (sequenceTrackingEnabled) {
long nextSequenceNumber = getMessageSequenceNumberSendingHandler(producer.getTopic())
.getNextSequenceNumber(seqErrSimuTypes);
msgProperties.put(PulsarActivityUtil.MSG_SEQUENCE_NUMBER, String.valueOf(nextSequenceNumber));
}
return new PulsarProducerOp(
@ -128,7 +99,15 @@ public class PulsarProducerMapper extends PulsarTransactOpMapper {
clientSpace.getPulsarSchema(),
msgKey,
msgProperties,
msgPayload,
simulateMsgLoss);
msgPayload);
}
private MessageSequenceNumberSendingHandler getMessageSequenceNumberSendingHandler(String topicName) {
return MessageSequenceNumberSendingHandlersThreadLocal.get()
.computeIfAbsent(topicName, k -> new MessageSequenceNumberSendingHandler());
}
private final ThreadLocal<Map<String, MessageSequenceNumberSendingHandler>> MessageSequenceNumberSendingHandlersThreadLocal =
ThreadLocal.withInitial(HashMap::new);
}

View File

@ -38,7 +38,6 @@ public class PulsarProducerOp implements PulsarOp {
private final String msgKey;
private final Map<String, String> msgProperties;
private final String msgPayload;
private final boolean simulateMsgLoss;
private final Counter bytesCounter;
private final Histogram messageSizeHistogram;
@ -52,8 +51,7 @@ public class PulsarProducerOp implements PulsarOp {
Schema<?> schema,
String key,
Map<String, String> msgProperties,
String payload,
boolean simulateMsgLoss) {
String payload) {
this.pulsarActivity = pulsarActivity;
this.asyncPulsarOp = asyncPulsarOp;
@ -65,7 +63,6 @@ public class PulsarProducerOp implements PulsarOp {
this.msgKey = key;
this.msgProperties = msgProperties;
this.msgPayload = payload;
this.simulateMsgLoss = simulateMsgLoss;
this.bytesCounter = pulsarActivity.getBytesCounter();
this.messageSizeHistogram = pulsarActivity.getMessageSizeHistogram();
@ -74,11 +71,6 @@ public class PulsarProducerOp implements PulsarOp {
@Override
public void run(Runnable timeTracker) {
// Skip this cycle (without sending messages) if we're doing message loss simulation
if (simulateMsgLoss) {
return;
}
if ( StringUtils.isBlank(msgPayload)) {
throw new PulsarDriverParamException("Message payload (\"msg-value\") can't be empty!");
}
@ -146,13 +138,15 @@ public class PulsarProducerOp implements PulsarOp {
org.apache.avro.generic.GenericRecord avroGenericRecord =
AvroUtil.GetGenericRecord_ApacheAvro(avroSchema, msgPayload);
logger.debug("Sync message sent: msg-key={}; msg-properties={}; msg-payload={})",
logger.debug("({}) Sync message sent: msg-key={}; msg-properties={}; msg-payload={})",
producer.getProducerName(),
msgKey,
msgProperties,
avroGenericRecord.toString());
}
else {
logger.debug("Sync message sent: msg-key={}; msg-properties={}; msg-payload={}",
logger.debug("({}) Sync message sent; msg-key={}; msg-properties={}; msg-payload={}",
producer.getProducerName(),
msgKey,
msgProperties,
msgPayload);
@ -199,13 +193,15 @@ public class PulsarProducerOp implements PulsarOp {
org.apache.avro.generic.GenericRecord avroGenericRecord =
AvroUtil.GetGenericRecord_ApacheAvro(avroSchema, msgPayload);
logger.debug("Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={})",
logger.debug("({}) Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={})",
producer.getProducerName(),
msgKey,
msgProperties,
avroGenericRecord.toString());
}
else {
logger.debug("Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={}",
logger.debug("({}) Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={}",
producer.getProducerName(),
msgKey,
msgProperties,
msgPayload);

View File

@ -1,6 +1,8 @@
package io.nosqlbench.driver.pulsar.ops;
import io.nosqlbench.driver.pulsar.*;
import io.nosqlbench.driver.pulsar.PulsarActivity;
import io.nosqlbench.driver.pulsar.PulsarSpace;
import io.nosqlbench.driver.pulsar.PulsarSpaceCache;
import io.nosqlbench.driver.pulsar.exception.PulsarDriverParamException;
import io.nosqlbench.driver.pulsar.exception.PulsarDriverUnsupportedOpException;
import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil;
@ -11,18 +13,15 @@ import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.client.admin.PulsarAdminException;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.Consumer;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.Reader;
import org.apache.pulsar.client.api.transaction.Transaction;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.*;
import java.util.function.LongFunction;
import java.util.function.Supplier;
import java.util.stream.Collectors;
public class ReadyPulsarOp implements OpDispenser<PulsarOp> {
@ -354,10 +353,10 @@ public class ReadyPulsarOp implements OpDispenser<PulsarOp> {
// check if we're going to simulate producer message out-of-sequence error
// - message ordering
// - message loss
LongFunction<String> seqErrSimuTypeFunc = (l) -> null;
Set<PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE> seqErrSimuTypes = Collections.emptySet();
if (cmdTpl.containsKey("seqerr_simu")) {
if (cmdTpl.isStatic("seqerr_simu")) {
seqErrSimuTypeFunc = (l) -> cmdTpl.getStatic("seqerr_simu");
seqErrSimuTypes = parseSimulatedErrorTypes(cmdTpl.getStatic("seqerr_simu"));
} else {
throw new PulsarDriverParamException("[resolveMsgSend()] \"seqerr_simu\" parameter cannot be dynamic!");
}
@ -405,12 +404,23 @@ public class ReadyPulsarOp implements OpDispenser<PulsarOp> {
seqTrackingFunc,
transactionSupplierFunc,
producerFunc,
seqErrSimuTypeFunc,
seqErrSimuTypes,
keyFunc,
propFunc,
valueFunc);
}
private Set<PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE> parseSimulatedErrorTypes(String sequenceErrorSimulatedTypeString) {
if (StringUtils.isBlank(sequenceErrorSimulatedTypeString)) {
return Collections.emptySet();
}
return Arrays.stream(StringUtils.split(sequenceErrorSimulatedTypeString, ','))
.map(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE::parseSimuType)
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.toSet());
}
private LongFunction<PulsarOp> resolveMsgConsume(
PulsarSpace clientSpace,
LongFunction<String> topic_uri_func,
@ -450,34 +460,11 @@ public class ReadyPulsarOp implements OpDispenser<PulsarOp> {
LongFunction<Supplier<Transaction>> transactionSupplierFunc =
(l) -> clientSpace.getTransactionSupplier(); //TODO make it dependant on current cycle?
LongFunction<Boolean> topicMsgDedupFunc = (l) -> {
String topic = topic_uri_func.apply(l);
String namespace = PulsarActivityUtil.getFullNamespaceName(topic);
PulsarAdmin pulsarAdmin = pulsarActivity.getPulsarAdmin();
// Check namespace-level deduplication setting
// - default to broker level deduplication setting
boolean nsMsgDedup = brokerMsgDupFunc.apply(l);
try {
nsMsgDedup = pulsarAdmin.namespaces().getDeduplicationStatus(namespace);
}
catch (PulsarAdminException pae) {
// it is fine if we're unable to check namespace level setting; use default
}
// Check topic-level deduplication setting
// - default to namespace level deduplication setting
boolean topicMsgDedup = nsMsgDedup;
try {
topicMsgDedup = pulsarAdmin.topics().getDeduplicationStatus(topic);
}
catch (PulsarAdminException pae) {
// it is fine if we're unable to check topic level setting; use default
}
return topicMsgDedup;
};
// TODO: Ignore namespace and topic level dedup check on the fly
// this will impact the consumer performance significantly
// Consider using caching or Memoizer in the future?
// (https://www.baeldung.com/guava-memoizer)
LongFunction<Boolean> topicMsgDedupFunc = brokerMsgDupFunc;
LongFunction<Consumer<?>> consumerFunc = (l) ->
clientSpace.getConsumer(

View File

@ -0,0 +1,150 @@
package io.nosqlbench.driver.pulsar.ops;
import com.codahale.metrics.Counter;
import java.util.Iterator;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* Detects message loss, message duplication and out-of-order message delivery
* based on a monotonic sequence number that each received message contains.
* <p>
* Out-of-order messages are detected with a maximum look behind of 1000 sequence number entries.
* This is currently defined as a constant, {@link ReceivedMessageSequenceTracker#DEFAULT_MAX_TRACK_OUT_OF_ORDER_SEQUENCE_NUMBERS}.
*/
class ReceivedMessageSequenceTracker implements AutoCloseable {
private static final int DEFAULT_MAX_TRACK_OUT_OF_ORDER_SEQUENCE_NUMBERS = 1000;
private static final int DEFAULT_MAX_TRACK_SKIPPED_SEQUENCE_NUMBERS = 1000;
// message out-of-sequence error counter
private final Counter msgErrOutOfSeqCounter;
// duplicate message error counter
private final Counter msgErrDuplicateCounter;
// message loss error counter
private final Counter msgErrLossCounter;
private final SortedSet<Long> pendingOutOfSeqNumbers;
private final int maxTrackOutOfOrderSequenceNumbers;
private final SortedSet<Long> skippedSeqNumbers;
private final int maxTrackSkippedSequenceNumbers;
private long expectedNumber = -1;
/**
 * Creates a tracker with the default look-behind windows
 * ({@code DEFAULT_MAX_TRACK_OUT_OF_ORDER_SEQUENCE_NUMBERS} and
 * {@code DEFAULT_MAX_TRACK_SKIPPED_SEQUENCE_NUMBERS}).
 *
 * @param msgErrOutOfSeqCounter  counter incremented for out-of-order deliveries
 * @param msgErrDuplicateCounter counter incremented for duplicate deliveries
 * @param msgErrLossCounter      counter incremented (and decremented on late arrival) for lost messages
 */
public ReceivedMessageSequenceTracker(Counter msgErrOutOfSeqCounter, Counter msgErrDuplicateCounter, Counter msgErrLossCounter) {
    this(msgErrOutOfSeqCounter, msgErrDuplicateCounter, msgErrLossCounter,
        DEFAULT_MAX_TRACK_OUT_OF_ORDER_SEQUENCE_NUMBERS, DEFAULT_MAX_TRACK_SKIPPED_SEQUENCE_NUMBERS);
}

/**
 * Creates a tracker with explicit look-behind window sizes (useful in unit tests).
 *
 * @param msgErrOutOfSeqCounter             counter incremented for out-of-order deliveries
 * @param msgErrDuplicateCounter            counter incremented for duplicate deliveries
 * @param msgErrLossCounter                 counter incremented for lost messages
 * @param maxTrackOutOfOrderSequenceNumbers max pending out-of-order entries kept before eviction
 * @param maxTrackSkippedSequenceNumbers    max skipped entries remembered for late-arrival detection
 */
public ReceivedMessageSequenceTracker(Counter msgErrOutOfSeqCounter, Counter msgErrDuplicateCounter, Counter msgErrLossCounter,
                                      int maxTrackOutOfOrderSequenceNumbers, int maxTrackSkippedSequenceNumbers) {
    this.msgErrOutOfSeqCounter = msgErrOutOfSeqCounter;
    this.msgErrDuplicateCounter = msgErrDuplicateCounter;
    this.msgErrLossCounter = msgErrLossCounter;
    this.maxTrackOutOfOrderSequenceNumbers = maxTrackOutOfOrderSequenceNumbers;
    this.maxTrackSkippedSequenceNumbers = maxTrackSkippedSequenceNumbers;
    // sorted sets so the lowest pending / oldest skipped number is cheap to find
    this.pendingOutOfSeqNumbers = new TreeSet<>();
    this.skippedSeqNumbers = new TreeSet<>();
}
/**
 * Notifies the tracker about a received sequence number.
 * <p>
 * The first call only seeds the expectation (no error is counted). After that:
 * a number below the expectation is either a late out-of-order arrival (if it was
 * previously recorded as skipped) or a duplicate; a number above the expectation is
 * buffered as pending out-of-order; a number equal to the expectation advances it.
 *
 * @param sequenceNumber the sequence number of the received message
 */
public void sequenceNumberReceived(long sequenceNumber) {
    if (expectedNumber == -1) {
        // first observation: initialize the expectation without counting any error
        expectedNumber = sequenceNumber + 1;
        return;
    }
    if (sequenceNumber < expectedNumber) {
        if (skippedSeqNumbers.remove(sequenceNumber)) {
            // late out-of-order delivery was detected
            // decrease the loss counter
            msgErrLossCounter.dec();
            // increment the out-of-order counter
            msgErrOutOfSeqCounter.inc();
        } else {
            // already-seen number that was never marked skipped -> duplicate
            msgErrDuplicateCounter.inc();
        }
        return;
    }
    boolean messagesSkipped = false;
    if (sequenceNumber > expectedNumber) {
        // tracking window full: evict the lowest pending entry before adding
        if (pendingOutOfSeqNumbers.size() == maxTrackOutOfOrderSequenceNumbers) {
            messagesSkipped = processLowestPendingOutOfSequenceNumber();
        }
        if (!pendingOutOfSeqNumbers.add(sequenceNumber)) {
            // number was already pending -> duplicate
            msgErrDuplicateCounter.inc();
        }
    } else {
        // sequenceNumber == expectedNumber
        expectedNumber++;
    }
    processPendingOutOfSequenceNumbers(messagesSkipped);
    cleanUpTooFarBehindOutOfSequenceNumbers();
}
/**
 * Removes the lowest buffered out-of-order sequence number and reconciles it
 * against the current expectation.
 *
 * @return true when the expectation was advanced past a gap (the gap's numbers
 *         were counted as lost and recorded as skipped), false otherwise
 */
private boolean processLowestPendingOutOfSequenceNumber() {
    // remove the lowest pending out of sequence number
    Long lowestOutOfSeqNumber = pendingOutOfSeqNumbers.first();
    pendingOutOfSeqNumbers.remove(lowestOutOfSeqNumber);
    if (lowestOutOfSeqNumber > expectedNumber) {
        // skip the expected number ahead to the number after the lowest sequence number
        // increment the counter with the amount of sequence numbers that got skipped
        // keep track of the skipped sequence numbers to detect late out-of-order message delivery
        for (long l = expectedNumber; l < lowestOutOfSeqNumber; l++) {
            msgErrLossCounter.inc();
            skippedSeqNumbers.add(l);
            if (skippedSeqNumbers.size() > maxTrackSkippedSequenceNumbers) {
                // bound memory: drop the oldest tracked skipped number
                skippedSeqNumbers.remove(skippedSeqNumbers.first());
            }
        }
        expectedNumber = lowestOutOfSeqNumber + 1;
        return true;
    } else {
        // buffered number is at or behind the expectation; counted as a loss
        // NOTE(review): this branch looks unreachable under normal call order — confirm
        msgErrLossCounter.inc();
    }
    return false;
}
/**
 * Drains buffered out-of-order numbers that have become consecutive with the
 * expected number, advancing the expectation for each one consumed.
 *
 * @param messagesSkipped when true the gap was already written off as loss,
 *        so drained numbers are not additionally counted as out-of-order
 */
private void processPendingOutOfSequenceNumbers(boolean messagesSkipped) {
    boolean consumedPending = pendingOutOfSeqNumbers.remove(expectedNumber);
    while (consumedPending) {
        expectedNumber++;
        if (!messagesSkipped) {
            // the buffered number arrived earlier than expected: out-of-order
            msgErrOutOfSeqCounter.inc();
        }
        consumedPending = pendingOutOfSeqNumbers.remove(expectedNumber);
    }
}
/**
 * Evicts buffered out-of-order numbers that have fallen more than one tracking
 * window behind the expectation; each evicted number is counted as a loss.
 * Relies on the pending set being sorted ascending (TreeSet).
 */
private void cleanUpTooFarBehindOutOfSequenceNumbers() {
    long oldestRetainable = expectedNumber - maxTrackOutOfOrderSequenceNumbers;
    while (!pendingOutOfSeqNumbers.isEmpty() && pendingOutOfSeqNumbers.first() < oldestRetainable) {
        msgErrLossCounter.inc();
        pendingOutOfSeqNumbers.remove(pendingOutOfSeqNumbers.first());
    }
}
/**
 * Handles the possible pending out of sequence numbers. Mainly needed in unit tests to assert the
 * counter values.
 */
@Override
public void close() {
    // Drain all buffered out-of-order numbers so their final classification
    // (loss vs. out-of-order) is reflected in the counters.
    while (!pendingOutOfSeqNumbers.isEmpty()) {
        processPendingOutOfSequenceNumbers(processLowestPendingOutOfSequenceNumber());
    }
}
/** @return the maximum number of out-of-order sequence numbers buffered at once */
public int getMaxTrackOutOfOrderSequenceNumbers() {
    return maxTrackOutOfOrderSequenceNumbers;
}
/** @return the maximum number of skipped (lost) sequence numbers retained */
public int getMaxTrackSkippedSequenceNumbers() {
    return maxTrackSkippedSequenceNumbers;
}
}

View File

@ -1,6 +1,7 @@
package io.nosqlbench.driver.pulsar.util;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -12,9 +13,6 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Base64;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@ -48,8 +46,7 @@ public class PulsarActivityUtil {
return Arrays.stream(OP_TYPES.values()).anyMatch(t -> t.label.equals(type));
}
public static final String MSG_SEQUENCE_ID = "sequence_id";
public static final String MSG_SEQUENCE_TGTMAX = "sequence_tgtmax";
public static final String MSG_SEQUENCE_NUMBER = "sequence_number";
///////
// Valid document level parameters for Pulsar NB yaml file
@ -314,6 +311,23 @@ public class PulsarActivityUtil {
SEQ_ERROR_SIMU_TYPE(String label) {
this.label = label;
}
private static final Map<String, SEQ_ERROR_SIMU_TYPE> MAPPING = new HashMap<>();
static {
for (SEQ_ERROR_SIMU_TYPE simuType : values()) {
MAPPING.put(simuType.label, simuType);
MAPPING.put(simuType.label.toLowerCase(), simuType);
MAPPING.put(simuType.label.toUpperCase(), simuType);
MAPPING.put(simuType.name(), simuType);
MAPPING.put(simuType.name().toLowerCase(), simuType);
MAPPING.put(simuType.name().toUpperCase(), simuType);
}
}
public static Optional<SEQ_ERROR_SIMU_TYPE> parseSimuType(String simuTypeString) {
return Optional.ofNullable(MAPPING.get(simuTypeString.trim()));
}
}
public static boolean isValidSeqErrSimuType(String item) {
return Arrays.stream(SEQ_ERROR_SIMU_TYPE.values()).anyMatch(t -> t.label.equals(item));

View File

@ -5,7 +5,7 @@ bindings:
# document level parameters that apply to all Pulsar client types:
params:
topic_uri: "persistent://public/default/sanity_seqloss2"
topic_uri: "persistent://tnt0/ns0/sanity_seqloss12"
# Only applicable to producer and consumer
# - used for message ordering and message loss check
async_api: "true"
@ -23,6 +23,7 @@ blocks:
#seqerr_simu: "out_of_order"
#seqerr_simu: "msg_loss"
#seqerr_simu: "msg_dup"
#seqerr_simu: "out_of_order, msg_loss"
msg_key:
msg_property:
msg_value: "{myvalue}"
@ -35,5 +36,5 @@ blocks:
- name: s1
optype: msg-consume
subscription_name: "mysub"
subscription_type:
subscription_type: "Shared"
consumer_name:

View File

@ -0,0 +1,74 @@
package io.nosqlbench.driver.pulsar.ops;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import io.nosqlbench.driver.pulsar.util.PulsarActivityUtil;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for MessageSequenceNumberSendingHandler: verifies the monotonic
 * sequence and the injection of simulated errors (loss, duplication, out-of-order).
 */
class MessageSequenceNumberSendingHandlerTest {
    MessageSequenceNumberSendingHandler sequenceNumberSendingHandler = new MessageSequenceNumberSendingHandler();

    @Test
    void shouldAddMonotonicSequence() {
        // with no simulated errors the numbers are strictly 1, 2, 3, ...
        for (long l = 1; l <= 100; l++) {
            assertEquals(l, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
        }
    }

    @Test
    void shouldInjectMessageLoss() {
        assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
        // loss injection skips a number (2 is never handed out)
        assertEquals(3L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.MsgLoss), 100));
    }

    @Test
    void shouldInjectMessageDuplication() {
        assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
        // duplication injection repeats the previous number
        assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.MsgDup), 100));
    }

    @Test
    void shouldInjectMessageOutOfOrder() {
        assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
        // out-of-order injection jumps ahead, then back-fills the held numbers
        assertEquals(4L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.OutOfOrder), 100));
        assertEquals(2L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
        assertEquals(3L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
        assertEquals(5L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
        assertEquals(6, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
    }

    @Test
    void shouldInjectOneOfTheSimulatedErrorsRandomly() {
        Set<PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE> allErrorTypes = new HashSet<>(Arrays.asList(PulsarActivityUtil.SEQ_ERROR_SIMU_TYPE.values()));
        assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
        long previousSequenceNumber = 1L;
        int outOfSequenceInjectionCounter = 0;
        int messageDupCounter = 0;
        int messageLossCounter = 0;
        int successCounter = 0;
        for (int i = 0; i < 1000; i++) {
            long nextSequenceNumber = sequenceNumberSendingHandler.getNextSequenceNumber(allErrorTypes);
            // classify each step by the size of the jump relative to the previous number
            if (nextSequenceNumber >= previousSequenceNumber + 3) {
                outOfSequenceInjectionCounter++;
            } else if (nextSequenceNumber <= previousSequenceNumber) {
                messageDupCounter++;
            } else if (nextSequenceNumber >= previousSequenceNumber + 2) {
                messageLossCounter++;
            } else if (nextSequenceNumber == previousSequenceNumber + 1) {
                successCounter++;
            }
            previousSequenceNumber = nextSequenceNumber;
        }
        // every error type must have been injected at least once over 1000 draws
        assertTrue(outOfSequenceInjectionCounter > 0);
        assertTrue(messageDupCounter > 0);
        assertTrue(messageLossCounter > 0);
        assertEquals(1000, outOfSequenceInjectionCounter + messageDupCounter + messageLossCounter + successCounter);
    }
}

View File

@ -0,0 +1,230 @@
package io.nosqlbench.driver.pulsar.ops;
import static org.junit.jupiter.api.Assertions.assertEquals;
import com.codahale.metrics.Counter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
/**
 * Unit tests for ReceivedMessageSequenceTracker: verifies the loss, duplication,
 * and out-of-order counters across gap, duplicate, and delayed-delivery scenarios.
 */
class ReceivedMessageSequenceTrackerTest {
    Counter msgErrOutOfSeqCounter = new Counter();
    Counter msgErrDuplicateCounter = new Counter();
    Counter msgErrLossCounter = new Counter();
    // small tracking windows (20) so window-overflow behavior is exercised
    ReceivedMessageSequenceTracker messageSequenceTracker = new ReceivedMessageSequenceTracker(msgErrOutOfSeqCounter, msgErrDuplicateCounter, msgErrLossCounter, 20, 20);

    @Test
    void shouldCountersBeZeroWhenSequenceDoesntContainGaps() {
        // when
        for (long l = 0; l < 100L; l++) {
            messageSequenceTracker.sequenceNumberReceived(l);
        }
        messageSequenceTracker.close();
        // then
        assertEquals(0, msgErrOutOfSeqCounter.getCount());
        assertEquals(0, msgErrDuplicateCounter.getCount());
        assertEquals(0, msgErrLossCounter.getCount());
    }

    @ParameterizedTest
    @ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L})
    void shouldDetectMsgLossWhenEverySecondMessageIsLost(long totalMessages) {
        doShouldDetectMsgLoss(totalMessages, 2);
    }

    @ParameterizedTest
    @ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L})
    void shouldDetectMsgLossWhenEveryThirdMessageIsLost(long totalMessages) {
        doShouldDetectMsgLoss(totalMessages, 3);
    }

    @ParameterizedTest
    @ValueSource(longs = {20L, 21L, 40L, 41L, 42L, 43L, 100L})
    void shouldDetectMsgLossWhenEvery21stMessageIsLost(long totalMessages) {
        // 21 exceeds the tracking window of 20, exercising forced processing
        doShouldDetectMsgLoss(totalMessages, 21);
    }

    // Drops every n-th message and asserts the loss counter matches the drop count.
    private void doShouldDetectMsgLoss(long totalMessages, int looseEveryNthMessage) {
        int messagesLost = 0;
        // when
        boolean lastMessageWasLost = false;
        for (long l = 0; l < totalMessages; l++) {
            if (l % looseEveryNthMessage == 1) {
                messagesLost++;
                lastMessageWasLost = true;
                continue;
            } else {
                lastMessageWasLost = false;
            }
            messageSequenceTracker.sequenceNumberReceived(l);
        }
        if (lastMessageWasLost) {
            // deliver one extra number so the trailing gap can be detected
            messageSequenceTracker.sequenceNumberReceived(totalMessages);
        }
        messageSequenceTracker.close();
        // then
        assertEquals(0, msgErrOutOfSeqCounter.getCount());
        assertEquals(0, msgErrDuplicateCounter.getCount());
        assertEquals(messagesLost, msgErrLossCounter.getCount());
    }

    @ParameterizedTest
    @ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L})
    void shouldDetectMsgDuplication(long totalMessages) {
        int messagesDuplicated = 0;
        // when: every odd number is delivered twice
        for (long l = 0; l < totalMessages; l++) {
            if (l % 2 == 1) {
                messagesDuplicated++;
                messageSequenceTracker.sequenceNumberReceived(l);
            }
            messageSequenceTracker.sequenceNumberReceived(l);
        }
        if (totalMessages % 2 == 0) {
            messageSequenceTracker.sequenceNumberReceived(totalMessages);
        }
        if (totalMessages < 2 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers()) {
            messageSequenceTracker.close();
        }
        // then
        assertEquals(0, msgErrOutOfSeqCounter.getCount());
        assertEquals(messagesDuplicated, msgErrDuplicateCounter.getCount());
        assertEquals(0, msgErrLossCounter.getCount());
    }

    @Test
    void shouldDetectSingleMessageOutOfSequence() {
        // when: 12 arrives before 11, then the stream continues normally
        for (long l = 0; l < 10L; l++) {
            messageSequenceTracker.sequenceNumberReceived(l);
        }
        messageSequenceTracker.sequenceNumberReceived(10L);
        messageSequenceTracker.sequenceNumberReceived(12L);
        messageSequenceTracker.sequenceNumberReceived(11L);
        for (long l = 13L; l < 100L; l++) {
            messageSequenceTracker.sequenceNumberReceived(l);
        }
        // then
        assertEquals(1, msgErrOutOfSeqCounter.getCount());
        assertEquals(0, msgErrDuplicateCounter.getCount());
        assertEquals(0, msgErrLossCounter.getCount());
    }

    @Test
    void shouldDetectMultipleMessagesOutOfSequence() {
        // when: 13 and 14 arrive ahead of 11 and 12
        for (long l = 0; l < 10L; l++) {
            messageSequenceTracker.sequenceNumberReceived(l);
        }
        messageSequenceTracker.sequenceNumberReceived(10L);
        messageSequenceTracker.sequenceNumberReceived(14L);
        messageSequenceTracker.sequenceNumberReceived(13L);
        messageSequenceTracker.sequenceNumberReceived(11L);
        messageSequenceTracker.sequenceNumberReceived(12L);
        for (long l = 15L; l < 100L; l++) {
            messageSequenceTracker.sequenceNumberReceived(l);
        }
        // then
        assertEquals(2, msgErrOutOfSeqCounter.getCount());
        assertEquals(0, msgErrDuplicateCounter.getCount());
        assertEquals(0, msgErrLossCounter.getCount());
    }

    @Test
    void shouldDetectIndividualMessageLoss() {
        // when: only number 11 is never delivered
        for (long l = 0; l < 100L; l++) {
            if (l != 11L) {
                messageSequenceTracker.sequenceNumberReceived(l);
            }
        }
        messageSequenceTracker.close();
        // then
        assertEquals(0, msgErrOutOfSeqCounter.getCount());
        assertEquals(0, msgErrDuplicateCounter.getCount());
        assertEquals(1, msgErrLossCounter.getCount());
    }

    @Test
    void shouldDetectGapAndMessageDuplication() {
        // when: 11 is lost and 12 is delivered twice
        for (long l = 0; l < 100L; l++) {
            if (l != 11L) {
                messageSequenceTracker.sequenceNumberReceived(l);
            }
            if (l == 12L) {
                messageSequenceTracker.sequenceNumberReceived(l);
            }
        }
        messageSequenceTracker.close();
        // then
        assertEquals(0, msgErrOutOfSeqCounter.getCount());
        assertEquals(1, msgErrDuplicateCounter.getCount());
        assertEquals(1, msgErrLossCounter.getCount());
    }

    @Test
    void shouldDetectGapAndMessageDuplicationTimes2() {
        // when: 11 is lost and 12 is delivered three times in total
        for (long l = 0; l < 100L; l++) {
            if (l != 11L) {
                messageSequenceTracker.sequenceNumberReceived(l);
            }
            if (l == 12L) {
                messageSequenceTracker.sequenceNumberReceived(l);
                messageSequenceTracker.sequenceNumberReceived(l);
            }
        }
        messageSequenceTracker.close();
        // then
        assertEquals(0, msgErrOutOfSeqCounter.getCount());
        assertEquals(2, msgErrDuplicateCounter.getCount());
        assertEquals(1, msgErrLossCounter.getCount());
    }

    @Test
    void shouldDetectDelayedOutOfOrderDelivery() {
        // when: 10 arrives long after its slot (two tracking windows later),
        // so a provisional loss must be reclassified as out-of-order
        for (long l = 0; l < 5 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers(); l++) {
            if (l != 10) {
                messageSequenceTracker.sequenceNumberReceived(l);
            }
            if (l == messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers() * 2) {
                messageSequenceTracker.sequenceNumberReceived(10);
            }
        }
        messageSequenceTracker.close();
        // then
        assertEquals(1, msgErrOutOfSeqCounter.getCount());
        assertEquals(0, msgErrDuplicateCounter.getCount());
        assertEquals(0, msgErrLossCounter.getCount());
    }

    @Test
    void shouldDetectDelayedOutOfOrderDeliveryOf2ConsecutiveSequenceNumbers() {
        // when: both 10 and 11 arrive two tracking windows late
        for (long l = 0; l < 5 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers(); l++) {
            if (l != 10 && l != 11) {
                messageSequenceTracker.sequenceNumberReceived(l);
            }
            if (l == messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers() * 2) {
                messageSequenceTracker.sequenceNumberReceived(10);
                messageSequenceTracker.sequenceNumberReceived(11);
            }
        }
        messageSequenceTracker.close();
        // then
        assertEquals(2, msgErrOutOfSeqCounter.getCount());
        assertEquals(0, msgErrDuplicateCounter.getCount());
        assertEquals(0, msgErrLossCounter.getCount());
    }
}

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,14 +22,14 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -24,19 +24,19 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,19 +23,19 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-userlibs</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapters-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,25 +23,25 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-annotations</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-userlibs</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -145,10 +145,12 @@ public class NBCLIScenarioParser {
undefKeys.forEach(buildingCmd::remove);
if (!buildingCmd.containsKey("workload")) {
String relativeWorkloadPathFromRoot = yamlWithNamedScenarios.asPath().toString();
relativeWorkloadPathFromRoot = relativeWorkloadPathFromRoot.startsWith("/") ?
relativeWorkloadPathFromRoot.substring(1) : relativeWorkloadPathFromRoot;
buildingCmd.put("workload", "workload=" + relativeWorkloadPathFromRoot);
// The logic to remove the leading slash was likely used to fix a nuisance bug before,
// although it is clearly not correct as-is. Leaving temporarily for context.
// String relativeWorkloadPathFromRoot = yamlWithNamedScenarios.asPath().toString();
// relativeWorkloadPathFromRoot = relativeWorkloadPathFromRoot.startsWith("/") ?
// relativeWorkloadPathFromRoot.substring(1) : relativeWorkloadPathFromRoot;
buildingCmd.put("workload", "workload=" + workloadName);
}
if (!buildingCmd.containsKey("alias")) {

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,13 +23,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-core</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-docker</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -1,6 +1,7 @@
package io.nosqlbench.engine.cli;
import io.nosqlbench.docsys.core.NBWebServerApp;
import io.nosqlbench.nb.api.metadata.SessionNamer;
import io.nosqlbench.engine.api.activityapi.core.ActivityType;
import io.nosqlbench.engine.api.activityapi.cyclelog.outputs.cyclelog.CycleLogDumperUtility;
import io.nosqlbench.engine.api.activityapi.cyclelog.outputs.cyclelog.CycleLogImporterUtility;
@ -18,7 +19,7 @@ import io.nosqlbench.engine.core.script.Scenario;
import io.nosqlbench.engine.core.script.ScenariosExecutor;
import io.nosqlbench.engine.core.script.ScriptParams;
import io.nosqlbench.engine.docker.DockerMetricsManager;
import io.nosqlbench.nb.api.SystemId;
import io.nosqlbench.nb.api.metadata.SystemId;
import io.nosqlbench.nb.api.annotations.Annotation;
import io.nosqlbench.nb.api.annotations.Layer;
import io.nosqlbench.nb.api.content.Content;
@ -98,7 +99,7 @@ public class NBCLI {
loggerConfig.setConsoleLevel(NBLogLevel.ERROR);
NBCLIOptions globalOptions = new NBCLIOptions(args, NBCLIOptions.Mode.ParseGlobalsOnly);
String sessionName = new SessionNamer().format(globalOptions.getSessionName());
String sessionName = SessionNamer.format(globalOptions.getSessionName());
loggerConfig
.setSessionName(sessionName)

View File

@ -371,7 +371,7 @@ public class NBCLIOptions {
private Path setStatePath() {
if (statePathAccesses.size() > 0) {
throw new BasicError("The statedir must be set before it is used by other\n" +
throw new BasicError("The state dir must be set before it is used by other\n" +
" options. If you want to change the statedir, be sure you do it before\n" +
" dependent options. These parameters were called before this --statedir:\n" +
statePathAccesses.stream().map(s -> "> " + s).collect(Collectors.joining("\n")));
@ -380,7 +380,7 @@ public class NBCLIOptions {
return this.statepath;
}
List<String> paths = NBEnvironment.INSTANCE.interpolate(":", statedirs);
List<String> paths = NBEnvironment.INSTANCE.interpolateEach(":", statedirs);
Path selected = null;
for (String pathName : paths) {
@ -395,7 +395,7 @@ public class NBCLIOptions {
}
}
if (selected == null) {
selected = Path.of(paths.get(0));
selected = Path.of(paths.get(paths.size()-1));
}
if (!Files.exists(selected)) {

View File

@ -1,19 +1,19 @@
# docker-metrics
Enlist nosqlbench to stand up your metrics infrastructure using a local
Enlist nosqlbench to stand up your metrics infrastructure using a local
docker runtime:
--docker-metrics
When this option is set, nosqlbench will start graphite, prometheus,
When this option is set, nosqlbench will start graphite, prometheus,
and grafana dockers (if-needed) automatically on your local system
, configure them to work together, and point nosqlbench to send metrics
and annotations to the system automatically.
The inclued NoSQLBench dashboard uses the default grafana credentials of
The included NoSQLBench dashboard uses the default grafana credentials of
admin:admin. You can find this dashboard by browsing to the "manage
dashboards" section of grafana.
# remote docker-metrics
It is possible to use `--docker-metrics` to set up a metrics collector
@ -30,7 +30,7 @@ and other nodes, you can use this pattern:
# on the collector node
... --pin --docker-metrics
# on other nodes
... --pin --docker-metrics-at <collector node ip>
@ -46,7 +46,7 @@ of running the following by hand:
# pull and run the graphite-exporter container
docker run -d -p 9108:9108 -p 9109:9109 -p 9109:9109/udp prom/graphite-exporter
Configuration files which are used by the docker containers are stored in:
Configuration files which are used by the docker containers are stored in:
$HOME/.nosqlbench
@ -55,14 +55,14 @@ Configuration files which are used by the docker containers are stored in:
If you need to clear the state for a local docker metrics stack, you
can remove these directories.
# DASHBOARDS AND METRICS WILL BE LOST IF YOU DO THIS
# DASHBOARDS AND METRICS WILL BE LOST IF YOU DO THIS
rm ~/.nosqlbench/{grafana,prometheus,prometheus-conf,graphite-exporter}
## Manually installing dockers
# pull and run the prometheus container
docker run -d -p 9090:9090 -v '<USER HOME>/.prometheus:/etc/prometheus' prom/prometheus --config.file=/etc/prometheus/prometheus.yml" --storage.tsdb.path=/prometheus" --storage.tsdb.retention=183d --web.enable-lifecycle
# pull and run the grafana container
docker run -d -p 3000:3000 -v grafana/grafana
@ -70,7 +70,7 @@ If you need to clear the state for a local docker metrics stack, you
These may allow you to send snapshot data to a specially configured
remote grafana instance.
GF_SECURITY_ADMIN_PASSWORD=admin
GF_AUTH_ANONYMOUS_ENABLED="true"
GF_SNAPSHOTS_EXTERNAL_SNAPSHOT_URL=http://54.165.144.56:3001
@ -82,11 +82,11 @@ You can use the grafana api to set up the datasource and dashboard
if you have other tools which integrate with grafana:
# These are not commands, they are only provides API parameters
POST http://localhost:3000/api/dashboards/db
analysis.json
# (found in resources/docker/dashboards/analysis.json)
POST http://localhost:3000/api/datasources
prometheus-datasource.yaml
# (found in resources/docker/datasources/prometheus-datasource.yaml)

View File

@ -0,0 +1,38 @@
# State Directory
In order to maintain state for a NoSQLBench client instance,
a directory is used. The default directory will be auto-created
for you if you do not specify one.
You can always override the state directory location by providing
an option like `--statedir=/tmp/testdir`, or `--statedir=$HOME/.nosqlbench`.
Within the --statedir parameter, the following values will be expanded
automatically:
- $HOME: the current user's home directory
- $USER: the current user's name
- $varname: Any other environment variable
`$NBSTATEDIR` is a mechanism for setting and finding the local state
directory for NoSQLBench. It is a search path, delimited by
the ':' character. It allows both Java system properties
and shell environment variables.
Multiple values may be specified, like with the PATH environment variable,
separated by colons. When none of the directories is found,
the last one in the list will be created. This is based on the convention
that more specific "override" directories are searched first, whereas more
global state is allowed as the fall-through case. Generally users will
want to keep their state in a single and uniform location, like
`$HOME/.nosqlbench`, but they will want the option of localizing configs for
directory-based test management. Thus, the default value for
--statedir is '$NBSTATEDIR:$PWD/.nosqlbench:$HOME/.nosqlbench'.
Once NoSQLBench is past the CLI processing stage, the NBSTATEDIR becomes
a valid system property, and any internal access to environment variables
can also use this property.
NoSQLBench developers should take care to use the
NBEnvironment class as the method to access environment variables.
(TODO: Add this to the developer guide)

View File

@ -4,6 +4,7 @@ import io.nosqlbench.engine.api.scenarios.NBCLIScenarioParser;
import io.nosqlbench.nb.api.errors.BasicError;
import org.junit.jupiter.api.Test;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
@ -77,7 +78,7 @@ public class NBCLIScenarioParserTest {
assertThat(cmds.size()).isEqualTo(1);
assertThat(cmds.get(0).getArg("driver")).isEqualTo("stdout");
assertThat(cmds.get(0).getArg("cycles")).isEqualTo("10");
assertThat(cmds.get(0).getArg("workload")).isEqualTo("target/test-classes/activities/scenario-test.yaml");
assertThat(cmds.get(0).getArg("workload")).isEqualTo("scenario-test");
}
@Test
@ -88,7 +89,7 @@ public class NBCLIScenarioParserTest {
assertThat(cmds.get(0).getArg("driver")).isEqualTo("stdout");
assertThat(cmds.get(0).getArg("cycles")).isEqualTo("20");
assertThat(cmds.get(0).getArg("cycles-test")).isEqualTo("20");
assertThat(cmds.get(0).getArg("workload")).isEqualTo("target/test-classes/activities/scenario-test.yaml");
assertThat(cmds.get(0).getArg("workload")).isEqualTo("scenario-test");
}
@Test
@ -106,9 +107,23 @@ public class NBCLIScenarioParserTest {
List<Cmd> cmds1 = opts1.getCommands();
assertThat(cmds1.size()).isEqualTo(1);
assertThat(cmds1.get(0).getArg("cycles-test")).isNull();
}
@Test
public void testThatFullyQualifiedScenarioFilesAreSupported() {
Path cwd = Path.of(".").toAbsolutePath();
System.out.println("cwd: '" + cwd + "'");
Path rel = Path.of("src/test/resources/activities/scenario-test.yaml");
assertThat(rel).exists();
Path absolute = rel.toAbsolutePath();
assertThat(absolute).exists();
NBCLIOptions opts = new NBCLIOptions(new String[]{ absolute.toString(), "schema-only", "cycles-test=20"});
List<Cmd> cmds = opts.getCommands();
assertThat(cmds.size()).isGreaterThan(0);
}
@Test
public void testSanitizer() {

View File

@ -17,6 +17,7 @@
package io.nosqlbench.engine.cli;
import io.nosqlbench.nb.api.metadata.SessionNamer;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
@ -26,18 +27,18 @@ public class SessionNamerTest {
@Test
public void testDefaultFormat() {
SessionNamer namer = new SessionNamer();
String name1 = namer.format(null);
String name1 = SessionNamer.format(null);
assertThat(name1).matches("scenario_\\d{8}_\\d{6}_\\d{3}");
String name2 = namer.format("");
String name2 = SessionNamer.format("");
assertThat(name2).matches("scenario_\\d{8}_\\d{6}_\\d{3}");
}
@Test
public void testCustomFormat() {
SessionNamer namer = new SessionNamer();
String name1 = namer.format("Custom_session_name");
String name1 = SessionNamer.format("Custom_session_name");
assertThat(name1).matches("Custom_session_name");
String name2 = namer.format("TEST--%tQ");
String name2 = SessionNamer.format("TEST--%tQ");
assertThat(name2).matches("TEST--\\d{13}");
}

View File

@ -1,42 +1,42 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<artifactId>engine-clients</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<description>
A set of clients for calling nosqlbench and related services.
</description>
<artifactId>engine-clients</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<description>
A set of clients for calling nosqlbench and related services.
</description>
<dependencies>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>
</dependencies>
<build>
<testResources>
<testResource>
<directory>src/test/resources</directory>
<includes>
<include>examples/**</include>
<include />
</includes>
</testResource>
</testResources>
</build>
<build>
<testResources>
<testResource>
<directory>src/test/resources</directory>
<includes>
<include>examples/**</include>
<include />
</includes>
</testResource>
</testResources>
</build>
</project>

View File

@ -5,7 +5,7 @@ import io.nosqlbench.engine.clients.grafana.GStitcher;
import io.nosqlbench.engine.clients.grafana.GrafanaClient;
import io.nosqlbench.engine.clients.grafana.GrafanaClientConfig;
import io.nosqlbench.engine.clients.grafana.transfer.*;
import io.nosqlbench.nb.api.SystemId;
import io.nosqlbench.nb.api.metadata.SystemId;
import java.nio.file.Path;
import java.time.Instant;

View File

@ -5,7 +5,7 @@ import io.nosqlbench.engine.clients.grafana.GrafanaClientConfig;
import io.nosqlbench.engine.clients.grafana.transfer.GAnnotation;
import io.nosqlbench.nb.annotations.Service;
import io.nosqlbench.nb.api.OnError;
import io.nosqlbench.nb.api.SystemId;
import io.nosqlbench.nb.api.metadata.SystemId;
import io.nosqlbench.nb.api.annotations.Annotation;
import io.nosqlbench.nb.api.annotations.Annotator;
import io.nosqlbench.nb.api.config.params.ParamsParser;

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -20,7 +20,7 @@
<dependencies>
<!--<dependency>-->
<!--<groupId>com.github.oshi</groupId>-->
<!--<groupId>com.github.oshi</groupId>-->
<!--<artifactId>oshi-core</artifactId>-->
<!--<version>3.5.0</version>-->
<!--</dependency>-->
@ -28,13 +28,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
@ -72,25 +72,25 @@
<groupId>org.graalvm.js</groupId>
<artifactId>js-scriptengine</artifactId>
</dependency>
<dependency>
<groupId>org.graalvm.tools</groupId>
<artifactId>profiler</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.graalvm.tools</groupId>
<artifactId>chromeinspector</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-clients</artifactId>
<version>4.15.58-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.graalvm.tools</groupId>
<artifactId>profiler</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.graalvm.tools</groupId>
<artifactId>chromeinspector</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-clients</artifactId>
<version>4.15.64-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<!-- only compile scope -->
<!-- only compile scope -->
</dependencies>

View File

@ -31,7 +31,7 @@ public class ActivityTypeLoader {
public ActivityTypeLoader() {
List<String> libpaths = NBEnvironment.INSTANCE.interpolate(":", "$" + NBEnvironment.NBLIBS);
List<String> libpaths = NBEnvironment.INSTANCE.interpolateEach(":", "$" + NBEnvironment.NBLIBS);
Set<URL> urlsToAdd = new HashSet<>();
for (String libpaths_entry : libpaths) {

View File

@ -25,6 +25,9 @@ import io.nosqlbench.engine.core.lifecycle.ScenarioController;
import io.nosqlbench.engine.core.lifecycle.ScenarioResult;
import io.nosqlbench.engine.core.annotation.Annotators;
import io.nosqlbench.engine.core.metrics.PolyglotMetricRegistryBindings;
import io.nosqlbench.nb.api.metadata.ScenarioMetadata;
import io.nosqlbench.nb.api.metadata.ScenarioMetadataAware;
import io.nosqlbench.nb.api.metadata.SystemId;
import io.nosqlbench.nb.api.annotations.Layer;
import io.nosqlbench.nb.api.annotations.Annotation;
import org.apache.logging.log4j.LogManager;
@ -46,10 +49,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
@ -63,6 +63,7 @@ public class Scenario implements Callable<ScenarioResult> {
private State state = State.Scheduled;
private volatile ScenarioShutdownHook scenarioShutdownHook;
private Exception error;
private ScenarioMetadata scenarioMetadata;
public enum State {
@ -236,12 +237,23 @@ public class Scenario implements Callable<ScenarioResult> {
metricRegistry,
scriptEnv
);
ScenarioMetadataAware.apply(extensionObject,getScenarioMetadata());
logger.trace("Adding extension object: name=" + extensionDescriptor.getBaseVariableName() +
" class=" + extensionObject.getClass().getSimpleName());
scriptEngine.put(extensionDescriptor.getBaseVariableName(), extensionObject);
}
}
private synchronized ScenarioMetadata getScenarioMetadata() {
if (this.scenarioMetadata==null) {
this.scenarioMetadata = new ScenarioMetadata(
this.startedAtMillis,
this.scenarioName,
SystemId.getNodeId(),
SystemId.getNodeFingerprint()
);
}
return scenarioMetadata;
}
public void runScenario() {

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -56,7 +56,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -1,46 +1,46 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<artifactId>engine-docs</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<description>CLI for nosqlbench.</description>
<artifactId>engine-docs</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<description>CLI for nosqlbench.</description>
<properties>
<javadoc.name>nosqlbench Docs</javadoc.name>
</properties>
<properties>
<javadoc.name>nosqlbench Docs</javadoc.name>
</properties>
<dependencies>
<dependencies>
<!-- <dependency>-->
<!-- <groupId>io.nosqlbench</groupId>-->
<!-- <artifactId>engine-vis</artifactId>-->
<!-- <version>2.11.31-SNAPSHOT</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>io.nosqlbench</groupId>-->
<!-- <artifactId>engine-vis</artifactId>-->
<!-- <version>2.11.31-SNAPSHOT</version>-->
<!-- </dependency>-->
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>docsys</artifactId>
<version>4.15.58-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>docsys</artifactId>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
</resource>
</resources>
</build>
<build>
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
</resource>
</resources>
</build>
<profiles>
<profile>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,7 +22,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>
@ -40,23 +40,23 @@
</resources>
<plugins>
<!-- <plugin>-->
<!-- <groupId>org.apache.maven.plugins</groupId>-->
<!-- <artifactId>maven-javadoc-plugin</artifactId>-->
<!-- <version>3.1.1</version>-->
<!-- <configuration>-->
<!-- <doctitle>nosqlbench Extensions</doctitle>-->
<!-- <windowtitle>nosqlbench Extensions</windowtitle>-->
<!-- </configuration>-->
<!-- <executions>-->
<!-- <execution>-->
<!-- <id>attach-javadoc</id>-->
<!-- <goals>-->
<!-- <goal>jar</goal>-->
<!-- </goals>-->
<!-- </execution>-->
<!-- </executions>-->
<!-- </plugin>-->
<!-- <plugin>-->
<!-- <groupId>org.apache.maven.plugins</groupId>-->
<!-- <artifactId>maven-javadoc-plugin</artifactId>-->
<!-- <version>3.1.1</version>-->
<!-- <configuration>-->
<!-- <doctitle>nosqlbench Extensions</doctitle>-->
<!-- <windowtitle>nosqlbench Extensions</windowtitle>-->
<!-- </configuration>-->
<!-- <executions>-->
<!-- <execution>-->
<!-- <id>attach-javadoc</id>-->
<!-- <goals>-->
<!-- <goal>jar</goal>-->
<!-- </goals>-->
<!-- </execution>-->
<!-- </executions>-->
<!-- </plugin>-->
</plugins>
</build>

View File

@ -0,0 +1,9 @@
globalvars extension
===================
Provides access to the global object map in SharedState.gl_ObjectMap, which enables
cross-binding and cross-thread data sharing.
```
var result = globalvars.get("result");
```

View File

@ -0,0 +1,26 @@
http extension
==============
Allow access to HTTP URLs from within scripts, supporting both basic
get and post methods. In all cases, the returned type is the full
response object, from which the body content can be accessed.
## Examples
Get content from a URL into a string variable:
```
var response= http.get("http://google.com/")
```
Post an empty body to a URL, useful for state-changing calls where
all of the control data is in the URL:
```
var response= http.post("http://some.server/path/to/resource?query=foobarbaz")
```
Post content to a URL, specifying the URL, content value, and content type:
```
var response= http.post("http://some.server/path/to/resource", "this is the data", "text/plain");
```

View File

@ -0,0 +1,96 @@
package io.nosqlbench.engine.extensions.s3uploader;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.transfer.MultipleFileUpload;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.codahale.metrics.MetricRegistry;
import io.nosqlbench.nb.addins.s3.s3urlhandler.S3ClientCache;
import io.nosqlbench.nb.addins.s3.s3urlhandler.S3UrlFields;
import io.nosqlbench.nb.api.NBEnvironment;
import io.nosqlbench.nb.api.metadata.ScenarioMetadata;
import io.nosqlbench.nb.api.metadata.ScenarioMetadataAware;
import org.apache.logging.log4j.Logger;
import javax.script.ScriptContext;
import java.io.File;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * Scripting extension object which uploads a local directory tree to an S3
 * bucket, expanding tokens in the target URL from scenario metadata, system
 * properties, environment variables, and caller-provided parameters.
 */
public class S3Uploader implements ScenarioMetadataAware {
    private final Logger logger;
    // NOTE(review): metricRegistry and scriptContext are retained but not used by the
    // upload path shown here -- presumably kept for parity with other plugin objects.
    private final MetricRegistry metricRegistry;
    private final ScriptContext scriptContext;
    // Injected after construction via setScenarioMetadata(..); uploadDirToUrlTokenized
    // dereferences it unconditionally, so calling it before injection throws NPE.
    private ScenarioMetadata scenarioMetadata;

    public S3Uploader(Logger logger, MetricRegistry metricRegistry, ScriptContext scriptContext) {
        this.logger = logger;
        this.metricRegistry = metricRegistry;
        this.scriptContext = scriptContext;
    }

    /**
     * Upload the local file path to the specified S3 URL, then return the URL of the bucket
     * in its fully expanded form. See the details on token expansions in the s3.md help docs.
     * @param localFilePath The path to the local directory
     * @param urlTemplate A template that is expanded to a valid S3 URL
     * @return The fully expanded name of the URL used for upload
     */
    public String uploadDirToUrl(String localFilePath, String urlTemplate) {
        return uploadDirToUrlTokenized(localFilePath, urlTemplate, Map.of());
    }

    /**
     * Upload the local file path to the specified S3 URL, then return the URL of the bucket
     * in its fully expanded form. See the details on token expansions in the s3.md help docs.
     * Any params which are provided supersede the normally provided values from the system.
     * @param localFilePath The path to the local directory
     * @param urlTemplate A template that is expanded to a valid S3 URL
     * @param params Additional token expansions which will take precedence over other available values.
     * @return The fully expanded name of the URL used for upload
     * @throws RuntimeException if the path is not a plain directory on the default
     *         filesystem, if token expansion fails, or if the upload is interrupted
     */
    public String uploadDirToUrlTokenized(String localFilePath, String urlTemplate, Map<String,String> params) {
        Path sourcePath = Path.of(localFilePath);
        // The AWS TransferManager works on java.io.File, so only paths on the
        // default filesystem can be handed to it.
        if (!FileSystems.getDefault().equals(sourcePath.getFileSystem())) {
            throw new RuntimeException("The file must reside on the default filesystem to be uploaded by S3.");
        }
        if (!Files.isDirectory(sourcePath, LinkOption.NOFOLLOW_LINKS)) {
            throw new RuntimeException("path '" + sourcePath + "' is not a directory.");
        }
        File sourceDir = sourcePath.toFile();
        Map<String,String> combined = new LinkedHashMap<>(params);
        // NOTE(review): scenario metadata is copied in after params, so metadata keys
        // (SESSION_NAME, SYSTEM_ID, ...) override same-named entries supplied in params.
        // Confirm this override order is intended, given the precedence note above.
        combined.putAll(scenarioMetadata.asMap());
        String url = NBEnvironment.INSTANCE.interpolateWithTimestamp(
            urlTemplate,
            scenarioMetadata.getStartedAt(),
            combined
        )
            .orElseThrow(); // fail fast if any token in the template could not be resolved
        logger.debug("S3 composite URL is '" + url + "'");
        S3UrlFields fields = S3UrlFields.fromURLString(url);
        S3ClientCache s3ClientCache = new S3ClientCache();
        AmazonS3 s3 = s3ClientCache.get(fields);
        TransferManager xfers = TransferManagerBuilder.standard().withS3Client(s3).build();
        String prefix = fields.key;
        // final 'true' => recurse into subdirectories
        MultipleFileUpload mfu = xfers.uploadDirectory(fields.bucket, prefix, sourceDir, true);
        try {
            mfu.waitForCompletion(); // blocks until every file has transferred
        } catch (InterruptedException e) {
            throw new RuntimeException("Multi-file upload was interrupted.");
        }
        return url;
    }

    @Override
    public void setScenarioMetadata(ScenarioMetadata metadata) {
        this.scenarioMetadata = metadata;
    }
}

View File

@ -0,0 +1,32 @@
package io.nosqlbench.engine.extensions.s3uploader;
import com.codahale.metrics.MetricRegistry;
import io.nosqlbench.engine.api.extensions.ScriptingPluginInfo;
import io.nosqlbench.nb.annotations.Service;
import io.nosqlbench.nb.api.metadata.ScenarioMetadata;
import io.nosqlbench.nb.api.metadata.ScenarioMetadataAware;
import org.apache.logging.log4j.Logger;
import javax.script.ScriptContext;
/**
 * Service descriptor which exposes the {@code s3} scripting extension.
 * Scenario metadata received by this descriptor is forwarded to every
 * uploader instance it creates.
 */
@Service(value = ScriptingPluginInfo.class, selector = "s3")
public class S3UploaderPluginData implements ScriptingPluginInfo<S3Uploader>, ScenarioMetadataAware {

    private ScenarioMetadata scenarioMetadata;

    @Override
    public String getDescription() {
        return "Allow for uploading or downloading a directory from S3";
    }

    @Override
    public S3Uploader getExtensionObject(Logger logger, MetricRegistry metricRegistry, ScriptContext scriptContext) {
        S3Uploader extensionObject = new S3Uploader(logger, metricRegistry, scriptContext);
        // Hand along whatever metadata this descriptor has received so far.
        ScenarioMetadataAware.apply(extensionObject, scenarioMetadata);
        return extensionObject;
    }

    @Override
    public void setScenarioMetadata(ScenarioMetadata metadata) {
        this.scenarioMetadata = metadata;
    }
}

View File

@ -0,0 +1,68 @@
S3 extension
==============
Allow uploading of a local directory on the default filesystem
to an S3 bucket, using an S3 URI to specify the bucket, location, and so on.
The URL is specified in the standard S3 format, such as:
1. `s3://mybucket/mypath-as-a-key/with-any-level-of-depth`
2. `s3://myuser:mypass@mybucket/mypath-as-a-key/with-any-level-of-depth`
In addition, any tokens which are supported by the standard NoSQLBench
token substitution mechanism will be used to construct a URL at the time
of usage. These forms include the following:
- Scenario Metadata - There are several key fields initialized for a scenario which can be used as common
reference points. These occlude the environment variables of the same name. These are:
- SESSION_NAME - The name auto-generated for a session, used in the logfile names, and so on.
- SYSTEM_ID - The string form of the most canonically identifying IP address, excluding
known symbolic interface names (docker*, for example) and all localhost addresses.
- SYSTEM_FINGERPRINT - a stable and anonymized identifier for a given system. This will be
stable as long as the networking configuration does not change.
- System Properties
- Any parameter in `$word1.word2...` form -- any multi-part variable name with separating dots
is taken as a system property to the JVM. These are expanded in place. Both `$word1.word2`
and `${word1.word2}` patterns are supported, whereas the latter is more strict and thus safer.
- Environment Variables
- As with System Properties, environment variable form the shell are also supported, as long
as they do not include a dot.
- Temporal Fields from the Scenario start time
- Any field specifier that you can use with the temporal types in Java's standard String.
format can be used. The reference time for these is always the scenario start time.
- Example: The default session name template looks like `scenario_%tY%tm%td_%tH%tM%tS_%tL`
## Examples
```
// If you have local logical identifiers in your scenario script which you want
// to templatize into your upload paths, you can provide your own js object
// as the third parameter
s3.uploadDirToUrlTokenized(
'metrics',
's3://test-results/${HOSTNAME}/${testid}-${testversion}/metrics',
{
'testid':'20210343',
'testversion':'v2'
}
);
// Otherwise, use the two-parameter version:
s3.uploadDirToUrl('metrics','s3://test-results/${HOSTNAME}/metrics');
```
## Post-Hoc upload
Scripting extensions only run if the scenario is not halted before they are invoked
in the main scenario script. If you want to ensure that this one runs after a test,
regardless of when or why the test stopped, it is possible to wrap it within
a shutdown hook which will run after scenario completion.
This is an example of how to do so:
```
shutdown.addShutdownHook('upload_metrics', function f() {
s3.uploadDirToUrl('metrics','s3://test-results/${HOSTNAME}/metrics');
});
```

View File

@ -0,0 +1,32 @@
package io.nosqlbench.engine.shutdown;
import com.codahale.metrics.MetricRegistry;
import org.apache.logging.log4j.Logger;
import javax.script.ScriptContext;
import java.util.function.Function;
/**
 * Scripting plugin which lets scenario scripts register functions to be
 * executed when the JVM shuts down.
 */
public class ShutdownHookPlugin {

    private final Logger logger;
    private final MetricRegistry metricRegistry;
    private final ScriptContext scriptContext;

    public ShutdownHookPlugin(Logger logger, MetricRegistry metricRegistry, ScriptContext scriptContext) {
        this.logger = logger;
        this.metricRegistry = metricRegistry;
        this.scriptContext = scriptContext;
    }

    /**
     * Register a scripted function to run at JVM shutdown.
     *
     * @param name A symbolic name for the hook, used for the thread name and in logs
     * @param f The scripted callable; must be adaptable to {@link Function}
     */
    public void addShutdownHook(String name, Object f) {
        if (f instanceof Function) {
            String hookThreadName = "shutdown-function-" + name;
            Thread hookThread = new ShutdownRunnableFunction(logger, name, (Function<?, ?>) f);
            hookThread.setName(hookThreadName);
            Runtime.getRuntime().addShutdownHook(hookThread);
            logger.info("Registered shutdown hook to run under name '" + hookThreadName + "'");
        } else {
            throw new RuntimeException("The object provided to the shutdown hook plugin was not recognized as a function.");
        }
    }
}

View File

@ -0,0 +1,22 @@
package io.nosqlbench.engine.shutdown;
import com.codahale.metrics.MetricRegistry;
import io.nosqlbench.engine.api.extensions.ScriptingPluginInfo;
import io.nosqlbench.nb.annotations.Service;
import org.apache.logging.log4j.Logger;
import javax.script.ScriptContext;
/**
 * Service descriptor which exposes the {@code shutdown} scripting extension.
 */
@Service(value = ScriptingPluginInfo.class, selector = "shutdown")
public class ShutdownHookPluginMetadata implements ScriptingPluginInfo<ShutdownHookPlugin> {

    @Override
    public ShutdownHookPlugin getExtensionObject(Logger logger, MetricRegistry metricRegistry, ScriptContext scriptContext) {
        return new ShutdownHookPlugin(logger, metricRegistry, scriptContext);
    }

    @Override
    public String getDescription() {
        return "Register shutdown hooks in the form of javascript functions.";
    }
}

View File

@ -0,0 +1,32 @@
package io.nosqlbench.engine.shutdown;
import org.apache.logging.log4j.Logger;
import java.util.function.Function;
/**
 * A Thread wrapper around a scripted function, suitable for registration with
 * {@code Runtime.addShutdownHook(..)} so the function runs at JVM shutdown.
 */
public class ShutdownRunnableFunction extends Thread {
    // Symbolic hook name used only in log messages (distinct from the Thread
    // name, which the registering caller sets separately).
    private final String name;
    private final Function<Object[],Object> function;
    private final Logger logger;

    public ShutdownRunnableFunction(Logger logger, String name, Function<?, ?> function) {
        this.logger = logger;
        this.name = name;
        // Unchecked cast: due to erasure this always succeeds here; if the underlying
        // function cannot accept an Object[] it will only fail later inside apply(..).
        this.function = (Function<Object[],Object>)function;
    }

    @Override
    public void run() {
        logger.info("Running shutdown hook '" + name + "'...");
        try {
            // Invoke the scripted function with an empty argument array.
            Object result = function.apply(new Object[0]);
            // If the function returned text, surface it in the log.
            if (result instanceof CharSequence) {
                logger.info("shutdown hook returned output:\n" + ((CharSequence) result));
            }
            logger.info("Completed shutdown hook '" + name + "'...");
        } catch (Exception e) {
            // NOTE(review): rethrowing from a shutdown-hook thread cannot be caught by
            // any caller; it only reaches the thread's default handler. Consider whether
            // logging the failure was intended instead -- confirm.
            throw new RuntimeException(e);
        }
    }
}

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -35,7 +35,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-cli</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -1,10 +0,0 @@
# (src/test/resources/activities/) jmx-test-1.yaml
statements:
- read1:
url: service:jmx:rmi:///jndi/rmi://10.101.33.50:7199/jmxrmi
object: org.apache.cassandra.metrics:type=Compaction,name=PendingTasks
readvar: Value
as_type: int
as_name: pending_tasks

File diff suppressed because it is too large Load Diff

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -32,7 +32,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-annotations</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
@ -63,6 +63,18 @@
<artifactId>oshi-core</artifactId>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.12.12</version>
</dependency>
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>2.4.0-b180830.0359</version>
</dependency>
<!-- perf testing -->
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-core</artifactId>

View File

@ -0,0 +1,41 @@
package io.nosqlbench.nb.addins.s3.s3urlhandler;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import java.util.WeakHashMap;
/**
 * This client cache uses the credentials provided in a URL to create
 * a fingerprint, and then creates a customized S3 client for each unique
 * credentials fingerprint. If these clients are not used, they are allowed
 * to be expired from the map and collected.
 */
public class S3ClientCache {

    // Weakly-keyed so that unused client entries can be reclaimed by GC.
    private final WeakHashMap<S3UrlFields.CredentialsFingerprint, AmazonS3> cache = new WeakHashMap<>();

    public S3ClientCache() {
    }

    /**
     * Return a cached S3 client for the credentials carried in the given URL
     * fields, creating and caching one if none exists yet.
     *
     * @param fields the parsed S3 URL fields holding optional credentials
     * @return an {@link AmazonS3} client configured for those credentials
     */
    public AmazonS3 get(S3UrlFields fields) {
        // BUG FIX: previously this called fields.getCredentialsFingerprint(), which
        // returned null, so every client was cached under the single null key and
        // credential-specific clients were silently conflated.
        return cache.computeIfAbsent(fields.credentialsFingerprint(),
            cfp -> createAuthorizedClient(fields));
    }

    // Build a client with explicit static credentials when the URL carried
    // them, otherwise fall back to the SDK's default credential chain.
    private AmazonS3 createAuthorizedClient(S3UrlFields fields) {
        if (fields.accessKey != null && fields.secretKey != null) {
            AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
            AWSCredentials specialcreds = new BasicAWSCredentials(fields.accessKey, fields.secretKey);
            builder = builder.withCredentials(new AWSStaticCredentialsProvider(specialcreds));
            return builder.build();
        } else {
            return AmazonS3ClientBuilder.defaultClient();
        }
    }
}

View File

@ -0,0 +1,31 @@
package io.nosqlbench.nb.addins.s3.s3urlhandler;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.S3Object;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
/**
 * A URLConnection which resolves its content from an S3 object, using a
 * client obtained from the shared {@link S3ClientCache}.
 */
public class S3UrlConnection extends URLConnection {

    private final S3ClientCache clientCache;

    protected S3UrlConnection(S3ClientCache clientCache, URL url) {
        super(url);
        this.clientCache = clientCache;
    }

    /** No-op: the S3 client is resolved lazily in {@link #getInputStream()}. */
    @Override
    public void connect() throws IOException {
    }

    @Override
    public InputStream getInputStream() throws IOException {
        S3UrlFields parsed = new S3UrlFields(url);
        AmazonS3 client = clientCache.get(parsed);
        S3Object s3Object = client.getObject(parsed.bucket, parsed.key);
        return s3Object.getObjectContent();
    }
}

View File

@ -0,0 +1,99 @@
package io.nosqlbench.nb.addins.s3.s3urlhandler;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
/**
 * Parses an S3-style URL into its bucket, key, endpoint, and optional
 * credential fields. Credentials may be carried either in the URL's userinfo
 * section ({@code s3://accessKey:secretKey@bucket/...}) or in its query string.
 */
public class S3UrlFields {

    public final String bucket;
    public final String key;
    public final String secretKey;
    public final String accessKey;
    private final String endpoint;

    /**
     * Parse a URL string into S3 fields.
     * @param urlString the URL text to parse
     * @return the parsed fields
     * @throws RuntimeException wrapping {@link MalformedURLException} if the text is not a valid URL
     */
    public static S3UrlFields fromURLString(String urlString) {
        URL url = null;
        try {
            url = new URL(urlString);
        } catch (MalformedURLException e) {
            throw new RuntimeException(e);
        }
        return new S3UrlFields(url);
    }

    public S3UrlFields(URL url) {
        String accessKey = null;
        String secretKey = null;
        String userinfo = url.getUserInfo();
        if (userinfo != null) {
            String[] userfields = userinfo.split(":", 2);
            accessKey = URLDecoder.decode(userfields[0], StandardCharsets.UTF_8);
            // ROBUSTNESS FIX: userinfo without a ':' previously threw
            // ArrayIndexOutOfBoundsException; now the secret key is simply left unset.
            if (userfields.length == 2) {
                secretKey = URLDecoder.decode(userfields[1], StandardCharsets.UTF_8);
            }
        } else {
            String query = url.getQuery();
            if (query != null) {
                for (String qs : query.split("&")) {
                    // NOTE(review): parameters are split on ':' rather than the conventional '='
                    // (i.e. ?accessKey:AKIA...); confirm this is the intended URL form.
                    String[] words = qs.split(":", 2);
                    if (words[0].equals("accessKey")) {
                        accessKey = URLDecoder.decode(words[1], StandardCharsets.UTF_8);
                    } else if (words[0].equals("secretKey")) {
                        secretKey = URLDecoder.decode(words[1], StandardCharsets.UTF_8);
                    }
                }
            }
        }
        // https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html
        this.accessKey = accessKey;
        this.secretKey = secretKey;
        // Virtual-hosted style: first host label is the bucket, remainder is the endpoint.
        String[] bucketAndEndpoint = url.getHost().split("\\.", 2);
        this.bucket = bucketAndEndpoint[0];
        this.endpoint = (bucketAndEndpoint.length == 2) ? bucketAndEndpoint[1] : "";
        this.key = url.getPath().substring(1);
    }

    /** @return a fresh fingerprint identifying this URL's credentials and endpoint */
    public CredentialsFingerprint credentialsFingerprint() {
        return new CredentialsFingerprint(this);
    }

    /** @return the credentials fingerprint for these fields */
    public CredentialsFingerprint getCredentialsFingerprint() {
        // BUG FIX: previously returned null, which broke fingerprint-keyed caching.
        return credentialsFingerprint();
    }

    /**
     * Identity view over the credential-relevant fields (accessKey, secretKey,
     * endpoint) of an {@link S3UrlFields}, usable as a cache key.
     */
    public static class CredentialsFingerprint {
        private final S3UrlFields fields;

        public CredentialsFingerprint(S3UrlFields fields) {
            this.fields = fields;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            // BUG FIX: previously cast o to S3UrlFields, which always threw
            // ClassCastException since o is a CredentialsFingerprint here.
            CredentialsFingerprint that = (CredentialsFingerprint) o;
            if (!Objects.equals(fields.secretKey, that.fields.secretKey)) return false;
            if (!Objects.equals(fields.accessKey, that.fields.accessKey)) return false;
            return Objects.equals(fields.endpoint, that.fields.endpoint);
        }

        @Override
        public int hashCode() {
            int result = (fields.secretKey != null ? fields.secretKey.hashCode() : 0);
            result = 31 * result + (fields.accessKey != null ? fields.accessKey.hashCode() : 0);
            result = 31 * result + (fields.endpoint != null ? fields.endpoint.hashCode() : 0);
            return result;
        }
    }
}

View File

@ -0,0 +1,21 @@
package io.nosqlbench.nb.addins.s3.s3urlhandler;
import java.io.IOException;
import java.net.URL;
import java.net.URLStreamHandler;
/**
 * URLStreamHandler for the {@code s3} scheme, producing connections which
 * draw their clients from a shared {@link S3ClientCache}.
 */
public class S3UrlStreamHandler extends URLStreamHandler {

    private final S3ClientCache clientCache;
    private final String protocol;

    public S3UrlStreamHandler(S3ClientCache clientCache, String protocol) {
        this.clientCache = clientCache;
        this.protocol = protocol;
    }

    @Override
    protected S3UrlConnection openConnection(URL url) throws IOException {
        return new S3UrlConnection(this.clientCache, url);
    }
}

View File

@ -0,0 +1,21 @@
package io.nosqlbench.nb.addins.s3.s3urlhandler;
import io.nosqlbench.nb.annotations.Service;
import java.net.URLStreamHandler;
import java.net.spi.URLStreamHandlerProvider;
/**
 * ServiceLoader provider which installs a handler for the {@code s3} URL
 * scheme, sharing a single client cache across all handlers it creates.
 */
@Service(value = URLStreamHandlerProvider.class, selector = "s3")
public class S3UrlStreamHandlerProvider extends URLStreamHandlerProvider {

    // One cache shared by every handler this provider hands out.
    private final S3ClientCache clientCache = new S3ClientCache();

    @Override
    public URLStreamHandler createURLStreamHandler(String protocol) {
        if (!"s3".equals(protocol)) {
            return null; // some other scheme; defer to the next provider
        }
        return new S3UrlStreamHandler(clientCache, protocol);
    }
}

View File

@ -1,6 +1,7 @@
package io.nosqlbench.nb.api;
import io.nosqlbench.nb.api.errors.BasicError;
import io.nosqlbench.nb.api.metadata.SessionNamer;
import org.apache.logging.log4j.Logger;
import java.util.*;
@ -107,14 +108,19 @@ public class NBEnvironment {
* @param defaultValue The value to return if the name is not found
* @return the system property or environment variable's value, or the default value
*/
public String getOr(String name, String defaultValue) {
String value = peek(name);
public String getOr(String name, String defaultValue, Map<String,String> supplemental) {
String value = peek(name, supplemental);
if (value == null) {
value = defaultValue;
}
return reference(name, value);
}
public String getOr(String name, String defaultValue) {
return getOr(name, defaultValue, Map.of());
}
/**
* This is a non-referencing get of a value, and the canonical way to
* access a value. This method codifies the semantics of whether something is
@ -122,8 +128,14 @@ public class NBEnvironment {
* @param name The parameter name
* @return A value, or null if none was found
*/
private String peek(String name) {
private String peek(String name, Map<String,String> supplemental) {
String value = null;
if (supplemental.containsKey(name)) {
value = supplemental.get(name);
if (value!=null) {
return value;
}
}
if (name.contains(".")) {
value = System.getProperty(name.toLowerCase());
if (value != null) {
@ -167,7 +179,11 @@ public class NBEnvironment {
}
public boolean containsKey(String name) {
String value = peek(name);
return containsKey(name, Map.of());
}
public boolean containsKey(String name, Map<String,String> supplemental) {
String value = peek(name, supplemental);
return (value != null);
}
@ -184,7 +200,7 @@ public class NBEnvironment {
* @param word The word to interpolate the environment values into
* @return The interpolated value, after substitutions, or null if any lookup failed
*/
public Optional<String> interpolate(String word) {
public Optional<String> interpolate(String word, Map<String,String> supplemental) {
Pattern envpattern = Pattern.compile("(\\$(?<env1>[a-zA-Z_][A-Za-z0-9_.]+)|\\$\\{(?<env2>[^}]+)\\})");
Matcher matcher = envpattern.matcher(word);
StringBuilder sb = new StringBuilder();
@ -193,7 +209,7 @@ public class NBEnvironment {
if (envvar == null) {
envvar = matcher.group("env2");
}
String value = peek(envvar);
String value = peek(envvar,supplemental);
if (value == null) {
if (logger != null) {
logger.debug("no value found for '" + envvar + "', returning Optional.empty() for '" + word + "'");
@ -208,8 +224,12 @@ public class NBEnvironment {
return Optional.of(sb.toString());
}
public List<String> interpolate(CharSequence delim, String combined) {
String[] split = combined.split(delim.toString());
public Optional<String> interpolate(String word) {
return interpolate(word,Map.of());
}
public List<String> interpolateEach(CharSequence delim, String toBeRecombined) {
String[] split = toBeRecombined.split(delim.toString());
List<String> mapped = new ArrayList<>();
for (String pattern : split) {
Optional<String> interpolated = interpolate(pattern);
@ -218,4 +238,40 @@ public class NBEnvironment {
return mapped;
}
/**
* Interpolate system properties, environment variables, time fields, and arbitrary replacement strings
* into a single result. Templates such as {@code /tmp/%d-${testrun}-$System.index-SCENARIO} are supported.
*
* <hr/>
*
* The tokens found in the raw template are interpolated in the following order.
* <ul>
* <li>Any token which exactly matches one of the keys in the provided map is substituted
* directly as is. No token sigil like '$' is used here, so if you want to support that
* as is, you need to provide the keys in your substitution map as such.</li>
* <li>Any token in the form {@code %f} which is supported by the time fields in
* {@link Formatter} is honored and used with the timestamp provided.</li>
* <li>System Properties: Any token in the form {@code $word.word} will be taken as the name
* of a system property to be substituted.</li>
* <li>Environment Variables: Any token in the form {@code $name} will be taken as
* an environment variable to be substituted.</li>
* </ul>
*
* @param rawtext The template, including any of the supported token forms
* @param millis The timestamp to use for any temporal tokens
* @param map Any additional parameters to interpolate into the template first
* @return Optionally, the interpolated string, as long as all references were qualified. Error
* handling is contextual to the caller -- If not getting a valid result would cause a downstream error,
* an error should likely be thrown.
*/
public final Optional<String> interpolateWithTimestamp(String rawtext, long millis, Map<String, String> map) {
String result = rawtext;
result = SessionNamer.format(result, millis);
return interpolate(result,map);
}
public final Optional<String> interpolateWithTimestamp(String rawText, long millis) {
return interpolateWithTimestamp(rawText, millis, Map.of());
}
}

View File

@ -0,0 +1,47 @@
package io.nosqlbench.nb.api.metadata;
import java.util.Map;
/**
* If an object is ScenarioMetadata, then they will be updated with a map of
* scenario metadata. Supported types are:
* <UL>
* <LI>ScriptingPluginInfo</LI>
* </UL>
*/
public class ScenarioMetadata {
private final long startedAt;
private final String sessionName;
private final String systemId;
private final String systemFingerprint;
public ScenarioMetadata(long startedAt, String sessionName, String systemId, String systemFingerprint) {
this.startedAt = startedAt;
this.sessionName = sessionName;
this.systemId = systemId;
this.systemFingerprint = systemFingerprint;
}
public long getStartedAt() {
return startedAt;
}
public String getSessionName() {
return sessionName;
}
public String getSystemId() {
return systemId;
}
public String getSystemFingerprint() {
return systemFingerprint;
}
public Map<String,String> asMap() {
return Map.of("STARTED_AT",String.valueOf(startedAt),
"SESSION_NAME",sessionName,
"SYSTEM_ID",systemId,
"SYSTEM_FINGERPRINT", systemFingerprint);
}
}

View File

@ -0,0 +1,21 @@
package io.nosqlbench.nb.api.metadata;
/**
 * Where supported, the following named fields are injected into objects which
 * implement this interface:
 * <UL>
 * <LI>SCENARIO_NAME - The full scenario name, used for logging, metrics, etc</LI>
 * <LI>STARTED_AT_MILLIS - The millisecond timestamp used to create the scenario name</LI>
 * <LI>SYSTEM_ID - A stable identifier based on the available ip addresses</LI>
 * <LI>SYSTEM_FINGERPRINT - a stable and pseudonymous identifier based on SYSTEM_ID</LI>
 * </UL>
 */
public interface ScenarioMetadataAware {

    /** Receive the metadata describing the current scenario run. */
    void setScenarioMetadata(ScenarioMetadata metadata);

    /**
     * Inject {@code metadata} into {@code target} if and only if it implements
     * this interface; silently does nothing otherwise.
     */
    static void apply(Object target, ScenarioMetadata metadata) {
        if (target instanceof ScenarioMetadataAware) {
            ((ScenarioMetadataAware) target).setScenarioMetadata(metadata);
        }
    }
}

View File

@ -15,11 +15,13 @@
* /
*/
package io.nosqlbench.engine.cli;
package io.nosqlbench.nb.api.metadata;
import java.util.Arrays;
public class SessionNamer {
public String format(String sessionName) {
public static String format(String sessionName, long sessionTimeMillis) {
String nameTemplate = sessionName;
if (nameTemplate==null || nameTemplate.isEmpty()) {
nameTemplate = "scenario_%tY%tm%td_%tH%tM%tS_%tL";
@ -27,11 +29,14 @@ public class SessionNamer {
int splits = nameTemplate.split("%").length -1;
Long[] times = new Long[splits];
long now = System.currentTimeMillis();
for (int i = 0; i < times.length; i++) times[i] = now;
Arrays.fill(times, sessionTimeMillis);
sessionName = String.format(nameTemplate, (Object[]) times);
return sessionName;
}
public static String format(String sessionName) {
return format(sessionName, System.currentTimeMillis());
}
}

View File

@ -1,4 +1,4 @@
package io.nosqlbench.nb.api;
package io.nosqlbench.nb.api.metadata;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
@ -7,10 +7,25 @@ import oshi.hardware.CentralProcessor;
import oshi.hardware.HardwareAbstractionLayer;
import oshi.hardware.NetworkIF;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;
public class SystemId {
/**
* Return the address of a node which is likely to be unique enough to identify
* it within a given subnet, after filtering out all local addresses. This is useful
* when you are managing configuration or results for a set of systems which
* share a common IP addressing scheme. This identifier should be stable as long
* as the node's addresses do not change.
*
* If you are needing an identifier for a node but wish to expose any address data,
* you can use the {@link #getNodeFingerprint()} which takes this value and hashes
* it with SHA-1 to produce a hex string.
* @return A address for the node, likely to be unique and stable for its lifetime
*/
public static String getNodeId() {
SystemInfo sysinfo = new SystemInfo();
HardwareAbstractionLayer hal = sysinfo.getHardware();
@ -38,6 +53,28 @@ public class SystemId {
return systemID;
}
/**
* Produce a stable string identifier consisting of hexadecimal characters.
* The internal data used for this value is based on a stable ordering of non-local
* ip addresses available on the system.
* @return A stable node identifier
*/
public static String getNodeFingerprint() {
String addrId = getNodeId();
try {
MessageDigest sha1_digest = MessageDigest.getInstance("SHA-1");
byte[] addrBytes = sha1_digest.digest(addrId.getBytes(StandardCharsets.UTF_8));
String fingerprint = "";
for (int i=0; i < addrBytes.length; i++) {
fingerprint +=
Integer.toString( ( addrBytes[i] & 0xff ) + 0x100, 16).substring( 1 );
}
return fingerprint.toUpperCase(Locale.ROOT);
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
public static String getHostSummary() {
SystemInfo sysinfo = new SystemInfo();
HardwareAbstractionLayer hal = sysinfo.getHardware();

View File

@ -70,6 +70,7 @@ public class SimpleServiceLoader<T> {
providers = new LinkedHashMap<>();
loader.stream().forEach(provider -> {
logger.trace("loading provider: " + provider.type());
Class<? extends T> type = provider.type();
if (!type.isAnnotationPresent(Service.class)) {
throw new RuntimeException(

View File

@ -0,0 +1,54 @@
package io.nosqlbench.nb.addins.s3.s3urlhandler;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.PutObjectResult;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import static org.assertj.core.api.Assertions.assertThat;
public class S3UrlStreamHandlerTest {

    public static String bucketName = "nb-extension-test";
    public static String keyName = "key-name";
    public static String testValue = "test-value";

    /**
     * This test requires that you have credentials already configured on your local system
     * for S3. It creates an object using the s3 client directly, then uses a generic
     * URL method to access and verify the contents.
     */
    @Disabled
    @Test
    public void sanityCheckS3UrlHandler() {
        AmazonS3 client = AmazonS3ClientBuilder.defaultClient();
        if (!client.doesBucketExistV2(bucketName)) {
            client.createBucket(bucketName);
        }
        PutObjectResult putObjectResult = client.putObject(bucketName, keyName, testValue);
        assertThat(putObjectResult).isNotNull();
        try {
            URL url = new URL("s3://" + bucketName + "/" + keyName);
            // try-with-resources ensures the stream is closed even when an assertion fails
            try (InputStream is = url.openStream();
                 BufferedReader br = new BufferedReader(new InputStreamReader(is))) {
                String line = br.readLine();
                assertThat(line).isEqualTo(testValue);
                System.out.println(line);
            }
        } catch (Exception e) {
            // BUG FIX: previously printStackTrace() swallowed the error and the test
            // passed anyway; rethrow so a failure actually fails the test.
            throw new RuntimeException(e);
        }
    }
}

View File

@ -0,0 +1,45 @@
package io.nosqlbench.nb.addins.s3.s3utils;
import com.amazonaws.services.s3.transfer.MultipleFileUpload;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import io.nosqlbench.nb.addins.s3.s3urlhandler.S3ClientCache;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
/**
* This is a generic s3 directory uploader which is neither a scripting plugin nor a standard URL handler.
*/
public class S3UploaderDemo {
private final S3ClientCache clientCache = new S3ClientCache();
private static final Logger logger = LogManager.getLogger(S3UploaderDemo.class);
public MultipleFileUpload syncup(Path sourcePath, String bucket, String prefix) {
if (!FileSystems.getDefault().equals(sourcePath.getFileSystem())) {
throw new RuntimeException("The file must reside on the default filesystem to be uploaded by S3.");
}
if (!Files.isDirectory(sourcePath, LinkOption.NOFOLLOW_LINKS)) {
throw new RuntimeException("path '" + sourcePath + "' is not a directory.");
}
TransferManager tm = TransferManagerBuilder.defaultTransferManager();
MultipleFileUpload mfu = tm.uploadDirectory(bucket, prefix, sourcePath.toFile(), true);
try {
mfu.waitForCompletion();
} catch (InterruptedException e) {
throw new RuntimeException("Multi-file upload was interrupted!");
}
tm.shutdownNow();
return mfu;
}
}

View File

@ -0,0 +1,21 @@
package io.nosqlbench.nb.addins.s3.s3utils;
import com.amazonaws.services.s3.transfer.MultipleFileUpload;
import io.nosqlbench.nb.addins.s3.s3urlhandler.S3UrlStreamHandlerTest;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.nio.file.Path;
public class S3UploaderTest {

    /** Disabled by default: requires live AWS credentials and network access. */
    @Disabled
    @Test
    public void testDirUpload() {
        final Path sourceDir = Path.of("src/test/resources/nesteddir1");
        final S3UploaderDemo uploader = new S3UploaderDemo();
        final MultipleFileUpload upload =
            uploader.syncup(sourceDir, S3UrlStreamHandlerTest.bucketName, "test-prefix");
        System.out.println(upload);
    }
}

View File

@ -2,6 +2,9 @@ package io.nosqlbench.nb.api;
import org.junit.jupiter.api.Test;
import java.util.Map;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
public class NBEnvironmentTest {
@ -15,4 +18,22 @@ public class NBEnvironmentTest {
assertThat(home1).matches(".+");
}
    @Test
    public void testInterpolationWithTimestamp() {
        // Exercises map-based token substitution ($WOO -> WOW), time-field
        // expansion (%td -> day-of-month for the fixed timestamp), and the
        // literal %% -> % escape.
        // NOTE(review): %td renders in the JVM's default time zone, so the
        // expected "11" for this millis value is timezone-dependent -- confirm
        // the test is stable across zones or pin a zone.
        NBEnvironment env = new NBEnvironment();
        long millis = 1633964892320L;
        String time1 = env.interpolateWithTimestamp("word WOO$WOO %td %% end", millis, Map.of("WOO","WOW")).orElse(null);
        assertThat(time1).isEqualTo("word WOOWOW 11 % end");
    }
    @Test
    public void testInterpolationPrecedence() {
        // Verifies that keys in the explicit substitution map take precedence
        // over same-named environment variables, and that a variable absent
        // from the map falls back to the environment.
        // NOTE(review): the fallback case relies on $USER being set in the
        // test environment -- confirm this holds on CI runners.
        NBEnvironment env = new NBEnvironment();
        Optional<String> superseded = env.interpolate("$TEST_KEY, $USER", Map.of("TEST_KEY", "supersedes1", "USER", "supersedes2"));
        assertThat(superseded).contains("supersedes1, supersedes2");
        superseded = env.interpolate("$USER", Map.of("TEST_KEY", "supersedes1"));
        assertThat(superseded).isPresent();
        assertThat(superseded.get()).isNotEqualTo("supersedes2");
    }
}

View File

@ -1,7 +1,10 @@
package io.nosqlbench.nb.api;
import io.nosqlbench.nb.api.metadata.SystemId;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class SystemIdTest {
@Test
@ -9,4 +12,17 @@ public class SystemIdTest {
String info = SystemId.getHostSummary();
System.out.println(info);
}
    @Test
    public void testNostId() {
        // NOTE(review): method name looks like a typo for "testNodeId".
        // Asserts the node id is a dotted-quad IPv4 address; this assumes
        // getNodeId() never selects an IPv6 address -- TODO confirm.
        String info = SystemId.getNodeId();
        assertThat(info).matches("\\d+\\.\\d+\\.\\d+\\.\\d+");
    }
    @Test
    public void testNodeFingerprint() {
        // The fingerprint is hex-encoded and upper-cased, so it must consist
        // solely of uppercase hex characters (a subset of [A-Z0-9]).
        String hash = SystemId.getNodeFingerprint();
        assertThat(hash).matches("[A-Z0-9]+");
    }
}

View File

@ -2,6 +2,10 @@ package io.nosqlbench.nb.api.content;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.LinkedHashSet;
@ -257,4 +261,20 @@ public class NBIOTest {
}
@Test
public void matchFullyQualifiedPathCorrectly() {
Path tmpdir = Paths.get("/tmp");
if (!Files.isDirectory(tmpdir)) return;
try {
File tempFile = File.createTempFile(tmpdir.toString(), "testfile.csv");
tempFile.deleteOnExit();
String fullpath = tempFile.getAbsolutePath();
Files.write(Path.of(fullpath), "COL1,COL2\n\"val1\",\"val2\"\n".getBytes(StandardCharsets.UTF_8));
List<Content<?>> results = NBIO.all().name(fullpath).list();
assertThat(results.size()).isEqualTo(1);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -24,127 +24,127 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-rest</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-cli</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-docs</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-core</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-extensions</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nbr</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-web</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-kafka</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-diag</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-tcp</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-http</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-jmx</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-dsegraph-shaded</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cql-shaded</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cqld3-shaded</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cqlverify</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-mongodb</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-pulsar</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cockroachdb</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-jms</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>
@ -231,7 +231,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-dsegraph-shaded</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>
</profile>
@ -244,12 +244,12 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cql-shaded</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cqlverify</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>
@ -264,7 +264,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cqld4</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>
</profile>
@ -277,7 +277,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-mongodb</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>
</profile>

View File

@ -0,0 +1,2 @@
// Example: read the contents of a file via the 'files' scripting extension.
var content = files.read("somefile.txt");

View File

@ -0,0 +1,4 @@
// Example: store and then retrieve a value via the 'globalvars' scripting extension.
// FIX: the original declared `var result` twice; the first declaration (holding
// the ignored return value of put) was dead and shadowed immediately.
globalvars.put("result", "OK");
var result = globalvars.get("result");
print("result="+result);

View File

@ -0,0 +1 @@
// Example: issue an HTTP GET via the 'http' scripting extension.
var response = http.get("http://example.google.com")

View File

@ -0,0 +1,3 @@
// This requires active credentials, so it is disabled by default.
// This still serves as an example
// s3.uploadDirToUrl("testdata","s3://nb-extension-test/testdata1");

View File

@ -0,0 +1,3 @@
// Register a named shutdown hook with the 'shutdown' scripting extension.
// NOTE(review): the integration test expects this hook's output NOT to appear
// in the scenario's captured IO log -- it runs outside that context.
shutdown.addShutdownHook('testfunc', function f() {
    print("shutdown hook running");
});

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -24,37 +24,37 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-rest</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-cli</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-docs</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-core</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-extensions</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-diag</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -237,6 +237,14 @@ public class AsyncScriptIntegrationTests {
assertThat(scenarioResult.getIOLog()).contains("count: ");
}
@Test
public void testShutdownHook() {
ScenarioResult scenarioResult = runScenario("extension_shutdown_hook");
assertThat(scenarioResult.getIOLog()).doesNotContain("shutdown hook running").describedAs(
"shutdown hooks should not run in the same IO context as the main scenario"
);
}
@Test
public void testExceptionPropagationFromMotorThread() {
ScenarioResult scenarioResult = runScenario("activityerror");

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>mvn-defaults</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,14 +23,14 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<artifactId>nb-api</artifactId>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lang</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -20,7 +20,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>4.15.58-SNAPSHOT</version>
<version>4.15.64-SNAPSHOT</version>
</dependency>
<dependency>

Some files were not shown because too many files have changed in this diff Show More