Merge remote-tracking branch 'upstream/main' into main

This commit is contained in:
Yabin Meng 2021-03-01 16:05:17 -06:00
commit 305a6046b3
69 changed files with 1001 additions and 237 deletions

View File

@ -18,8 +18,7 @@ Some quick how-to docs have been written for some of the subject-matter
areas in NoSQLBench. If you need an onramp that is not listed here, let us
know!
[I am a developer and I want to contribute a driver.]
(devdocs/devguide/drivers/README.md)
[I am a developer and I want to contribute a driver.](devdocs/devguide/drivers/README.md)
[I am a user and I want to improve the documentation.](devdocs/devguide/nb_docs.md)

View File

@ -88,11 +88,15 @@ available, but more work is needed to support them fully. Here is what is suppor
## Thanks
[![DataStax Logo](https://www.datastax.com/sites/default/files/content/graphics/logo/DS-logo-2019_1-25percent.png)](http://datastax.com/)
<table cellspacing="0" cellpadding="0" align="left">
<tr>
<td><a href="https://datastax.com" target="_blank"><img src="https://www.datastax.com/sites/default/files/2020-12/datastax-logotype-positive.png" alt="DataStax" width="250"/></a></td>
</tr>
</table>
This project is sponsored by [DataStax](http://datastax.com/) -- The always-on, active everywhere, distributed hybrid
cloud database built on Apache Cassandra™, and designed from the ground up to run anywhere, on any cloud, in any
datacenter, and in every possible combination. DataStax delivers the ultimate hybrid and multi-cloud database.
This project is sponsored by [DataStax](https://www.datastax.com) -- The Open, Multi-Cloud Stack for Modern Data Apps built on
Apache Cassandra™, Kubernetes *Based*, Developer *Ready* & Cloud *Delivered* and designed from the ground up to run anywhere,
on any cloud, in any datacenter, and in every possible combination. DataStax delivers the ultimate hybrid and multi-cloud database.
![YourKit Logo](https://www.yourkit.com/images/yklogo.png)

View File

@ -1,12 +1,21 @@
- c278b19f (HEAD -> main) fix nosqlbench-265 NPE in http status code checking
- 90d408da minor version updates
- e539cd49 make deprecated synonyms less ominous
- 11f35fdf doc updates
- 35b49a46 start metrics short format
- eaf30ad9 nosqlbench-266 cql verify should allow multiple verification statements
- 9a974bac reduce console chatter
- ec2a8c55 allow nbweb to use include-apps selector
- aaeecb38 fix fileaccess plugin structure
- dacbb372 delegate plugin names to service selectors
- 6e4eff15 cleanup service loader code
- 7e776d5f allow HOF form of HashedFileExtract with sizefunc parameter
- 41b10016 add starting point for incremental test
- 1607eaf3 revert parser fix, a better solution is needed
- b76c7234 Adding initial http driver baseline activities for use with Stargate.
- a987a4f2 - Minor update of pulsar.md file
- 15cf07ae - Minor update of pulsar.md file
- feb430ff - Minor update of pulsar.md file
- ecf6fa17 - Update pulsar.md - Code changes based on review comments from Shooky
- f78a9e92 - Update pulsar.md - Code changes based on review comments from Shooky
- 0ff6e8a6 disable broken test
- 3ea80fbf initial incremental
- df6b3d3b allow reasonable characters in property names
- c3910d69 add hash range scaled with scale factor parameter
- 3746037b add warning for null value in named scenarios
- a1c39f7d sanity warning for statement and statements
- dba0f94c opdef named maps
- 30be470b rename StmtDef to OpDef
- 6ba95e7f test fixes
- f4076dc5 formatting & typos
- ddc7544b Pulsar driver for producer with schema support - primitive types and Avro schema
- 9f234a8d fix broken link in docs
- 2c0daf77 Update logo

View File

@ -9,7 +9,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -18,7 +18,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
@ -98,7 +98,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -21,7 +21,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-jdbc</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,13 +23,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -2,7 +2,7 @@ package io.nosqlbench.activitytype.cql.core;
import com.datastax.driver.core.*;
import io.nosqlbench.engine.api.activityconfig.ParsedStmt;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtDef;
import io.nosqlbench.engine.api.activityconfig.yaml.OpDef;
import java.math.BigDecimal;
import java.net.InetAddress;
@ -129,13 +129,13 @@ public class CQLBindHelper {
}
}
public static Map<String, String> parseAndGetSpecificBindings(StmtDef stmtDef, ParsedStmt parsed) {
public static Map<String, String> parseAndGetSpecificBindings(OpDef opDef, ParsedStmt parsed) {
List<String> spans = new ArrayList<>();
String statement = stmtDef.getStmt();
String statement = opDef.getStmt();
Set<String> extraBindings = new HashSet<>();
extraBindings.addAll(stmtDef.getBindings().keySet());
extraBindings.addAll(opDef.getBindings().keySet());
Map<String, String> specificBindings = new LinkedHashMap<>();
Matcher m = stmtToken.matcher(statement);
@ -153,9 +153,9 @@ public class CQLBindHelper {
if (extraBindings.contains(tokenName)) {
if (specificBindings.get(tokenName) != null){
String postfix = UUID.randomUUID().toString();
specificBindings.put(tokenName+postfix, stmtDef.getBindings().get(tokenName));
specificBindings.put(tokenName + postfix, opDef.getBindings().get(tokenName));
}else {
specificBindings.put(tokenName, stmtDef.getBindings().get(tokenName));
specificBindings.put(tokenName, opDef.getBindings().get(tokenName));
}
}
}

View File

@ -0,0 +1,120 @@
description:
This is a workload which creates an incrementally growing dataset over cycles.
Rows will be added incrementally in both rampup and main phases. However, during
the main phase, reads will also occur at the same rate, with the read pattern
selecting keys from within the range of data written up to that point.
In order to ensure that the reads and writes operate against the same set of
identifiers, it is crucial that the ratios are not adjusted unless the binding
recipes are adjusted to match. With a write:read ratio of 1:1 and a Div(2L) prefix
function at the front of the main phase bindings, the writes and reads will address
the same rows rather than playing leap-frog on the cycle values.
The main phase can be run without the rampup phase for this workload, as long
as your test is defined as an incremental write and read test. If you need
background data pre-loaded to ensure realistic read times against pre-indexed
data, then you may use the rampup phase before the main phase. However, be aware
that these are simply different test definitions, and are both valid in different ways.
Due to how this workload is meant to be used, you must specify main-cycles= when
invoking the main phase.
The cycles value for the main test includes operations for both writes and reads,
thus the logical number of rows in the dataset will be effectively half of that.
This workload is intended to be run with a sufficiently high number of cycles.
Two key details should be obvious in the read latency metrics -- 1) the relationship
between dataset size, request rate, and response times and 2) inflection points
between any hot and cold access modes for LRU or other caching mechanisms as
the primary cache layer is saturated.
scenarios:
default:
schema: run tags=phase:schema threads==1
# rampup: run tags=phase:rampup cycles===TEMPLATE(rampup-cycles,100000) threads=auto
main: run tags=phase:main cycles===TEMPLATE(main-cycles,0) threads=auto
default-schema: run tags=phase:schema threads==1
# default-rampup: run tags=phase:rampup cycles===TEMPLATE(rampup-cycles,100000) threads=auto
default-main: run tags=phase:main cycles===TEMPLATE(main-cycles,0) threads=auto
astra:
schema: run tags=phase:astra-schema threads==1
# rampup: run tags=phase:rampup cycles===TEMPLATE(rampup-cycles,0) threads=auto
main: run tags=phase:main cycles===TEMPLATE(main-cycles,0) threads=auto
params:
instrument: true
bindings:
rampup_key: ToString()
rampup_value: Hash(); ToString();
read_key: Div(2L); HashRangeScaled(TEMPLATE(scalefactor,1.0d)); ToString();
read_value: Div(2L); HashRangeScaled(TEMPLATE(scalefactor,1.0d)); Hash(); ToString();
write_key: Div(2L); Hash(); HashRangeScaled(TEMPLATE(scalefactor,1.0d)); ToString();
write_value: Div(2L); Hash(); HashRangeScaled(TEMPLATE(scalefactor,1.0d)); Hash(); ToString();
blocks:
- name: schema
tags:
phase: schema
statements:
- create-keyspace: |
create keyspace if not exists TEMPLATE(keyspace,baselines)
WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 'TEMPLATE(rf,1)'}
AND durable_writes = true;
tags:
name: create-keyspace
- create-table: |
create table if not exists TEMPLATE(keyspace,baselines).TEMPLATE(table,incremental) (
key text,
value text,
PRIMARY KEY (key)
);
- name: schema-astra
tags:
phase: schema-astra
params:
prepared: false
statements:
- create-table: |
create table if not exists TEMPLATE(keyspace,baselines).TEMPLATE(table,incremental) (
key text,
value text,
PRIMARY KEY (key)
);
tags:
name: create-table-astra
- name: rampup
tags:
phase: rampup
params:
cl: TEMPLATE(write_cl,LOCAL_QUORUM)
statements:
- rampup-insert: |
insert into TEMPLATE(keyspace,baselines).TEMPLATE(table,incremental)
(key, value)
values ({rampup_key},{rampup_value});
tags:
name: rampup-insert
- name: main-read
tags:
phase: main
type: read
params:
ratio: 1
cl: TEMPLATE(read_cl,LOCAL_QUORUM)
statements:
- main-select: |
select * from TEMPLATE(keyspace,baselines).TEMPLATE(table,incremental) where key={read_key};
tags:
name: main-select
- name: main-write
tags:
phase: main
type: write
params:
ratio: 1
cl: TEMPLATE(write_cl,LOCAL_QUORUM)
statements:
- main-insert: |
insert into TEMPLATE(keyspace,baselines).TEMPLATE(table,incremental)
(key, value) values ({write_key}, {write_value});
tags:
name: main-insert
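The Div(2L) pairing described in this workload's description can be illustrated with a small, self-contained sketch. This is not part of the workload; it only assumes, as the description implies, that Div(2L) performs integer division of the cycle value, so that with a 1:1 write:read ratio each pair of adjacent cycles resolves to the same logical key.
// Hypothetical sketch, not part of the workload: shows how Div(2L) collapses
// each pair of adjacent cycles onto one key, assuming Div(2L) is integer
// division of the cycle value. With a 1:1 write:read ratio, one cycle of each
// pair is a write and the other a read (exact order depends on sequencing),
// so both operations address the same row.
public class Div2Sketch {
    public static void main(String[] args) {
        for (long cycle = 0; cycle < 6; cycle++) {
            long key = cycle / 2;   // Div(2L)
            System.out.printf("cycle=%d -> key=%d%n", cycle, key);
        }
    }
}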

View File

@ -4,7 +4,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -24,13 +24,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cql-shaded</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -20,13 +20,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,13 +23,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -0,0 +1,117 @@
# nb -v run driver=http yaml=http-iot tags=phase:schema stargate_host=my_stargate_host
description: |
This workload emulates a time-series data model and access patterns.
This should be identical to the cql variant except for:
- We can't specify the write timestamp to make the write idempotent like we can with cql.
- The `time` binding has to have a StringDateWrapper to get the exact format that the REST API needs; See https://github.com/stargate/stargate/issues/532.
- We need to URLEncode the `data` binding because newlines can't be sent in REST calls.
- Schema creation is done via cql, because the compaction strategy cannot be defined through the REST API.
- There is no instrumentation with the http driver.
- There is no async mode with the http driver.
scenarios:
default:
- run driver=cql tags==phase:schema threads==1 cycles==UNDEF
- run driver=http tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- run driver=http tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
# http request id
request_id: ToHashedUUID(); ToString();
machine_id: Mod(<<sources:10000>>); ToHashedUUID() -> java.util.UUID
sensor_name: HashedLineToString('data/variable_words.txt')
time: Mul(<<timespeed:100>>L); Div(<<sources:10000>>L); StringDateWrapper("yyyy-MM-dd'T'hh:mm:ss'Z'");
sensor_value: Normal(0.0,5.0); Add(100.0) -> double
station_id: Div(<<sources:10000>>);Mod(<<stations:100>>); ToHashedUUID() -> java.util.UUID
data: HashedFileExtractToString('data/lorem_ipsum_full.txt',800,1200); URLEncode();
blocks:
- tags:
phase: schema
params:
prepared: false
statements:
- create-keyspace: |
create keyspace if not exists <<keyspace:baselines>>
WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '<<rf:1>>'}
AND durable_writes = true;
tags:
name: create-keyspace
- create-table : |
create table if not exists <<keyspace:baselines>>.<<table:iot>> (
machine_id UUID, // source machine
sensor_name text, // sensor name
time timestamp, // timestamp of collection
sensor_value double, //
station_id UUID, // source location
data text,
PRIMARY KEY ((machine_id, sensor_name), time)
) WITH CLUSTERING ORDER BY (time DESC)
AND compression = { 'sstable_compression' : '<<compression:LZ4Compressor>>' }
AND compaction = {
'class': 'TimeWindowCompactionStrategy',
'compaction_window_size': <<expiry_minutes:60>>,
'compaction_window_unit': 'MINUTES'
};
tags:
name: create-table
- truncate-table: |
truncate table <<keyspace:baselines>>.<<table:iot>>;
tags:
name: truncate-table
- name: rampup
tags:
phase: rampup
statements:
- rampup-insert: POST http://<<stargate_host:stargate>>:<<stargate_port:8082>>/v2/keyspaces/<<keyspace:baselines>>/<<table:iot>>
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
body: |
{
"machine_id": "{machine_id}",
"sensor_name": "{sensor_name}",
"time": "{time}",
"sensor_value": "{sensor_value}",
"station_id": "{station_id}",
"data": "{data}"
}
tags:
name: rampup-insert
- name: main-read
tags:
phase: main
type: read
params:
ratio: <<read_ratio:1>>
statements:
- main-select: GET /v2/keyspaces/<<keyspace:baselines>>/<<table:iot>>?where=E[[{"machine_id":{"$eq":"{machine_id}"}},"sensor_name":{"$eq":"{sensor_name}"},"page-size":{"$eq":"<<limit:10>>"}}]]
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
tags:
name: main-select
- name: main-write
tags:
phase: main
type: write
params:
ratio: <<write_ratio:9>>
statements:
- main-write: POST http://<<stargate_host:stargate>>:<<stargate_port:8082>>/v2/keyspaces/<<keyspace:baselines>>/<<table:iot>>
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
body: |
{
"machine_id": "{machine_id}"
"sensor_name": "{sensor_name}",
"time": "{time}",
"sensor_value": "{sensor_value}",
"station_id": "{station_id}",
"data": "{data}"
}
tags:
name: main-write
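As noted in the description, the `data` binding is URL-encoded because the extracted text can contain newlines that cannot be sent in a REST call. Below is a minimal, hypothetical illustration of what that encoding step accomplishes, using the standard java.net.URLEncoder; whether the URLEncode() binding function uses exactly this class is an assumption, the sketch only shows the intent.
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

// Hypothetical illustration only: why the data binding is wrapped with URLEncode().
public class UrlEncodeSketch {
    public static void main(String[] args) {
        String raw = "lorem ipsum\ndolor sit amet";                      // multi-line extract
        String encoded = URLEncoder.encode(raw, StandardCharsets.UTF_8); // newlines become %0A
        String body = "{ \"data\": \"" + encoded + "\" }";               // safe to place in the REST body
        System.out.println(body);
    }
}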

View File

@ -0,0 +1,112 @@
# nb -v run driver=http yaml=http-keyvalue tags=phase:schema stargate_host=my_stargate_host
description: |
This workload emulates a key-value data model and access patterns.
This should be identical to the cql variant except for:
- There is no instrumentation with the http driver.
- There is no async mode with the http driver.
scenarios:
default:
- run driver=http tags==phase:schema threads==1 cycles==UNDEF
- run driver=http tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- run driver=http tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
# http request id
request_id: ToHashedUUID(); ToString();
seq_key: Mod(<<keycount:1000000000>>); ToString() -> String
seq_value: Hash(); Mod(<<valuecount:1000000000>>); ToString() -> String
rw_key: <<keydist:Uniform(0,1000000000)->int>>; ToString() -> String
rw_value: Hash(); <<valdist:Uniform(0,1000000000)->int>>; ToString() -> String
blocks:
- name: schema
tags:
phase: schema
statements:
- create-keyspace: POST http://<<stargate_host:stargate>>:<<stargate_port:8082>>/v2/schemas/keyspaces
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
body: |
{
"name": "<<keyspace:baselines>>"
}
tags:
name: create-keyspace
- create-table: POST http://<<stargate_host:stargate>>:<<stargate_port:8082>>/v2/schemas/keyspaces/<<keyspace:baselines>>/tables
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
body: |
{
"name": "<<table:keyvalue>>",
"columnDefinitions":
[
{
"name": "key",
"typeDefinition": "text"
},
{
"name": "value",
"typeDefinition": "text"
}
],
"primaryKey":
{
"partitionKey": ["key"]
}
}
tags:
name: create-table
- name: rampup
tags:
phase: rampup
statements:
- rampup-insert: POST http://<<stargate_host:stargate>>:<<stargate_port:8082>>/v2/keyspaces/<<keyspace:baselines>>/<<table:keyvalue>>
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
body: |
{
"key": "{seq_key}",
"value": "{seq_value}"
}
tags:
name: rampup-insert
- name: main-read
tags:
phase: main
type: read
params:
ratio: 5
statements:
- main-select: GET /v2/keyspaces/<<keyspace:baselines>>/<<table:keyvalue>>?where=E[[{"key":{"$eq":"{rw_key}"}}}]]
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
tags:
name: main-select
- name: main-write
tags:
phase: main
type: write
params:
ratio: 5
statements:
- main-write: POST http://<<stargate_host:stargate>>:<<stargate_port:8082>>/v2/keyspaces/<<keyspace:baselines>>/<<table:keyvalue>>
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
body: |
{
"key": "{rw_key}"
"value": "{rw_value}"
}
tags:
name: main-write

View File

@ -0,0 +1,127 @@
# nb -v http-tabular rampup-cycles=1E6 main-cycles=1E9 stargate_host=my_stargate_host
description: |
This workload emulates a time-series data model and access patterns.
This should be identical to the cql variant except for:
- We need to URLEncode the `data` and `data_write` bindings because newlines can't be sent in REST calls.
- There is no instrumentation with the http driver.
- There is no async mode with the http driver.
scenarios:
default:
- run driver=http tags==phase:schema threads==1 cycles==UNDEF
- run driver=http tags==phase:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
- run driver=http tags==phase:main cycles===TEMPLATE(main-cycles,10000000) threads=auto
bindings:
# http request id
request_id: ToHashedUUID(); ToString();
# for ramp-up and verify
part_layout: Div(<<partsize:1000000>>); ToString() -> String
clust_layout: Mod(<<partsize:1000000>>); ToString() -> String
data: HashedFileExtractToString('data/lorem_ipsum_full.txt',50,150); URLEncode();
# for read
limit: Uniform(1,10) -> int
part_read: Uniform(0,<<partcount:100>>)->int; ToString() -> String
clust_read: Add(1); Uniform(0,<<partsize:1000000>>)->int; ToString() -> String
# for write
part_write: Hash(); Uniform(0,<<partcount:100>>)->int; ToString() -> String
clust_write: Hash(); Add(1); Uniform(0,<<partsize:1000000>>)->int; ToString() -> String
data_write: Hash(); HashedFileExtractToString('data/lorem_ipsum_full.txt',50,150); URLEncode();
blocks:
- name: schema
tags:
phase: schema
statements:
- create-keyspace: POST http://<<stargate_host:stargate>>:<<stargate_port:8082>>/v2/schemas/keyspaces
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
body: |
{
"name": "<<keyspace:baselines>>"
}
tags:
name: create-keyspace
- create-table: POST http://<<stargate_host:stargate>>:<<stargate_port:8082>>/v2/schemas/keyspaces/<<keyspace:baselines>>/tables
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
body: |
{
"name": "<<table:tabular>>",
"columnDefinitions":
[
{
"name": "part",
"typeDefinition": "text"
},
{
"name": "clust",
"typeDefinition": "text"
},
{
"name": "data",
"typeDefinition": "text"
}
],
"primaryKey":
{
"partitionKey": ["part"],
"clusteringKey": ["clust"]
}
}
tags:
name: create-table
- name: rampup
tags:
phase: rampup
statements:
- rampup-insert: POST http://<<stargate_host:stargate>>:<<stargate_port:8082>>/v2/keyspaces/<<keyspace:baselines>>/<<table:tabular>>
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
body: |
{
"part": "{part_layout}",
"clust": "{clust_layout}",
"data": "{data}"
}
tags:
name: rampup-insert
- name: main-read
tags:
phase: main
type: read
params:
ratio: 5
statements:
- main-select: GET /v2/keyspaces/<<keyspace:baselines>>/<<table:tabular>>?where=E[[{"part":{"$eq":"{part_layout}"}},"page-size":{"$eq":"{limit}"}}]]
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
tags:
name: main-select
- name: main-write
tags:
phase: main
type: write
params:
ratio: 5
statements:
- main-write: POST http://<<stargate_host:stargate>>:<<stargate_port:8082>>/v2/keyspaces/<<keyspace:baselines>>/<<table:tabular>>
Accept: "application/json"
X-Cassandra-Request-Id: "{request_id}"
X-Cassandra-Token: "<<auth_token:my_auth_token>>"
Content-Type: "application/json"
body: |
{
"part": "{part_write}"
"clust": "{clust_write}",
"data": "{data_write}"
}
tags:
name: main-write

View File

@ -3,7 +3,7 @@
<parent>
<artifactId>nosqlbench</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
@ -18,7 +18,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
</dependencies>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -44,13 +44,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<!-- <dependency>-->

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -21,13 +21,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -1,13 +1,5 @@
package io.nosqlbench.driver.mongodb;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import io.nosqlbench.engine.api.activityconfig.yaml.OpTemplate;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Timer;
import com.mongodb.ConnectionString;
@ -21,17 +13,23 @@ import io.nosqlbench.engine.api.activityapi.planning.SequencePlanner;
import io.nosqlbench.engine.api.activityapi.planning.SequencerType;
import io.nosqlbench.engine.api.activityconfig.ParsedStmt;
import io.nosqlbench.engine.api.activityconfig.StatementsLoader;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtDef;
import io.nosqlbench.engine.api.activityconfig.yaml.OpTemplate;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtsDocList;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.activityimpl.SimpleActivity;
import io.nosqlbench.engine.api.metrics.ActivityMetrics;
import io.nosqlbench.engine.api.templating.StrInterpolator;
import io.nosqlbench.engine.api.util.TagFilter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bson.UuidRepresentation;
import org.bson.codecs.UuidCodec;
import org.bson.codecs.configuration.CodecRegistry;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import static org.bson.codecs.configuration.CodecRegistries.fromCodecs;
import static org.bson.codecs.configuration.CodecRegistries.fromRegistries;

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -42,13 +42,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-beanutils/commons-beanutils -->

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -24,19 +24,19 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,13 +23,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-userlibs</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,25 +23,25 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-annotations</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-userlibs</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -17,7 +17,7 @@
package io.nosqlbench.engine.api.activityconfig;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtDef;
import io.nosqlbench.engine.api.activityconfig.yaml.OpDef;
import io.nosqlbench.nb.api.config.params.NBParams;
import io.nosqlbench.nb.api.config.params.Element;
import io.nosqlbench.virtdata.core.templates.BindPoint;
@ -36,7 +36,7 @@ import java.util.regex.Pattern;
* and <pre>{anchor}</pre> anchoring conventions. This type also includes
* all of the properties from the enclosed StmtDef, in addition to a couple of
* helpers. It should allow programmers to project this type directly from an
* existing {@link StmtDef} as a substitute.
* existing {@link OpDef} as a substitute.
*/
public class ParsedStmt {
@ -44,17 +44,17 @@ public class ParsedStmt {
private final static Logger logger = LogManager.getLogger(ParsedStmt.class);
private ParsedTemplate template;
private final StmtDef stmtDef;
private final OpDef opDef;
private final ParsedTemplate parsed;
/**
* Construct a new ParsedStatement from the provided stmtDef and anchor token.
*
* @param stmtDef An existing statement def as read from the YAML API.
* @param opDef An existing statement def as read from the YAML API.
*/
public ParsedStmt(StmtDef stmtDef) {
this.stmtDef = stmtDef;
parsed = new ParsedTemplate(stmtDef.getStmt(), stmtDef.getBindings());
public ParsedStmt(OpDef opDef) {
this.opDef = opDef;
parsed = new ParsedTemplate(opDef.getStmt(), opDef.getBindings());
}
public ParsedStmt orError() {
@ -125,46 +125,46 @@ public class ParsedStmt {
}
/**
* @return the statement name from the enclosed {@link StmtDef}
* @return the statement name from the enclosed {@link OpDef}
*/
public String getName() {
return stmtDef.getName();
return opDef.getName();
}
/**
* @return the raw statement from the enclosed {@link StmtDef}
* @return the raw statement from the enclosed {@link OpDef}
*/
public String getStmt() {
return stmtDef.getStmt();
return opDef.getStmt();
}
/**
* @return the tags from the enclosed {@link StmtDef}
* @return the tags from the enclosed {@link OpDef}
*/
public Map<String, String> getTags() {
return stmtDef.getTags();
return opDef.getTags();
}
/**
* @return the bindings from the enclosed {@link StmtDef}
* @return the bindings from the enclosed {@link OpDef}
*/
public Map<String, String> getBindings() {
return stmtDef.getBindings();
return opDef.getBindings();
}
/**
* @return the params from the enclosed {@link StmtDef}
* @return the params from the enclosed {@link OpDef}
* @deprecated You should use {@link #getParamReader()} instead.
*/
public Map<String, Object> getParams() {
return stmtDef.getParams();
return opDef.getParams();
}
/**
* @return a params reader from the enclosed {@link StmtDef} params map
* @return a params reader from the enclosed {@link OpDef} params map
*/
public Element getParamReader() {
return NBParams.one(stmtDef.getParams());
return NBParams.one(opDef.getParams());
}
public List<BindPoint> getBindPoints() {

View File

@ -1,11 +1,13 @@
package io.nosqlbench.engine.api.activityconfig.rawyaml;
import io.nosqlbench.nb.api.errors.BasicError;
import java.util.*;
public class RawScenarios {
public static String STEPNAME = "%03d";
private Map<String, Map<String, String>> scenarios = new LinkedHashMap<>();
private final Map<String, Map<String, String>> scenarios = new LinkedHashMap<>();
public List<String> getScenarioNames() {
return new ArrayList<>(scenarios.keySet());
@ -20,6 +22,9 @@ public class RawScenarios {
for (Map.Entry<String, Object> namedEntry : rawNamedScenarios.entrySet()) {
String scenarioName = namedEntry.getKey();
Object scenarioObj = namedEntry.getValue();
if (scenarioObj == null) {
throw new BasicError("Unable to use a null value for scenario named " + scenarioName + " in yaml.");
}
if (scenarioObj instanceof CharSequence) {
scenarios.put(scenarioName, Map.of(String.format(STEPNAME, 1), scenarioObj.toString()));
} else if (scenarioObj instanceof List) {

View File

@ -33,7 +33,6 @@ public class RawStmtDef extends RawStmtFields {
this.statement = statement;
}
@SuppressWarnings("unchecked")
public RawStmtDef(String defaultName, Map<String, Object> map) {
@ -63,18 +62,20 @@ public class RawStmtDef extends RawStmtFields {
Map values = (Map) firstEntry.getValue();
setFieldsByReflection(values);
map = values;
} else if (firstEntry.getValue() instanceof CharSequence){
} else if (firstEntry.getValue() instanceof CharSequence) {
setStmt(((CharSequence) firstEntry.getValue()).toString());
}
map.remove(firstEntry.getKey());
if (getName().isEmpty()) {
map.remove(firstEntry.getKey());
setName(firstEntry.getKey());
} else {
throw new RuntimeException("redefined-name-in-statement-tuple: Statement name has already been set by name parameter. Remove the name parameter for a statement definition map." +
" For more details on this error see " +
"the troubleshooting section in the " +
"YAML format docs for redefined-name-statement-tuple");
}
// TODO: Add explicit check condition for this error
// else {
// throw new RuntimeException("redefined-name-in-statement-tuple: Statement name has already been set by name parameter. Remove the name parameter for a statement definition map." +
// " For more details on this error see " +
// "the troubleshooting section in the " +
// "YAML format docs for redefined-name-statement-tuple");
// }
}
if (getName().isEmpty()) {
setName(defaultName);
@ -90,7 +91,7 @@ public class RawStmtDef extends RawStmtFields {
private void setStmt(String statement) {
this.statement = statement;
}
public String getName() {
Object name = getParams().get("name");
if (name!=null) {

View File

@ -41,7 +41,7 @@ public class RawStmtsDoc extends StatementsOwner {
public static RawStmtsDoc forSingleStatement(String statement) {
RawStmtsDoc rawStmtsDoc = new RawStmtsDoc();
rawStmtsDoc.setStatementsFieldByObjectType(statement);
rawStmtsDoc.setStatementsFieldByType(statement);
return rawStmtsDoc;
}

View File

@ -35,15 +35,20 @@ public class StatementsOwner extends RawStmtFields {
}
public void setFieldsByReflection(Map<String, Object> propsmap) {
if (propsmap.containsKey("statement") && propsmap.containsKey("statements")) {
throw new RuntimeException("You can define either statement or statements, but not both.");
}
Object statementsObject = propsmap.remove("statements");
if (statementsObject==null) {
if (statementsObject == null) {
statementsObject = propsmap.remove("statement");
}
if (statementsObject!=null) {
setStatementsFieldByObjectType(statementsObject);
if (statementsObject != null) {
setStatementsFieldByType(statementsObject);
}
// if (statementsObject!=null) {
// if (statementsObject instanceof List) {
// setByObject(statementsObject);
@ -55,17 +60,17 @@ public class StatementsOwner extends RawStmtFields {
}
@SuppressWarnings("unchecked")
public void setStatementsFieldByObjectType(Object object) {
public void setStatementsFieldByType(Object object) {
if (object instanceof List) {
List<Object> stmtList = (List<Object>) object;
List<RawStmtDef> defs = new ArrayList<>(stmtList.size());
for (int i = 0; i < stmtList.size(); i++) {
String defaultName = "stmt"+(i+1);
String defaultName = "stmt" + (i + 1);
Object o = stmtList.get(i);
if (o instanceof String) {
defs.add(new RawStmtDef(defaultName,(String)o));
defs.add(new RawStmtDef(defaultName, (String) o));
} else if (o instanceof Map) {
defs.add(new RawStmtDef(defaultName,(Map<String,Object>)o));
defs.add(new RawStmtDef(defaultName, (Map<String, Object>) o));
} else {
throw new RuntimeException("Can not construct stmt def from object type:" + o.getClass());
}
@ -76,12 +81,16 @@ public class StatementsOwner extends RawStmtFields {
List<Map<String,Object>> itemizedMaps = new ArrayList<>();
for (Map.Entry<String, Object> entries : map.entrySet()) {
Object value = entries.getValue();
if (value instanceof Map) {
Map<String,Object> valueMap = ((Map<String,Object>)value);
valueMap.put("name", entries.getKey());
itemizedMaps.add(valueMap);
if (value instanceof LinkedHashMap) {
// reset order to favor naming first
LinkedHashMap<String, Object> vmap = (LinkedHashMap<String, Object>) value;
LinkedHashMap<String, Object> cp = new LinkedHashMap<>(vmap);
vmap.clear();
vmap.put("name", entries.getKey());
vmap.putAll(cp);
itemizedMaps.add(vmap);
} else if (value instanceof String) {
Map<String,Object> stmtDetails = new HashMap<>() {{
Map<String, Object> stmtDetails = new HashMap<>() {{
put("name", entries.getKey());
put("stmt", entries.getValue());
}};
@ -90,9 +99,9 @@ public class StatementsOwner extends RawStmtFields {
throw new RuntimeException("Unknown inner value type on map-based statement definition.");
}
}
setStatementsFieldByObjectType(itemizedMaps);
setStatementsFieldByType(itemizedMaps);
} else if (object instanceof String) {
setStatementsFieldByObjectType(Map.of("stmt1",(String)object));
setStatementsFieldByType(Map.of("stmt1", (String) object));
} else {
throw new RuntimeException("Unknown object type: " + object.getClass());
}

View File

@ -26,7 +26,7 @@ import java.util.Map;
import java.util.Objects;
import java.util.Optional;
public class StmtDef implements OpTemplate {
public class OpDef implements OpTemplate {
private final RawStmtDef rawStmtDef;
private final StmtsBlock block;
@ -34,7 +34,7 @@ public class StmtDef implements OpTemplate {
private final LinkedHashMap<String, String> bindings;
private final LinkedHashMap<String, String> tags;
public StmtDef(StmtsBlock block, RawStmtDef rawStmtDef) {
public OpDef(StmtsBlock block, RawStmtDef rawStmtDef) {
this.block = block;
this.rawStmtDef = rawStmtDef;
this.params = composeParams();

View File

@ -6,6 +6,120 @@ import io.nosqlbench.engine.api.util.Tagged;
import java.util.Map;
import java.util.Optional;
/**
* <p>The OpTemplate is the developer's view of the operational templates that users
* provide in YAML or some other structured format.</p>
*
* <H2>Terms</H2>
* Within this documentation, the word <i>OpTemplate</i> will refer to the template API and
* semantics. The word <i>user template</i> will refer to the configuration data as provided
* by a user.
*
* <p>OpTemplates are the native Java representation of the user templates that specify how to
* make an executable operation. OpTemplates are not created for each operation, but are used
* to create a mostly-baked intermediate form commonly known as a <i>ready op</i>.
* It is the intermediate form which is used to create an instance of an executable
* op in whichever way is the most appropriate and efficient for a given driver.</p>
*
* <p>This class serves as the canonical documentation and API for how user templates
* are mapped into a fully resolved OpTemplate. User-provided op templates can be
* any basic data structure, and are often provided as part of a YAML workload file.
* The description below will focus on structural rules rather than any particular
* encoding format. The types used are fairly universal and easy to map from one
* format to another.</p>
*
*
* <p>A long-form introduction to this format is included in the main NoSQLBench docs
* at <a href="http://docs.nosqlbench.io">docs.nosqlbench.io</a>
* under the <I>Designing Workloads</I> section.</p>
*
* <p>A few structural variations are allowed -- no specific form is enforced. The reasons for this are:
* 1) It is generally obvious what a user wants to do from a given layout. 2) Data structure
* markup is generally frustrating and difficult to troubleshoot. 3) The conceptual domain of
* NB op construction is well-defined enough to avoid ambiguity.</p>
*
* <H2>Type Conventions</H2>
*
* For the purposes of simple interoperability, the types used at this interface boundary should
* be limited to common scalar types -- numbers and strings, and simple structures like maps and lists.
* The basic types defined for ECMAScript should eventually be supported, but no domain-specific
* objects which would require special encoding or decoding rules should be used.
*
* <H2>Standard Properties</H2>
*
* Each op template can have these standard properties:
* <UL>
* <LI>name - every op template has a name, even if it is auto generated for you. This is used to
* name errors in the log, to name metrics in telemetry, and so on.</LI>
* <LI>description - an optional description, defaulted to "".</LI>
* <LI>statement - An optional string value which represents an opaque form of the body of
* an op template</LI>
* <LI>params - A string-object map of zero or more named parameters, where the key is taken as the parameter
* name and the value is any simple object form as limited by type conventions above.</LI>
* <LI>bindings - A map of binding definitions, where the string key is taken as the anchor name, and the
* string value is taken as the binding recipe.</LI>
* <LI>tags - A map of tags, with string names and values</LI>
* </UL>
*
* The user-provided definition of an op template should capture a blueprint of an operation to be executed by
* a native driver. As such, you need either a statement or a set of params which can describe what
* specific type should be constructed. The rules on building an executable operation are not enforced
* by this API. Yet, responsible NB driver developers will clearly document what the rules
* are for specifying each specific type of operation supported by an NB driver with examples in YAML format.
*
* <H2>OpTemplate Construction Rules</H2>
*
* <p>The available structural forms follow a basic set of rules for constructing the OpTemplate in a consistent way.</p>
* <OL>
* <LI>A collection of user-provided op templates is provided as a string, a list or a map.</LI>
* <LI>All maps are order-preserving, like {@link java.util.LinkedHashMap}</LI>
* <LI>For maps, the keys are taken as the names of the op template instances.</LI>
* <LI>The content of each op template can be provided as a string or as a map.
* <OL>
* <LI>If the op template entry is provided as a string, then the OpTemplate is constructed as having only a single
* <i>statement</i> property (in addition to defaults within scope), as provided by the OpTemplate API.</LI>
* <LI>If the op template entry is provided as a map, then the OpTemplate is constructed as having all of the
* named properties defined in the standard properties above.
* Any entry in the template which is not a reserved word is assigned to the params map as a parameter, in whatever structured
* type is appropriate (scalar, lists, maps).</LI>
* </OL>
* </LI>
* </OL>
*
* <H2>Example Forms</H2>
* The valid forms are shown below as examples.
*
* <H3>One String Statement</H3>
* <pre>{@code
* statement: statement
* }</pre>
*
* <H3>List of Templates</H3>
* <pre>{@code
* statements:
* - statement1
* - statement2
* }</pre>
*
* <H3>List of Maps</H3>
* <pre>{@code
* statements:
* - name: name1
* stmt: statement body
* params:
* p1: v1
* p2: v2
* }</pre>
*
* <H3>List Of Condensed Maps</H3>
* <pre>{@code
* statements:
* - name1: statement body
* p1: v1
* p2: v2
* }</pre>
*/
public interface OpTemplate extends Tagged {
String getName();
@ -30,11 +144,12 @@ public interface OpTemplate extends Tagged {
Optional<String> getOptionalStringParam(String name);
Map<String,String> getTags();
Map<String, String> getTags();
/**
* Parse the statement for anchors and return a richer view of the StmtDef which
* is simpler to use for most statement configuration needs.
*
* @return a new {@link ParsedStmt}
*/
ParsedStmt getParsed();

View File

@ -30,8 +30,8 @@ public class StmtsBlock implements Tagged, Iterable<OpTemplate> {
private final static String NameToken = "name";
private final static String StmtToken = "stmt";
private final RawStmtsBlock rawStmtsBlock;
private StmtsDoc rawStmtsDoc;
private int blockIdx;
private final StmtsDoc rawStmtsDoc;
private final int blockIdx;
public StmtsBlock(RawStmtsBlock rawStmtsBlock, StmtsDoc rawStmtsDoc, int blockIdx) {
@ -40,14 +40,14 @@ public class StmtsBlock implements Tagged, Iterable<OpTemplate> {
this.blockIdx = blockIdx;
}
public List<OpTemplate> getStmts() {
public List<OpTemplate> getOps() {
List<OpTemplate> rawOpTemplates = new ArrayList<>();
List<RawStmtDef> statements = rawStmtsBlock.getRawStmtDefs();
for (int i = 0; i < statements.size(); i++) {
rawOpTemplates.add(
new StmtDef(this, statements.get(i))
new OpDef(this, statements.get(i))
);
}
return rawOpTemplates;
@ -106,6 +106,6 @@ public class StmtsBlock implements Tagged, Iterable<OpTemplate> {
@Override
@NotNull
public Iterator<OpTemplate> iterator() {
return getStmts().iterator();
return getOps().iterator();
}
}
}

View File

@ -33,7 +33,7 @@ import java.util.stream.Collectors;
*/
public class StmtsDoc implements Tagged, Iterable<StmtsBlock> {
private RawStmtsDoc rawStmtsDoc;
private final RawStmtsDoc rawStmtsDoc;
public StmtsDoc(RawStmtsDoc rawStmtsDoc) {
this.rawStmtsDoc = rawStmtsDoc;
@ -91,7 +91,7 @@ public class StmtsDoc implements Tagged, Iterable<StmtsBlock> {
* including the inherited and overridden values from the this doc and the parent block.
*/
public List<OpTemplate> getStmts() {
return getBlocks().stream().flatMap(b -> b.getStmts().stream()).collect(Collectors.toList());
return getBlocks().stream().flatMap(b -> b.getOps().stream()).collect(Collectors.toList());
}
/**

View File

@ -188,7 +188,7 @@ public class NBCLIScenarioParser {
return sanitized;
}
private static final Pattern WordAndMaybeAssignment = Pattern.compile("(?<name>\\w+)((?<oper>=+)(?<val>.+))?");
private static final Pattern WordAndMaybeAssignment = Pattern.compile("(?<name>\\w[-_\\d\\w.]+)((?<oper>=+)(?<val>.+))?");
private static LinkedHashMap<String, CmdArg> parseStep(String cmd) {
LinkedHashMap<String, CmdArg> parsedStep = new LinkedHashMap<>();
@ -362,7 +362,6 @@ public class NBCLIScenarioParser {
.prefix(includes)
.extension("js")
.list().stream().map(Content::asPath).collect(Collectors.toList());
;
List<String> scriptNames = new ArrayList();
@ -386,9 +385,12 @@ public class NBCLIScenarioParser {
Matcher innerMatcher = innerTemplatePattern.matcher(match);
String[] matchArray = match.split(",");
// if (matchArray.length!=2) {
// throw new BasicError("TEMPLATE form must have two arguments separated by a comma, like 'TEMPLATE(a,b), not '" + match +"'");
// }
//TODO: support recursive matches
if (innerMatcher.find()) {
String[] innerMatch = innerMatcher.group(1).split(",");
String[] innerMatch = innerMatcher.group(1).split("[,:]");
//We want the outer name with the inner default value
templates.put(matchArray[0], innerMatch[1]);

View File

@ -95,7 +95,7 @@ public class CommandTemplate {
this.name = name;
Map<String, String> cmd = new HashMap<>();
// Only parse and inject the oneline form if it is defined.
// Only parse and inject the one-line form if it is defined.
// The first parser to match and return a map will be the last one tried.
// If none of the supplemental parsers work, the default params parser is used
if (oneline != null) {

View File

@ -31,9 +31,9 @@ import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
public class StmtDefTest {
public class OpDefTest {
private final static Logger logger = LogManager.getLogger(StmtDefTest.class);
private final static Logger logger = LogManager.getLogger(OpDefTest.class);
@Test
public void testLayering() {
@ -71,7 +71,7 @@ public class StmtDefTest {
StmtsDoc doc1 = all.getStmtDocs().get(0);
StmtsBlock block1 = doc1.getBlocks().get(0);
assertThat(block1.getName()).isEqualTo("doc1--block0");
List<OpTemplate> assys = block1.getStmts();
List<OpTemplate> assys = block1.getOps();
assertThat(assys).hasSize(2);
OpTemplate sdef1 = assys.get(0);
assertThat(sdef1.getName()).isEqualTo("doc1--block0--stmt1");
@ -93,4 +93,57 @@ public class StmtDefTest {
assertThat(stmt1.getParams()).containsAllEntriesOf(Map.of("timeout", 23423, "foobar", "baz"));
}
@Test
public void testMapOfMaps() {
StmtsDocList all = StatementsLoader.loadPath(logger, "testdocs/statement_variants.yaml");
List<StmtsDoc> docs = all.getStmtDocs();
StmtsDoc doc0 = docs.get(0);
assertThat(doc0.getName()).isEqualTo("map-of-maps");
assertThat(doc0.getBlocks()).hasSize(1);
StmtsBlock block1 = doc0.getBlocks().get(0);
assertThat(block1.getName()).isEqualTo("map-of-maps--block0");
assertThat(block1.getOps()).hasSize(2);
OpTemplate op0 = block1.getOps().get(0);
assertThat(op0.getName()).isEqualTo("map-of-maps--block0--s3");
assertThat(op0.getParams()).hasSize(2);
assertThat(op0.getParams()).containsAllEntriesOf(Map.of("p1", "v7", "p2", "v8"));
OpTemplate op1 = block1.getOps().get(1);
assertThat(op1.getParams()).containsAllEntriesOf(Map.of());
assertThat(op1.getName()).isEqualTo("map-of-maps--block0--s2");
assertThat(op1.getStmt()).isEqualTo("statement2");
}
@Test
public void testBasicStringStmt() {
StmtsDocList all = StatementsLoader.loadPath(logger, "testdocs/statement_variants.yaml");
List<StmtsDoc> docs = all.getStmtDocs();
StmtsDoc doc1 = docs.get(1);
assertThat(doc1.getName()).isEqualTo("string-statement");
assertThat(doc1.getBlocks()).hasSize(1);
StmtsBlock block1 = doc1.getBlocks().get(0);
assertThat(block1.getName()).isEqualTo("string-statement--block0");
assertThat(block1.getOps()).hasSize(1);
OpTemplate op0 = block1.getOps().get(0);
assertThat(op0.getName()).isEqualTo("string-statement--block0--stmt1");
assertThat(op0.getStmt()).isEqualTo("test statement");
}
@Test
public void testListOfNamedMap() {
StmtsDocList all = StatementsLoader.loadPath(logger, "testdocs/statement_variants.yaml");
List<StmtsDoc> docs = all.getStmtDocs();
StmtsDoc doc2 = docs.get(2);
assertThat(doc2.getName()).isEqualTo("list-of-named-map");
assertThat(doc2.getBlocks()).hasSize(1);
StmtsBlock block1 = doc2.getBlocks().get(0);
assertThat(block1.getOps()).hasSize(1);
OpTemplate op0 = block1.getOps().get(0);
assertThat(op0.getName()).isEqualTo("list-of-named-map--block0--s1");
assertThat(op0.getStmt()).isNull();
// TODO: This needs to be clarified and the logic made less ambiguous
// assertThat(op0.getParams()).hasSize(1);
// assertThat(op0.getParams()).containsExactlyEntriesOf(Map.of("p1", "v2", "p2", "v2", "p3", "v3"));
}
}

View File

@ -47,9 +47,9 @@ public class StmtEscapingTest {
StmtsBlock block1 = doc1.getBlocks().get(0);
assertThat(block1.getBindings()).hasSize(0);
assertThat(block1.getTags()).hasSize(0);
assertThat(block1.getStmts()).hasSize(3);
assertThat(block1.getOps()).hasSize(3);
defs = block1.getStmts();
defs = block1.getOps();
}
@Test

View File

@ -38,14 +38,14 @@ public class ParsedStmtTest {
@Test
public void testBasicParser() {
StmtsBlock block0 = doclist.getStmtDocs().get(0).getBlocks().get(0);
OpTemplate stmtDef0 = block0.getStmts().get(0);
OpTemplate stmtDef0 = block0.getOps().get(0);
ParsedStmt parsed0 = stmtDef0.getParsed();
assertThat(parsed0.getExtraBindings()).containsExactly("alpha","gamma");
assertThat(parsed0.getMissingBindings()).containsExactly("delta");
assertThat(parsed0.hasError()).isTrue();
StmtsBlock block1 = doclist.getStmtDocs().get(0).getBlocks().get(1);
OpTemplate stmtDef1 = block1.getStmts().get(0);
OpTemplate stmtDef1 = block1.getOps().get(0);
ParsedStmt parsed1 = stmtDef1.getParsed();
assertThat(parsed1.getExtraBindings()).containsExactly();
assertThat(parsed1.getMissingBindings()).containsExactly();
@ -56,12 +56,12 @@ public class ParsedStmtTest {
public void testMultipleBindingUsage() {
StmtsBlock block2 = doclist.getStmtDocs().get(0).getBlocks().get(2);
OpTemplate stmtDef0 = block2.getStmts().get(0);
OpTemplate stmtDef0 = block2.getOps().get(0);
ParsedStmt parsed0 = stmtDef0.getParsed();
assertThat(parsed0.getMissingBindings()).isEmpty();
assertThat(parsed0.hasError()).isFalse();
OpTemplate stmtDef1 = block2.getStmts().get(1);
OpTemplate stmtDef1 = block2.getOps().get(1);
ParsedStmt parsed1 = stmtDef1.getParsed();
assertThat(parsed1.getMissingBindings().isEmpty());
assertThat(parsed1.hasError()).isFalse();

View File

@ -41,8 +41,8 @@ public class StmtDetailOverrideTest {
assertThat(doc1.getBlocks()).hasSize(2);
StmtsBlock doc1block0 = doc1.getBlocks().get(0);
assertThat(doc1block0.getStmts().size()).isEqualTo(1);
OpTemplate s = doc1block0.getStmts().get(0);
assertThat(doc1block0.getOps().size()).isEqualTo(1);
OpTemplate s = doc1block0.getOps().get(0);
assertThat(s.getName()).isEqualTo("block0--stmt1");
assertThat(s.getStmt()).isEqualTo("globalstatement1");
assertThat(s.getBindings()).hasSize(1);
@ -50,7 +50,7 @@ public class StmtDetailOverrideTest {
assertThat(s.getTags()).hasSize(1);
StmtsBlock doc1block1 = doc1.getBlocks().get(1);
List<OpTemplate> stmts = doc1block1.getStmts();
List<OpTemplate> stmts = doc1block1.getOps();
assertThat(stmts).hasSize(4);
s = stmts.get(0);

View File

@ -77,7 +77,7 @@ public class StmtsDocListTest {
@Test
public void testStmtInheritsBlockData() {
StmtsDoc doc0 = doclist.getStmtDocs().get(0);
List<OpTemplate> stmts1 = doc0.getBlocks().get(0).getStmts();
List<OpTemplate> stmts1 = doc0.getBlocks().get(0).getOps();
assertThat(stmts1).hasSize(2);
StmtsBlock block0 = doc0.getBlocks().get(0);

View File

@ -2,7 +2,6 @@ package io.nosqlbench.engine.api.templating;
import io.nosqlbench.engine.api.activityconfig.StatementsLoader;
import io.nosqlbench.engine.api.activityconfig.yaml.OpTemplate;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtDef;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtsDocList;
import org.junit.Test;
@ -17,8 +16,7 @@ public class CommandTemplateTest {
" - s1: test1=foo test2=bar");
OpTemplate stmtDef = stmtsDocs.getStmts().get(0);
CommandTemplate ct = new CommandTemplate(stmtDef);
// assertThat(ct.isStatic()).isTrue();
assertThat(ct.isStatic()).isTrue();
}
}
}

View File

@ -56,3 +56,4 @@ params:
bindings:
b11: b11d
b12: b12d

View File

@ -0,0 +1,16 @@
name: map-of-maps
statements:
s3:
p1: v7
p2: v8
s2: statement2
---
name: string-statement
statement: "test statement"
---
name: list-of-named-map
statements:
- s1:
p1: v1
p2: v2
p3: v3
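The file above exercises three workload-definition forms: a map of named maps, a single statement string, and a list of named maps. As a minimal sketch only (not part of this commit), the fragment below shows how those forms surface through the loader API already used by the tests earlier in this diff; it assumes it lives inside that same test class, reusing its logger field and testdocs resource path.

@Test
public void sketchIterateStatementVariants() {
    // Load the variants file the same way testListOfNamedMap does above.
    StmtsDocList all = StatementsLoader.loadPath(logger, "testdocs/statement_variants.yaml");
    for (StmtsDoc doc : all.getStmtDocs()) {
        for (StmtsBlock block : doc.getBlocks()) {
            for (OpTemplate op : block.getOps()) {
                // map-of-maps and list-of-named-map entries carry their key/value pairs
                // as op details, while the string-statement form populates getStmt().
                System.out.println(op.getName() + " -> " + op.getStmt());
            }
        }
    }
}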

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,13 +23,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-core</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-docker</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -21,7 +21,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -28,13 +28,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
@ -85,7 +85,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-clients</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<scope>compile</scope>
</dependency>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -65,7 +65,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -28,7 +28,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>docsys</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,7 +22,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -47,7 +47,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-cli</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -3,7 +3,7 @@
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<packaging>pom</packaging>
<properties>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -31,7 +31,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-annotations</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -24,31 +24,31 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-rest</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-cli</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-docs</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-core</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-extensions</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<!-- <dependency>-->
@ -60,73 +60,73 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-web</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-kafka</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-diag</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-tcp</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-http</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-jmx</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-dsegraph-shaded</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cql-shaded</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cqlverify</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-mongodb</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-pulsar</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<!-- <dependency>-->
@ -138,7 +138,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cockroachdb</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
@ -269,7 +269,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-mongodb</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>
</profile>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>mvn-defaults</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,14 +23,14 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<artifactId>nb-api</artifactId>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lang</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -20,7 +20,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -9,11 +9,23 @@ import java.util.function.LongToIntFunction;
public class HashRangeScaled implements LongToIntFunction {
private final Hash hash = new Hash();
private final double scalefactor;
public HashRangeScaled(double scalefactor) {
this.scalefactor = scalefactor;
}
public HashRangeScaled() {
this.scalefactor = 1.0D;
}
@Override
public int applyAsInt(long operand) {
if (operand==0) { return 0; }
if (operand == 0) {
return 0;
}
long l = hash.applyAsLong(operand);
return (int) ((l % operand) % Integer.MAX_VALUE);
return (int) (((l % operand) * scalefactor) % Integer.MAX_VALUE);
}
}

View File

@ -5,20 +5,29 @@ import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper;
import java.util.function.LongUnaryOperator;
/**
* Return a pseudo-random value which can only be as large as the input.
* Return a pseudo-random value which can only be as large as the input times
* a scale factor, with a default scale factor of 1.0d
*/
@ThreadSafeMapper
public class HashRangeScaled implements LongUnaryOperator {
private Hash hash = new Hash();
private final double scalefactor;
private final Hash hash = new Hash();
public HashRangeScaled(double scalefactor) {
this.scalefactor = scalefactor;
}
public HashRangeScaled() {
this.scalefactor = 1.0D;
}
@Override
public long applyAsLong(long operand) {
if (operand==0) { return 0; }
if (operand == 0) {
return 0;
}
long hashed = hash.applyAsLong(operand);
return hashed % operand;
return (long) ((hashed % operand) * scalefactor);
}
}
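The Javadoc change above describes the new contract: the result is bounded by the input times the scale factor, with 1.0 as the default. Below is an editor's sketch only (not part of this commit) of how the two constructors added in this diff would be used; the same constructor pattern is applied to the int-returning variants elsewhere in the commit.

HashRangeScaled unscaled = new HashRangeScaled();       // default scale factor of 1.0
HashRangeScaled halved   = new HashRangeScaled(0.5d);   // outputs bounded by operand * 0.5

long a = unscaled.applyAsLong(1000L);   // pseudo-random value no larger than 1000
long b = halved.applyAsLong(1000L);     // pseudo-random value no larger than 500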

View File

@ -8,10 +8,21 @@ import java.util.function.IntUnaryOperator;
public class HashRangeScaled implements IntUnaryOperator {
private final Hash hash = new Hash();
private final double scalefactor;
public HashRangeScaled(double scalefactor) {
this.scalefactor = scalefactor;
}
public HashRangeScaled() {
this.scalefactor = 1.0D;
}
@Override
public int applyAsInt(int operand) {
if (operand==0) { return 0; }
return hash.applyAsInt(operand) % operand;
if (operand == 0) {
return 0;
}
return (int) ((hash.applyAsInt(operand) % operand) * scalefactor) % Integer.MAX_VALUE;
}
}

View File

@ -1,7 +1,10 @@
package io.nosqlbench.virtdata.library.basics.shared.from_long.to_long;
import org.assertj.core.data.Percentage;
import org.junit.Test;
import java.util.LongSummaryStatistics;
import static org.assertj.core.api.Assertions.assertThat;
public class HashRangeScaledTest {
@ -11,8 +14,51 @@ public class HashRangeScaledTest {
HashRangeScaled hrs = new HashRangeScaled();
for (long i = 0; i < 100; i++) {
long l = hrs.applyAsLong(i);
assertThat(l).isBetween(0L,i);
assertThat(l).isBetween(0L, i);
}
}
}
@Test
public void testHashRangeScaledLongs() {
// This presumes a sliding triangular distribution in the data
HashRangeScaled hrs = new HashRangeScaled();
LongSummaryStatistics lss = new LongSummaryStatistics();
long top = 1000000;
for (long i = 0; i < top; i++) {
lss.accept(hrs.applyAsLong(i));
}
System.out.println(lss);
assertThat(lss.getAverage()).isCloseTo(top / 4d, Percentage.withPercentage(1d));
}
@Test
public void testHashRangeScaledLongsHalf() {
// This presumes a sliding triangular distribution in the data
HashRangeScaled hrs = new HashRangeScaled(0.5d);
LongSummaryStatistics lss = new LongSummaryStatistics();
long top = 1000000;
for (long i = 0; i < top; i++) {
lss.accept(hrs.applyAsLong(i));
}
System.out.println(lss);
assertThat(lss.getAverage()).isCloseTo(top / 8d, Percentage.withPercentage(1d));
}
@Test
public void testHashRangeScaledLongsDoubled() {
// This presumes a sliding triangular distribution in the data
HashRangeScaled hrs = new HashRangeScaled(2d);
LongSummaryStatistics lss = new LongSummaryStatistics();
long top = 1000000;
for (long i = 0; i < top; i++) {
lss.accept(hrs.applyAsLong(i));
}
System.out.println(lss);
assertThat(lss.getAverage()).isCloseTo(top / 2d, Percentage.withPercentage(1d));
}
}
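A short note on the asserted averages (editor's addition, not part of the commit): following the "sliding triangular distribution" comments, the hashed value for input i is treated as roughly uniform on [0, i), so with scale factor s the expected output is about s*i/2, and averaging over i from 0 to N gives

\[
\frac{1}{N}\sum_{i=0}^{N-1} \frac{s\,i}{2} \;\approx\; \frac{s\,N}{4}.
\]

With s = 1.0, 0.5, and 2.0 this yields N/4, N/8, and N/2, which is why the three tests check the running average against top/4, top/8, and top/2 within one percent.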

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-basics</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -20,13 +20,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-basics</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -20,7 +20,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-basics</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -18,7 +18,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -18,36 +18,36 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-realdata</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-realer</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-random</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<artifactId>virtdata-lib-basics</artifactId>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
<artifactId>virtdata-lib-curves4</artifactId>
</dependency>
@ -55,7 +55,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>docsys</artifactId>
<version>4.15.18-SNAPSHOT</version>
<version>4.15.19-SNAPSHOT</version>
</dependency>
</dependencies>