Merge branch 'main' into main

This commit is contained in:
Jonathan Shook 2020-12-17 13:12:15 -06:00 committed by GitHub
commit 27a45ee464
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
396 changed files with 14830 additions and 13263 deletions

View File

@ -15,7 +15,7 @@ jobs:
- uses: actions/setup-java@v1
name: setup java
with:
java-version: '14'
java-version: '15'
java-package: jdk
architecture: x64

View File

@ -10,191 +10,235 @@ jobs:
runs-on: ubuntu-18.04
steps:
- name: checkout repo
uses: actions/checkout@v2
- name: checkout repo
uses: actions/checkout@v2
- name: setup java
uses: actions/setup-java@v1
with:
java-version: '14'
java-package: jdk
architecture: x64
- name: setup java
uses: actions/setup-java@v1
with:
java-version: '15'
java-package: jdk
architecture: x64
- name: avoid release loop
run: scripts/avoid-release-loop.sh
env:
GIT_RELEASE_BOT_NAME: "nb-droid"
- name: avoid release loop
run: scripts/avoid-release-loop.sh
env:
GIT_RELEASE_BOT_NAME: "nb-droid"
- name: capture tty for gpg
run: |
echo "::set-env name=TTY::"$(tty)
echo "::set-env name=GPG_TTY::"$(tty)
- name: capture tty for gpg
run: |
echo "TTY="$(tty) >> $GITHUB_ENV
echo "GPG_TTY="$(tty) >> $GITHUB_ENV
# echo "::set-env name=TTY::"$(tty)
# echo "::set-env name=GPG_TTY::"$(tty)
- name: initialize gpg
run: |
set -x
echo "${{ secrets.GITHUB_GPG_KEY }}" | base64 -d > private.key
gpg --import --batch ./private.key
rm ./private.key
echo "gnupg files:"
ls -l ~/.gnupg/
- name: initialize gpg
run: |
set -x
echo "${{ secrets.GITHUB_GPG_KEY }}" | base64 -d > private.key
gpg --import --batch ./private.key
rm ./private.key
echo "gnupg files:"
ls -l ~/.gnupg/
- name: set git username
run: git config --global user.email "${{ secrets.NBDROID_EMAIL }}"
- name: set git username
run: git config --global user.email "${{ secrets.NBDROID_EMAIL }}"
- name: set git email
run: git config --global user.name "${{ secrets.NBDROID_NAME }}"
- name: free disk space
run: |
sudo swapoff -a
sudo rm -f /swapfile
sudo apt clean
docker rmi $(docker image ls -aq)
df -h
- name: Cache Maven packages
uses: actions/cache@v1
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
restore-keys: ${{ runner.os }}-m2
- name: read versions
run: |
set -x
CURRENT_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
RELEASE_VERSION=${CURRENT_VERSION%%-SNAPSHOT}
BASE_VERSION=$(echo "$RELEASE_VERSION" | cut -d'.' -f1-2)
MINOR_VERSION=$(echo "$RELEASE_VERSION" | cut -d'.' -f3)
NEXT_MINOR_VERSION=$(( MINOR_VERSION+1))
NEXT_SNAPSHOT="${BASE_VERSION}.${NEXT_MINOR_VERSION}-SNAPSHOT"
RELEASE_TAGNAME="nosqlbench-${RELEASE_VERSION}"
echo "::set-env name=NEXT_SNAPSHOT::${NEXT_SNAPSHOT}"
echo "::set-env name=RELEASE_VERSION::${RELEASE_VERSION}"
echo "::set-env name=RELEASE_TAGNAME::${RELEASE_TAGNAME}"
- name: prepare release summary
id: prepare_summary
run: |
#summary=$(scripts/release-notes.sh)
summary=$(cat RELEASENOTES.md)
summary="${summary//'%'/'%25'}"
summary="${summary//$'\n'/'%0A'}"
summary="${summary//$'\r'/'%0D'}"
echo "::set-output name=release_summary::$summary"
- name: prepare release
run: scripts/release-prepare.sh
env:
RELEASE_BRANCH_NAME: "main"
GIT_RELEASE_BOT_NAME: "nb-droid"
GIT_RELEASE_BOT_EMAIL: ${{ secrets.GIT_RELEASE_BOT_EMAIL }}
ACCESS_TOKEN: ${{ secrets.GITHUB_ACCESS_TOKEN }}
GPG_ENABLED: "true"
GPG_KEY_ID: ${{ secrets.GITHUB_GPG_KEY_ID }}
GPG_KEY: ${{ secrets.GITHUB_GPG_KEY }}
GPG_SERVER_NAME: ${{ secrets.GPG_SERVER_NAME }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
MAVEN_REPO_SERVER_ID: ${{ secrets.MAVEN_REPO_SERVER_ID }}
MAVEN_REPO_SERVER_USERNAME: ${{ secrets.MVN_REPO_PRIVATE_REPO_USER }}
MAVEN_REPO_SERVER_PASSWORD: ${{ secrets.MVN_REPO_PRIVATE_REPO_PASSWORD }}
- name: bundle integration test logs
run: |
pwd
ls -l
mkdir -p itlogs/nb
cp -R nb/logs itlogs/nb
- name: upload integration test logs
uses: actions/upload-artifact@v1
with:
name: itlogs
path: itlogs
- name: set git email
run: git config --global user.name "${{ secrets.NBDROID_NAME }}"
- name: perform release
run: scripts/release-perform.sh
continue-on-error: true
env:
RELEASE_BRANCH_NAME: "main"
GIT_RELEASE_BOT_NAME: "nb-droid"
GIT_RELEASE_BOT_EMAIL: ${{ secrets.GIT_RELEASE_BOT_EMAIL }}
ACCESS_TOKEN: ${{ secrets.GITHUB_ACCESS_TOKEN }}
GPG_ENABLED: "true"
GPG_KEY_ID: ${{ secrets.GITHUB_GPG_KEY_ID }}
GPG_KEY: ${{ secrets.GITHUB_GPG_KEY }}
GPG_SERVER_NAME: ${{ secrets.GPG_SERVER_NAME }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
MAVEN_REPO_SERVER_ID: ${{ secrets.MAVEN_REPO_SERVER_ID }}
MAVEN_REPO_SERVER_USERNAME: ${{ secrets.MVN_REPO_PRIVATE_REPO_USER }}
MAVEN_REPO_SERVER_PASSWORD: ${{ secrets.MVN_REPO_PRIVATE_REPO_PASSWORD }}
- name: bundle artifacts
run: |
pwd
ls -l
mkdir staging
cp nb/target/nb.jar nb/target/nb staging
- name: upload artifacts
uses: actions/upload-artifact@v1
with:
name: binaries
path: staging
- name: docker push
uses: docker/build-push-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: nosqlbench/nosqlbench
tags: latest, ${{ env.RELEASE_VERSION }}
tag_with_ref: false
- name: free disk space
run: |
sudo swapoff -a
sudo rm -f /swapfile
sudo apt clean
docker rmi $(docker image ls -aq)
df -h
- name: bundle guidebook
run: mkdir guidebook && cp -R nb/target/guidebook guidebook
- name: Cache Maven packages
uses: actions/cache@v1
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
restore-keys: ${{ runner.os }}-m2
- name: upload guidebook
uses: actions/upload-artifact@v1
with:
name: guidebook
path: guidebook
- name: read versions
run: |
set -x
CURRENT_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
RELEASE_VERSION=${CURRENT_VERSION%%-SNAPSHOT}
BASE_VERSION=$(echo "$RELEASE_VERSION" | cut -d'.' -f1-2)
MINOR_VERSION=$(echo "$RELEASE_VERSION" | cut -d'.' -f3)
NEXT_MINOR_VERSION=$(( MINOR_VERSION+1))
NEXT_SNAPSHOT="${BASE_VERSION}.${NEXT_MINOR_VERSION}-SNAPSHOT"
RELEASE_TAGNAME="nosqlbench-${RELEASE_VERSION}"
echo "NEXT_SNAPSHOT=${NEXT_SNAPSHOT}" >> $GITHUB_ENV
echo "RELEASE_VERSION=${RELEASE_VERSION}" >> $GITHUB_ENV
echo "RELEASE_TAGNAME=${RELEASE_TAGNAME}" >> $GITHUB_ENV
# echo "::set-env name=NEXT_SNAPSHOT::${NEXT_SNAPSHOT}"
# echo "::set-env name=RELEASE_VERSION::${RELEASE_VERSION}"
# echo "::set-env name=RELEASE_TAGNAME::${RELEASE_TAGNAME}"
- name: create release
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ env.RELEASE_TAGNAME }}
release_name: Release ${{ env.RELEASE_TAGNAME }}
draft: false
prerelease: false
body: ${{ steps.prepare_summary.outputs.release_summary }}
- name: prepare release summary
id: prepare_summary
run: |
#summary=$(scripts/release-notes.sh)
summary=$(cat RELEASENOTES.md)
summary="${summary//'%'/'%25'}"
summary="${summary//$'\n'/'%0A'}"
summary="${summary//$'\r'/'%0D'}"
echo "::set-output name=release_summary::$summary"
- name: upload nb.jar
id: upload-nb-jar
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: nb/target/nb.jar
asset_name: nb.jar
asset_content_type: application/octet-stream
- name: select release type from branch name
run: |
current_branch=$(git rev-parse --abbrev-ref HEAD)
if [[ ${current_branch} == *"-rc"* ]]
then
echo "PRERELEASE=true" >> $GITHUB_ENV
echo "DOCKER_TAGS=nosqlbench/nosqlbench:${{ env.RELEASE_VERSION }}" >> $GITHUB_ENV
else
echo "PRERELEASE=false" >> $GITHUB_ENV
echo "DOCKER_TAGS=nosqlbench/nosqlbench:latest,nosqlbench/nosqlbench:${{ env.RELEASE_VERSION }}" >> $GITHUB_ENV
fi
- name: upload nb binary
id: upload-nb-binary
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: nb/target/nb
asset_name: nb
asset_content_type: application/octet-stream
- name: prepare Maven release
run: scripts/release-prepare.sh
env:
RELEASE_BRANCH_PATTERN: "release"
PRERELEASE_BRANCH_PATTERN: "main"
GIT_RELEASE_BOT_NAME: "nb-droid"
GIT_RELEASE_BOT_EMAIL: ${{ secrets.GIT_RELEASE_BOT_EMAIL }}
ACCESS_TOKEN: ${{ secrets.GITHUB_ACCESS_TOKEN }}
GPG_ENABLED: "true"
GPG_KEY_ID: ${{ secrets.GITHUB_GPG_KEY_ID }}
GPG_KEY: ${{ secrets.GITHUB_GPG_KEY }}
GPG_SERVER_NAME: ${{ secrets.GPG_SERVER_NAME }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
MAVEN_REPO_SERVER_ID: ${{ secrets.MAVEN_REPO_SERVER_ID }}
MAVEN_REPO_SERVER_USERNAME: ${{ secrets.MVN_REPO_PRIVATE_REPO_USER }}
MAVEN_REPO_SERVER_PASSWORD: ${{ secrets.MVN_REPO_PRIVATE_REPO_PASSWORD }}
- name: Setup docker buildx
uses: docker/setup-buildx-action@v1
- name: docker hub login
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: docker test build
uses: docker/build-push-action@v2
with:
context: .
file: Dockerfile
pull: true
push: false
load: true
tags: ${{ env.DOCKER_TAGS }}
- name: sanity check docker image
run: |
docker run --rm nosqlbench/nosqlbench:${{ env.RELEASE_VERSION }} --version
- name: bundle integration test logs
run: |
pwd
find nb
mkdir -p itlogs/nb
cp -R nb/logs itlogs/nb
- name: upload integration test logs
uses: actions/upload-artifact@v1
with:
name: itlogs
path: itlogs
- name: perform Maven release
run: scripts/release-perform.sh
continue-on-error: true
env:
RELEASE_BRANCH_NAME: "main"
GIT_RELEASE_BOT_NAME: "nb-droid"
GIT_RELEASE_BOT_EMAIL: ${{ secrets.GIT_RELEASE_BOT_EMAIL }}
ACCESS_TOKEN: ${{ secrets.GITHUB_ACCESS_TOKEN }}
GPG_ENABLED: "true"
GPG_KEY_ID: ${{ secrets.GITHUB_GPG_KEY_ID }}
GPG_KEY: ${{ secrets.GITHUB_GPG_KEY }}
GPG_SERVER_NAME: ${{ secrets.GPG_SERVER_NAME }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
MAVEN_REPO_SERVER_ID: ${{ secrets.MAVEN_REPO_SERVER_ID }}
MAVEN_REPO_SERVER_USERNAME: ${{ secrets.MVN_REPO_PRIVATE_REPO_USER }}
MAVEN_REPO_SERVER_PASSWORD: ${{ secrets.MVN_REPO_PRIVATE_REPO_PASSWORD }}
- name: bundle artifacts
run: |
pwd
ls -l
mkdir staging
cp nb/target/nb.jar nb/target/nb staging
- name: upload artifacts
uses: actions/upload-artifact@v1
with:
name: binaries
path: staging
- name: docker push to hub
uses: docker/build-push-action@v2
with:
context: .
file: Dockerfile
pull: true
push: true
tags: ${{ env.DOCKER_TAGS }}
- name: bundle guidebook
run: mkdir guidebook && cp -R nb/target/guidebook guidebook
- name: upload guidebook
uses: actions/upload-artifact@v1
with:
name: guidebook
path: guidebook
- name: create github release
id: create_github_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ env.RELEASE_TAGNAME }}
release_name: Release ${{ env.RELEASE_TAGNAME }}
draft: false
prerelease: ${{ env.PRERELEASE }}
body: ${{ steps.prepare_summary.outputs.release_summary }}
- name: upload nb.jar to github release
id: upload-nb-jar
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_github_release.outputs.upload_url }}
asset_path: nb/target/nb.jar
asset_name: nb.jar
asset_content_type: application/octet-stream
- name: upload nb binary to github release
id: upload-nb-binary
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_github_release.outputs.upload_url }}
asset_path: nb/target/nb
asset_name: nb
asset_content_type: application/octet-stream
docs:
needs: release
@ -210,8 +254,8 @@ jobs:
- name: download guidebook
uses: actions/download-artifact@v1
with:
name: guidebook
path: guidebook
name: guidebook
path: guidebook
- run: ls -la
@ -220,30 +264,30 @@ jobs:
NBDROID_NAME: ${{ secrets.NBDROID_NAME }}
NBDROID_TOKEN: ${{ secrets.NBDROID_TOKEN }}
run: |
git clone https://${{secrets.NBDROID_NAME}}:${{secrets.NBDROID_TOKEN}}@github.com/nosqlbench/nosqlbench-docs.git nosqlbench-docs
cd nosqlbench-docs
echo "files listing"
find .
git remote set-url origin https://${{secrets.NBDROID_NAME}}:${{secrets.NBDROID_TOKEN}}@github.com/nosqlbench/nosqlbench-docs.git
git remote -v
git clone https://${{secrets.NBDROID_NAME}}:${{secrets.NBDROID_TOKEN}}@github.com/nosqlbench/nosqlbench-docs.git nosqlbench-docs
cd nosqlbench-docs
echo "files listing"
find .
git remote set-url origin https://${{secrets.NBDROID_NAME}}:${{secrets.NBDROID_TOKEN}}@github.com/nosqlbench/nosqlbench-docs.git
git remote -v
- name: push changes
env:
NBDROID_NAME: ${{ secrets.NBDROID_NAME }}
NBDROID_TOKEN: ${{ secrets.NBDROID_TOKEN }}
run: |
rsync -av --delete guidebook/guidebook/ nosqlbench-docs/docs/
echo "docs.nosqlbench.io" > nosqlbench-docs/docs/CNAME
cd nosqlbench-docs
git add docs
git add -u
CHANGES=$(git status --porcelain 2>/dev/null| wc -l)
echo "found $CHANGES to push for doc updates"
if (( $CHANGES > 0 ))
then
git commit -m"docs update for $GITHUB_REF"
git push
fi
rsync -av --delete guidebook/guidebook/ nosqlbench-docs/docs/
echo "docs.nosqlbench.io" > nosqlbench-docs/docs/CNAME
cd nosqlbench-docs
git add docs
git add -u
CHANGES=$(git status --porcelain 2>/dev/null| wc -l)
echo "found $CHANGES to push for doc updates"
if (( $CHANGES > 0 ))
then
git commit -m"docs update for $GITHUB_REF"
git push
fi

View File

@ -0,0 +1 @@
eyJrIjoibVFjRkV5Z096VHE0MjhXYk1RM2p1cnFuUkhQMXVZNGwiLCJuIjoibm9zcWxiZW5jaC0xMC4xMC4xMDAuNTItMTYwNzMxMDE2MDM0OCIsImlkIjoxfQ==

View File

@ -4,12 +4,21 @@
The latest release of NoSQLBench is always available from github releases.
- download [the latest release of nb](https://github.com/nosqlbench/nosqlbench/releases/latest/download/nb), a linux
binary
-
download [the latest release of nb](https://github.com/nosqlbench/nosqlbench/releases/latest/download/nb)
, a linux binary
- To download it with curl,
use `curl -L -O https://github.com/nosqlbench/nosqlbench/releases/latest/download/nb`
.
- (be sure to `chmod +x nb` once you download it)
- download [the latest release of nb.jar](https://github.com/nosqlbench/nosqlbench/releases/latest/download/nb.jar), a
single-jar application.
- This requires java 14 or later, make sure your `java -version` command says that you are on Java 14 or later.
-
download [the latest release of nb.jar](https://github.com/nosqlbench/nosqlbench/releases/latest/download/nb.jar)
, a single-jar application.
- This requires java 15 or later, make sure your `java -version`
command says that you are on Java 15 or later.
- To download it with curl,
use `curl -L -O https://github.com/nosqlbench/nosqlbench/releases/latest/download/nb.jar`
.
## Docker

View File

@ -1,4 +1,4 @@
FROM openjdk:14-alpine
FROM openjdk:15-alpine
RUN apk --no-cache add curl
COPY nb/target/nb.jar nb.jar

View File

@ -1,19 +1,7 @@
- f702736f (HEAD -> main) flatten maps in OpTemplates to allow map consumers
- 4a9fa81b pull Named service up
- 574a89e7 pull ConfigAware api up
- 71195d0a http release fixes
- 3fa89ebc partial work for --args
- 262df571 improved error messages
- 453b1b2c extract ratio from optemplate before construction
- dcf458c4 annotation progress
- 7017b6cd add SplitLongs function
- 0758670f add nb clients module
- 1dbf36c4 add grafana annotation client
- 120d9761 remove warning for speculative defaults
- c78f1e92 typos and minor fixes
- f3f1b8f4 add annotations design sketch
- 394580d6 more idea sketches
- 9e5972f5 comment cleanup
- 06edfcc9 fix NPE when binding has undefined value
- 510746b7 improve mismatched scenario name feedback
- 735b32e1 add illustrative test for EpochMillisToJavaLocalDate
- 07800059 move grafana auto-key to grafana client
- 12acd07f parameterize docker prom tag
- 8052e64e make docker prom tag configurable
- bb0884e2 grafana client improvements
- c83f4d87 prototype metrics summarizer
- 1d107d87 update nb4 dashboard def
- trigger release

View File

@ -0,0 +1,244 @@
## RateLimiter Design
The nosqlbench rate limiter is a hybrid design, combining ideas from
well-known algorithms with a heavy dose of mechanical sympathy. The
resulting implementation provides the following:
1. A basic design that can be explained in one page (this page!)
2. High throughput, compared to other rate limiters tested.
3. Graceful degradation with increasing concurrency.
4. Clearly defined behavioral semantics.
5. Efficient burst capability, for tunable catch-up rates.
6. Efficient calculation of wait time.
## Parameters
**rate** - In simplest terms, users simply need to configure the *rate*.
For example, `rate=12000` specifies an op rate of 12000 ops/second.
**burst rate** - Additionally, users may specify a burst rate which can be
used to recover unused time when a client is able to go faster than the
strict limit. The burst rate is multiplied by the _op rate_ to arrive at
the maximum rate when wait time is available to recover. For
example, `rate=12000,1.1`
specifies that a client may operate at 12000 ops/s _when it is caught up_,
while allowing it to go at a rate of up to 13200 ops/s _when it is behind
schedule_.
## Design Principles
The core design of the rate limiter is based on
the [token bucket](https://en.wikipedia.org/wiki/Token_bucket) algorithm
as established in the telecom industry for rate metering. Additional
refinements have been added to allow for flexible and reliable use on
non-realtime systems.
The unit of scheduling used in this design is the token, corresponding
directly to a nanosecond of time. The scheduling time that is made
available to callers is stored in a pool of tokens which is set to a
configured size. The size of the token pool determines how many grants are
allowed to be dispatched before the next one is forced to wait for
available tokens.
At some regular frequency, a filler thread adds tokens (nanoseconds of
time to be distributed to waiting ops) to the pool. The callers which are
waiting for these tokens consume a number of tokens serially. If the pool
does not contain the requested number of tokens, then the caller is
blocked using basic synchronization primitives. When the pool is filled
any blocked callers are unblocked.
The hybrid rate limiter tracks and accumulates both the passage of system
time and the usage rate of this time as a measurement of progress. The
delta between these two reference points in time captures a very simple
and empirical value of imposed wait time.
That is, the time which was allocated but which was not used always
represents a slow down which is imposed by external factors. This
manifests as slower response when considering the target rate to be
equivalent to user load.
## Design Details
In fact, there are three pools. The _active_ pool, the _bursting_ pool,
and the
_waiting_ pool. The active pool has a limited size based on the number of
operations that are allowed to be granted concurrently.
The bursting pool is sized according to the relative burst rate and the
size of the active pool. For example, with an op rate of 1000 ops/s and a
burst rate of 1.1, the active pool can be sized to 1E9 nanos (one second
of nanos), and the burst pool can be sized to 1E8 (1/10 of that), thus
yielding a combined pool size of 1E9 + 1E8, or 1100000000 ns.
The waiting pool is where all extra tokens are held in reserve. It is
unlimited except by the size of a long value. The size of the waiting pool
is a direct measure of wait time in nanoseconds.
Within the pools, tokens (time) are neither created nor destroyed. They
are added by the filler based on the passage of time, and consumed by
callers when they become available. In between these operations, the net
sum of tokens is preserved. In short, when time deltas are observed in the
system clock, this time is accumulated into the available scheduling time
of the token pools. In this way, the token pool acts as a metered
dispenser of scheduling time to waiting (or not) consumers.
The filler thread adds tokens to the pool according to the system
real-time clock, at some estimated but unreliable interval. The frequency
of filling is set high enough to give a reliable perception of time
passing smoothly, but low enough to avoid wasting too much thread time in
calling overhead. (It is set to 1K/s by default). Each time filling
occurs, the real-time clock is check-pointed, and the time delta is fed
into the pool filling logic as explained below.
## Visual Explanation
The diagram below explains the moving parts of the hybrid rate limiter.
The arrows represent the flow of tokens (ns) as a form of scheduling
currency.
The top box shows an active token filler thread which polls the system
clock and accumulates new time into the token pool.
The bottom boxes represent concurrent readers of the token pool. These are
typically independent threads which do a blocking read for tokens once
they are ready to execute the rate-limited task.
![Hybrid Ratelimiter Schematic](hybrid_ratelimiter.png)
In the middle, the passive component in this diagram is the token pool
itself. When the token filler adds tokens, it never blocks. However, the
token filler can cause any readers of the token pool to unblock so that
they can acquire newly available tokens.
When time is added to the token pool, the following steps are taken:
1) New tokens (based on measured time elapsed since the last fill) are
added to the active pool until it is full.
2) Any extra tokens are added to the waiting pool.
3) If the waiting pool has any tokens, and there is room in the bursting
pool, some tokens are moved from the waiting pool to the bursting pool
according to how many will fit.
When a caller asks for a number of tokens, the combined total from the
active and burst pools is available to that caller. If the number of
tokens needed is not yet available, then the caller will block until
tokens are added.
## Bursting Logic
Tokens in the waiting pool represent time that has not been claimed by a
caller. Tokens accumulate in the waiting pool as a side-effect of
continuous filling outpacing continuous draining, thus creating a backlog
of operations.
The pool sizes determine both the maximum instantaneously available
operations as well as the rate at which unclaimed time can be back-filled
back into the active or burst pools.
### Normalizing for Jitter
Since it is not possible to schedule the filler thread to trigger on a
strict and reliable schedule (as in a real-time system), the method of
moving tokens from the waiting pool to the bursting pool must account for
differences in timing. Thus, tokens which are activated for bursting are
scaled according to the amount of time added in the last fill, relative to
the maximum active pool. This means that a full pool fill will allow a
full burst pool fill, presuming wait time is positive by that amount. It
also means that the same effect can be achieved by ten consecutive fills
of a tenth the time each. In effect, bursting is normalized to the passage
of time along with the burst rate, with a maximum cap imposed when
operations are unclaimed by callers.
## Mechanical Trade-offs
In this implementation, it is relatively easy to explain how accuracy and
performance trade-off. They are competing concerns. Consider these two
extremes of an isochronous configuration:
### Slow Isochronous
For example, the rate limiter could be configured for strict isochronous
behavior by setting the active pool size to *one* op of nanos and the
burst rate to 1.0, thus disabling bursting. If the op rate requested is 1
op/s, this configuration will work relatively well, although *any* caller
which doesn't show up (or isn't already waiting) when the tokens become
available will incur a waittime penalty. The odds of this are relatively
low for a high-velocity client.
### Fast Isochronous
However, if the op rate for this type of configuration is set to 1E8
operations per second, then the filler thread will be adding 100 ops worth
of time when there is only *one* op worth of active pool space. This is
due to the fact that filling can only occur at a maximal frequency which
has been set to 1K fills/s on average. That will create artificial wait
time, since the token consumers and producers would not have enough pool
space to hold the tokens needed during fill. It is not possible on most
systems to fill the pool at arbitrarily high fill frequencies. Thus, it is
important for users to understand the limits of the machinery when using
high rates. In most scenarios, these limits will not be onerous.
### Boundary Rules
Taking these effects into account, the default configuration makes some
reasonable trade-offs according to the rules below. These rules should
work well for most rates below 50M ops/s. The net effect of these rules is
to increase work bulking within the token pools as rates go higher.
Trying to go above 50M ops/s while also forcing isochronous behavior will
result in artificial wait-time. For this reason, the pool size itself is
not user-configurable at this time.
- The pool size will always be at least as big as two ops. This rule
ensures that there is adequate buffer space for tokens when callers are
accessing the token pools near the rate of the filler thread. If this
were not ensured, then artificial wait time would be injected due to
overflow error.
- The pool size will always be at least as big as 1E6 nanos, or 1/1000 of
a second. This rule ensures that the filler thread has a reasonably
attainable update frequency which will prevent underflow in the active
or burst pools.
- The number of ops that can fit in the pool will determine how many ops
can be dispatched between fills. For example, an op rate of 1E6 will
mean that up to 1000 ops worth of tokens may be present between fills,
and up to 1000 ops may be allowed to start at any time before the next
fill.
- .1 ops/s : .2 seconds worth
- 1 ops/s : 2 seconds worth
- 100 ops/s : 2 seconds worth
In practical terms, this means that rates slower than 1K ops/S will have
their strictness controlled by the burst rate in general, and rates faster
than 1K ops/S will automatically include some op bulking between fills.
## History
A CAS-oriented method which compensated for RTC calling overhead was used
previously. This method afforded very high performance, but it was
difficult to reason about.
This implementation replaces that previous version. Basic synchronization
primitives (implicit locking via synchronized methods) performed
surprisingly well -- well enough to discard the complexity of the previous
implementation.
Further, this version is much easier to study and reason about.
## New Challenges
While the current implementation works well for most basic cases, high CPU
contention has shown that it can become an artificial bottleneck. Based on
observations on higher end systems with many cores running many threads
and high target rates, it appears that the rate limiter becomes a resource
blocker or forces too much thread management.
Strategies for handling this should be considered:
1) Make callers able to pseudo-randomly (or not randomly) act as a token
filler, such that active consumers can do some work stealing from the
original token filler thread.
2) Analyze the timing and history of a high-contention scenario for
weaknesses in the parameter adjustment rules above.
3) Add internal micro-batching at the consumer interface, such that
contention cost is lower in general.
4) Partition the rate limiter into multiple slices.

Binary file not shown.

After

Width:  |  Height:  |  Size: 69 KiB

File diff suppressed because it is too large Load Diff

After

Width:  |  Height:  |  Size: 143 KiB

View File

@ -9,7 +9,8 @@ can be used by the documentation system.
## MarkdownDocs Service
The primary markdown service that is meant to be consumed by the documetnation system is known simply as
The primary markdown service that is meant to be consumed by the
documentation system is known simply as
MarkdownDocs

18
devdocs/java_versions.md Normal file
View File

@ -0,0 +1,18 @@
# Java Version Updates
This is a list of all the places that the Java version is referenced in
the NoSQLBench project. If you change versions or need to do
version-specific troubleshooting, this list will help.
- In the Dockerfile, in the parent image "FROM" tag.
[here](../Dockerfile)
- In the Maven pom files, under `<source>...</source>`, `<release>...
</release>`, `<target>...</target>`
[here](../mvn-defaults/pom.xml)
- In some cases, a Maven variable `<java.target.version>...</java.target.version>` is used.
[here](../mvn-defaults/pom.xml)
- In the nb appimage build scripts under nb/build-bin.sh.
[here](../nb/build-bin.sh)
- In the github actions workflows for the Java runtime version

View File

@ -4,7 +4,7 @@ NOTE: Here, annotations are notes that are stored in a metrics system for
review, not _Java Annotations_.
The annotations support in nosqlbench is meant to allow for automatic
annotation of important timestamps and qualifying details for a
grafanaAnnotation of important timestamps and qualifying details for a
nosqlbench scenario.
# Annotation Semantics
@ -18,38 +18,72 @@ Annotations always have at least one timestamp, and up to two
. Annotations with one timestamp mark an instant where an event
is known to have occurred.
When instrumenting an event for annotation, both positive and negative
When instrumenting an event for grafanaAnnotation, both positive and negative
outcomes must be instrumented. That is, if a user is expecting an
annotation marker for when an activity was started, they should
instead see an error annotation if there indeed was an error. The
grafanaAnnotation marker for when an activity was started, they should
instead see an error grafanaAnnotation if there indeed was an error. The
successful outcome of starting an activity is a different event
than the failure of it, but they both speak to the outcome of
trying to start an activity.
# NoSQLBench Event Taxonomy
# NoSQLBench Annotation Level
Each annotation comes from a particular level of execution with
NoSQLBench. Starting from the top, each layer is nested within
the last. The conceptual view of this would appear as:
+--------+
| op |
+------------+
| motor |
+-----------------+
| activity |
+---------------------+
| scripting |
+-------------------------+ +---------------+
| scenario | | application |
+-------------------------------------------------+
| CLI ( Command Line Interface ) |
+-------------------------------------------------+
That is, every op happens within a thread motor, every thread motor
happens within an activity, and so on.
- cli
- cli.render
- cli.execution
- cli.error
- cli.render - When the CLI renders a scenario script
- cli.execution - When the CLI executes a scenario
- cli.error - When there is an error at the CLI level
- scenario
- scenario.start
- scenario.stop
- scenario.error
- scenario.params - When a scenario is configured with parameters
- scenario.start - When a scenario is started
- scenario.stop - When a scenario is stopped
- scenario.error - When a scenario throws an error
- scripting
- extensions - When an extension service object is created
- activity
- activity.start
- activity.stop
- activity.param
- activity.error
- thread
- thread.state
- thread.error
- user
- note
- extension
- activity.params - When params are initially set or changed
- activity.start - Immediately before an activity is started
- activity.stop - When an activity is stopped
- activity.error - When an activity throws an error
- motor
- thread.state - When a motor thread changes state
- thread.error - When a motor thread throws an error
- op
-- There are no op-level events at this time
- application
-- There are no application-level events at this time
## tags
These standard tags should be added to every annotation emitted by
NoSQLBench:
**appname**: "nosqlbench"
**layer**: one of the core layers as above
**event**: The name of the event within the layer as shown above
type
: <specific event name>
layer

View File

@ -1,86 +0,0 @@
# Args Files
An argsfile (Command Line Arguments File) is a simple text file which
contains defaults for command-line arguments. You can use an args
file to contain a set of global defaults that you want to use by
default and automatically.
A command, `-argsfile <path>` is used to specify an args file. You can
use it like an instant import statement in the middle of a command
line. Notice that this option uses only a single dash. This
distinguishes the argsfile options from the others in general. These are meta
options which can modify how options are loaded, so it is important
that they look distinctive from everything else.
## Default argsfile
The default args file location is `$HOME/.nosqlbench/argsfile`. If this
file is present, it is loaded by nosqlbench when it starts even if you
don't ask it to. That is, nosqlbench behaves as if your first set of
command line arguments is `-argsfile "$HOME/.nosqlbench/argsfile"`.
However, unlike when you specify `-argsfile ...` explicitly on
your command line, this form will not throw an error if the file is
missing. This means that when you explicitly ask for an args file
to be loaded, and it does not exist, an error is thrown. If you
don't ask for it, but the default one does exist, it is loaded
automatically before other options are processed.
## Args file format
An args file simply contains an argument on each line, like this:
--docker-metrics
--annotate all
--grafana-baseurl http://localhost:3000/
## Pinning options
It is possible to pin an option to the default args file by use of the
`-pin` meta-option. This option will take the following command line
argument and add it to the currently active args file. That means, if
you use `-pin --docker-metrics`, then `--docker-metrics` is added to
the args file. If there is an exact duplicate of the same option
and value, then it is skipped, but if the option name is the same
with a different value, then it is added at the end. This allows
for options which may be called multiple times normally.
If the `-pin` option occurs after an explicit use of `-argsfile
<filename>`, then the filename used in this argument is the one that
is modified.
After the `-pin` option, the following argument is taken as any global
option (--with-double-dashes) and any non-option values after it which
are not commands (reserved words).
When the `-pin` option is used, it does not cause the pinned option
to be excluded from the current command line call. The effects of the
pinned option will take place in the current nosqlbench invocation
just as they would without the `-pin`. However, when pinning global
options when there are no commands on the command line, nosqlbench
will not run a scenario, so this form is suitable for setting
arguments.
As a special case, if the `-pin` is the last option of
## Unpinning options.
To reverse the effect of pinning an option, you simply use
`-unpin ...`.
The behavior of -unpin is slightly different from -pin. Specifically,
an option which is unpinned will be removed from the arg list, and will
not be used in the current invocation of nosqlbench after removal.
Further, you can specify `-unpin --grafana-baseurl` to unpin an option which
normally has an argument, and all instances of that argument will be
removed. If you want to unpin a specific instance of a multi-valued
option, or one that can be specified more than once with different
parameter values, then you must provide the value as well, as in
`-unpin --log-histograms 'histodata.log:.*:1m'`
# Setting defaults, the simple way
To simply set global defaults, you can run nosqlbench with a command
line like this:
./nb -pin --docker-metrics-at metricsnode -pin --annotate all

View File

@ -9,7 +9,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -18,7 +18,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
@ -98,7 +98,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -14,21 +14,6 @@ import java.util.Arrays;
public class DocServerApp {
private static final Logger logger = LogManager.getLogger(DocServerApp.class);
// static {
// // defer to an extant logger context if it is there, otherwise
// // assume a local and docserver specific logging configuration
//
// LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
// if (context.getLoggerList().size() == 1 && context.getLoggerList().get(0).getName().equals("ROOT")) {
// configureDocServerLogging(context);
// logger = LoggerFactory.getLogger(DocServerApp.class);
// logger.info("Configured logging system from logback-docsys.xml");
// } else {
// logger = LoggerFactory.getLogger(DocServerApp.class);
// logger.info("Configured logging within existing logging context.");
// }
// }
public static void main(String[] args) {
if (args.length > 0 && args[0].contains("help")) {
showHelp();

View File

@ -1,39 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Logback configuration for the docsys doc server: rolling file logging for
     io.nosqlbench.docsys, console (STDOUT) logging for everything else. -->
<configuration>
<statusListener class="ch.qos.logback.core.status.NopStatusListener"/>
<appender name="DOCSLOGS" class="ch.qos.logback.core.rolling.RollingFileAppender">
<param name="Append" value="false"/>
<append>false</append>
<file>logs/docserver.log</file>
<encoder>
<Pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</Pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<append>false</append>
<fileNamePattern>logs/archived/app.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<!-- each archived file: size max 50MB before rolling to a new index -->
<maxFileSize>50MB</maxFileSize>
<!-- total size of all archive files; when total size exceeds 250GB, the oldest archives are deleted -->
<totalSizeCap>250GB</totalSizeCap>
<!-- keep only 1 rolling period (one day) of archived history -->
<maxHistory>1</maxHistory>
<cleanHistoryOnStart>true</cleanHistoryOnStart>
</rollingPolicy>
</appender>
<logger name="io.nosqlbench.docsys" level="debug" additivity="false">
<appender-ref ref="DOCSLOGS"/>
</logger>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<Pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</Pattern>
</encoder>
</appender>
<root level="trace">
<appender-ref ref="STDOUT"/>
<!-- <appender-ref ref="DOCSLOGS"/>-->
</root>
</configuration>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,13 +23,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -3,18 +3,18 @@ package io.nosqlbench.activitytype.cql.codecsupport;
import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.UserType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.ArrayList;
import java.util.List;
import java.util.ServiceLoader;
public class UDTCodecInjector {
private final static Logger logger = LoggerFactory.getLogger(UDTCodecInjector.class);
private final static Logger logger = LogManager.getLogger(UDTCodecInjector.class);
private List<UserCodecProvider> codecProviders = new ArrayList<>();
private List<UserType> userTypes = new ArrayList<>();
private final List<UserCodecProvider> codecProviders = new ArrayList<>();
private final List<UserType> userTypes = new ArrayList<>();
public void injectUserProvidedCodecs(Session session, boolean allowAcrossKeyspaces) {

View File

@ -1,8 +1,8 @@
package io.nosqlbench.activitytype.cql.codecsupport;
import com.datastax.driver.core.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.lang.reflect.Constructor;
import java.util.*;
@ -10,7 +10,7 @@ import java.util.stream.Collectors;
public abstract class UserCodecProvider {
private final static Logger logger = LoggerFactory.getLogger(UserCodecProvider.class);
private final static Logger logger = LogManager.getLogger(UserCodecProvider.class);
public List<UDTTransformCodec> registerCodecsForCluster(
Session session,
@ -131,7 +131,7 @@ public abstract class UserCodecProvider {
.orElseThrow(
() -> new RuntimeException("Unable to find UDTJavaType annotation for " + codecClass.getCanonicalName())
);
return (Class<?>) javaType;
return javaType;
}

View File

@ -4,8 +4,8 @@ import com.datastax.driver.core.*;
import com.datastax.driver.core.policies.*;
import io.netty.util.HashedWheelTimer;
import io.nosqlbench.nb.api.errors.BasicError;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.net.InetSocketAddress;
import java.util.*;
@ -16,7 +16,7 @@ import java.util.regex.Pattern;
import java.util.stream.Collectors;
public class CQLOptions {
private final static Logger logger = LoggerFactory.getLogger(CQLOptions.class);
private final static Logger logger = LogManager.getLogger(CQLOptions.class);
private final static Pattern CORE_AND_MAX_RQ_PATTERN = Pattern.compile("(?<core>\\d+)(:(?<max>\\d+)(:(?<rq>\\d+))?)?(,(?<rcore>\\d+)(:(?<rmax>\\d+)(:(?<rrq>\\d+))?)?)?(,?heartbeat_interval_s:(?<heartbeatinterval>\\d+))?(,?idle_timeout_s:(?<idletimeout>\\d+))?(,?pool_timeout_ms:(?<pooltimeout>\\d+))?");
private final static Pattern PERCENTILE_EAGER_PATTERN = Pattern.compile("^p(?<pctile>[^:]+)(:(?<executions>\\d+))?(:(?<tracked>\\d+)ms)?$");

View File

@ -19,8 +19,8 @@ import io.nosqlbench.engine.api.activityapi.core.MultiPhaseAction;
import io.nosqlbench.engine.api.activityapi.core.SyncAction;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.List;
import java.util.concurrent.TimeUnit;
@ -28,7 +28,7 @@ import java.util.concurrent.TimeUnit;
@SuppressWarnings("Duplicates")
public class CqlAction implements SyncAction, MultiPhaseAction, ActivityDefObserver {
private final static Logger logger = LoggerFactory.getLogger(CqlAction.class);
private final static Logger logger = LogManager.getLogger(CqlAction.class);
private final int slot;
private final CqlActivity cqlActivity;
private final ActivityDef activityDef;

View File

@ -44,8 +44,8 @@ import io.nosqlbench.engine.api.util.TagFilter;
import io.nosqlbench.engine.api.util.Unit;
import io.nosqlbench.nb.api.errors.BasicError;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.io.FileWriter;
import java.io.IOException;
@ -56,7 +56,7 @@ import java.util.*;
@SuppressWarnings("Duplicates")
public class CqlActivity extends SimpleActivity implements Activity, ActivityDefObserver {
private final static Logger logger = LoggerFactory.getLogger(CqlActivity.class);
private final static Logger logger = LogManager.getLogger(CqlActivity.class);
private final ExceptionCountMetrics exceptionCountMetrics;
private final ExceptionHistoMetrics exceptionHistoMetrics;
private final ActivityDef activityDef;

View File

@ -34,11 +34,6 @@ public class CqlActivityType implements ActivityType<CqlActivity> {
throw new RuntimeException("Currently, the cql activity type requires yaml/workload activity parameter.");
}
// allow shortcut: yaml parameter provide the default alias name
if (activityDef.getAlias().equals(ActivityDef.DEFAULT_ALIAS)) {
activityDef.getParams().set("alias",yaml.get());
}
return new CqlActivity(activityDef);
}

View File

@ -23,8 +23,8 @@ import io.nosqlbench.engine.api.activityapi.core.ops.fluent.opfacets.SucceededOp
import io.nosqlbench.engine.api.activityapi.core.ops.fluent.opfacets.TrackedOp;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.List;
import java.util.concurrent.TimeUnit;
@ -33,7 +33,7 @@ import java.util.function.LongFunction;
@SuppressWarnings("Duplicates")
public class CqlAsyncAction extends BaseAsyncAction<CqlOpData, CqlActivity> {
private final static Logger logger = LoggerFactory.getLogger(CqlAsyncAction.class);
private final static Logger logger = LogManager.getLogger(CqlAsyncAction.class);
private final ActivityDef activityDef;
private List<RowCycleOperator> rowOps;

View File

@ -25,8 +25,8 @@ import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper;
import io.nosqlbench.virtdata.library.basics.shared.from_long.to_string.ModuloLineToString;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.ArrayList;
import java.util.List;
@ -40,11 +40,11 @@ import java.util.function.LongFunction;
*/
@ThreadSafeMapper
public class ModuloCSVLineToUUID implements LongFunction<UUID> {
private final static Logger logger = LoggerFactory.getLogger(ModuloLineToString.class);
private final static Logger logger = LogManager.getLogger(ModuloLineToString.class);
private List<String> lines = new ArrayList<>();
private final List<String> lines = new ArrayList<>();
private String filename;
private final String filename;
@Example({"ModuloCSVLineToUUID('data/myfile.csv','lat')","load values for 'lat' from the CSV file myfile.csv."})
public ModuloCSVLineToUUID(String filename, String fieldname) {

View File

@ -71,8 +71,8 @@ public class CustomFunc955 implements LongFunction<Map<?,?>>, ConfigAware {
}
@Override
public void applyConfig(Map<String, ?> elements) {
this.cluster = Optional.ofNullable(elements.get("cluster"))
public void applyConfig(Map<String, ?> providedConfig) {
this.cluster = Optional.ofNullable(providedConfig.get("cluster"))
.map(Cluster.class::cast)
.orElseThrow();
this.tupleType = cluster.getMetadata().newTupleType(DataType.cint(), DataType.bigint());
@ -80,8 +80,8 @@ public class CustomFunc955 implements LongFunction<Map<?,?>>, ConfigAware {
@Override
public ConfigModel getConfigModel() {
return new MutableConfigModel()
.add("<cluster>", Cluster.class)
return new MutableConfigModel(this)
.optional("<cluster>", Cluster.class)
.asReadOnly();
}
}

View File

@ -3,8 +3,8 @@ package io.nosqlbench.activitytype.cql.errorhandling;
import com.datastax.driver.core.exceptions.*;
import io.nosqlbench.activitytype.cql.errorhandling.exceptions.*;
import io.nosqlbench.engine.api.activityapi.cyclelog.buffers.results.ResultReadable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.ArrayList;
import java.util.HashMap;
@ -64,10 +64,10 @@ public enum CQLExceptionEnum implements ResultReadable {
EbdseCycleException(CqlGenericCycleException.class, 42),
MaxTriesExhaustedException(io.nosqlbench.activitytype.cql.errorhandling.exceptions.MaxTriesExhaustedException.class,43);
private final static Logger logger = LoggerFactory.getLogger(CQLExceptionEnum.class);
private final static Logger logger = LogManager.getLogger(CQLExceptionEnum.class);
private static Map<String, Integer> codesByName = getCodesByName();
private static String[] namesByCode = getNamesByCode();
private static final String[] namesByCode = getNamesByCode();
private final Class<? extends Exception> exceptionClass;
private final int resultCode;

View File

@ -8,18 +8,18 @@ import io.nosqlbench.activitytype.cql.errorhandling.exceptions.RowVerificationEx
import io.nosqlbench.engine.api.activityapi.errorhandling.CycleErrorHandler;
import io.nosqlbench.engine.api.activityapi.errorhandling.HashedErrorHandler;
import io.nosqlbench.engine.api.metrics.ExceptionCountMetrics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
public class HashedCQLErrorHandler extends HashedErrorHandler<Throwable, ErrorStatus> {
private static final Logger logger = LoggerFactory.getLogger(HashedCQLErrorHandler.class);
private static final Logger logger = LogManager.getLogger(HashedCQLErrorHandler.class);
// private static Set<Class<? extends Throwable>> UNVERIFIED_ERRORS = new HashSet<Class<? extends Throwable>>() {{
// add(RowVerificationException.class);
// add(ResultSetVerificationException.class);
// }};
private ExceptionCountMetrics exceptionCountMetrics;
private static ThreadLocal<Integer> tlResultCode = ThreadLocal.withInitial(() -> (0));
private final ExceptionCountMetrics exceptionCountMetrics;
private static final ThreadLocal<Integer> tlResultCode = ThreadLocal.withInitial(() -> (0));
public HashedCQLErrorHandler(ExceptionCountMetrics exceptionCountMetrics) {
this.exceptionCountMetrics = exceptionCountMetrics;

View File

@ -6,8 +6,8 @@ import io.nosqlbench.activitytype.cql.errorhandling.exceptions.CQLExceptionDetai
import io.nosqlbench.engine.api.activityapi.errorhandling.CycleErrorHandler;
import io.nosqlbench.engine.api.metrics.ExceptionCountMetrics;
import io.nosqlbench.engine.api.metrics.ExceptionHistoMetrics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
/**
* A contextualized error handler that can catch a cycle-specific error.
@ -42,10 +42,10 @@ import org.slf4j.LoggerFactory;
@SuppressWarnings("Duplicates")
public class NBCycleErrorHandler implements CycleErrorHandler<Throwable, ErrorStatus> {
private static final Logger logger = LoggerFactory.getLogger(NBCycleErrorHandler.class);
private static final Logger logger = LogManager.getLogger(NBCycleErrorHandler.class);
private ErrorResponse errorResponse;
private ExceptionCountMetrics exceptionCountMetrics;
private final ErrorResponse errorResponse;
private final ExceptionCountMetrics exceptionCountMetrics;
private final ExceptionHistoMetrics exceptionHistoMetrics;
private boolean throwExceptionOnStop=false;

View File

@ -3,8 +3,8 @@ package io.nosqlbench.activitytype.cql.statements.binders;
import com.datastax.driver.core.*;
import io.nosqlbench.activitytype.cql.core.CQLBindHelper;
import io.nosqlbench.virtdata.core.bindings.ValuesArrayBinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.List;
@ -15,7 +15,7 @@ import java.util.List;
* order to explain in more detail what is happening for users.
*/
public class DiagnosticPreparedBinder implements ValuesArrayBinder<PreparedStatement, Statement> {
public static final Logger logger = LoggerFactory.getLogger(DiagnosticPreparedBinder.class);
public static final Logger logger = LogManager.getLogger(DiagnosticPreparedBinder.class);
@Override
public Statement bindValues(PreparedStatement prepared, Object[] values) {
ColumnDefinitions columnDefinitions = prepared.getVariables();

View File

@ -3,8 +3,8 @@ package io.nosqlbench.activitytype.cql.statements.binders;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.Statement;
import io.nosqlbench.virtdata.core.bindings.ValuesArrayBinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.Arrays;
@ -19,7 +19,7 @@ import java.util.Arrays;
* will become the default.
*/
public class DirectArrayValuesBinder implements ValuesArrayBinder<PreparedStatement, Statement> {
public final static Logger logger = LoggerFactory.getLogger(DirectArrayValuesBinder.class);
public final static Logger logger = LogManager.getLogger(DirectArrayValuesBinder.class);
@Override
public Statement bindValues(PreparedStatement preparedStatement, Object[] objects) {

View File

@ -3,14 +3,14 @@ package io.nosqlbench.activitytype.cql.statements.binders;
import com.datastax.driver.core.*;
import io.nosqlbench.virtdata.api.bindings.VALUE;
import io.nosqlbench.virtdata.core.bindings.ValuesArrayBinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.nio.ByteBuffer;
import java.util.List;
public class UnsettableValuesBinder implements ValuesArrayBinder<PreparedStatement, Statement> {
private final static Logger logger = LoggerFactory.getLogger(UnsettableValuesBinder.class);
private final static Logger logger = LogManager.getLogger(UnsettableValuesBinder.class);
private final Session session;
private final CodecRegistry codecRegistry;

View File

@ -11,11 +11,12 @@ import io.nosqlbench.activitytype.cql.core.ProxyTranslator;
import io.nosqlbench.engine.api.activityapi.core.Shutdownable;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.metrics.ActivityMetrics;
import io.nosqlbench.engine.api.scripting.NashornEvaluator;
import io.nosqlbench.engine.api.scripting.ExprEvaluator;
import io.nosqlbench.engine.api.scripting.GraalJsEvaluator;
import io.nosqlbench.engine.api.util.SSLKsFactory;
import io.nosqlbench.nb.api.errors.BasicError;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import javax.net.ssl.SSLContext;
import java.io.File;
@ -28,7 +29,7 @@ import java.util.*;
public class CQLSessionCache implements Shutdownable {
private final static Logger logger = LoggerFactory.getLogger(CQLSessionCache.class);
private final static Logger logger = LogManager.getLogger(CQLSessionCache.class);
private final static String DEFAULT_SESSION_ID = "default";
private static final CQLSessionCache instance = new CQLSessionCache();
private final Map<String, Session> sessionCache = new HashMap<>();
@ -129,7 +130,7 @@ public class CQLSessionCache implements Shutdownable {
if (clusteropts.isPresent()) {
try {
logger.info("applying cbopts:" + clusteropts.get());
NashornEvaluator<DseCluster.Builder> clusterEval = new NashornEvaluator<>(DseCluster.Builder.class);
ExprEvaluator<DseCluster.Builder> clusterEval = new GraalJsEvaluator<>(DseCluster.Builder.class);
clusterEval.put("builder", builder);
String importEnv =
"load(\"nashorn:mozilla_compat.js\");\n" +

View File

@ -1,14 +1,14 @@
package io.nosqlbench.activitytype.cql.statements.core;
import com.datastax.driver.core.ConsistencyLevel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.*;
import java.util.stream.Collectors;
public class CQLStatementDef {
private final static Logger logger = LoggerFactory.getLogger(CQLStatementDef.class);
private final static Logger logger = LogManager.getLogger(CQLStatementDef.class);
private Map<String,String> params = new HashMap<>();
private String name = "";

View File

@ -1,7 +1,7 @@
package io.nosqlbench.activitytype.cql.statements.core;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.*;
import java.util.regex.Matcher;
@ -9,7 +9,7 @@ import java.util.regex.Pattern;
import java.util.stream.Collectors;
public class CQLStatementDefParser {
private final static Logger logger = LoggerFactory.getLogger(CQLStatementDefParser.class);
private final static Logger logger = LogManager.getLogger(CQLStatementDefParser.class);
// private final static Pattern templateToken = Pattern.compile("<<(\\w+(:(.+?))?)>>");
private final static Pattern stmtToken = Pattern.compile("\\?(\\w+[-_\\d\\w]*)|\\{(\\w+[-_\\d\\w.]*)}");
private final static String UNSET_VALUE = "UNSET-VALUE";
@ -93,9 +93,7 @@ public class CQLStatementDefParser {
if (!namedBindings.contains(tokenName)) {
missingBindings.add(tokenName);
} else {
if (missingAnchors.contains(tokenName)) {
missingAnchors.remove(tokenName);
}
missingAnchors.remove(tokenName);
}
}
@ -114,11 +112,11 @@ public class CQLStatementDefParser {
}
public static class ParseResult {
private Set<String> missingGenerators;
private Set<String> missingAnchors;
private String statement;
private final Set<String> missingGenerators;
private final Set<String> missingAnchors;
private final String statement;
private Map<String,String> bindings;
private String name;
private final String name;
public ParseResult(String stmt, String name, Map<String,String> bindings, Set<String> missingGenerators, Set<String> missingAnchors) {
this.missingGenerators = missingGenerators;

View File

@ -15,15 +15,15 @@ import io.nosqlbench.engine.api.metrics.ActivityMetrics;
import io.nosqlbench.virtdata.core.bindings.BindingsTemplate;
import io.nosqlbench.virtdata.core.bindings.ContextualBindingsArrayTemplate;
import io.nosqlbench.virtdata.core.bindings.ValuesArrayBinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.io.Writer;
import java.util.Map;
public class ReadyCQLStatementTemplate {
private final static Logger logger = LoggerFactory.getLogger(ReadyCQLStatementTemplate.class);
private final static Logger logger = LogManager.getLogger(ReadyCQLStatementTemplate.class);
private final Session session;
private final ContextualBindingsArrayTemplate<?, Statement> template;
private final long ratio;

View File

@ -3,25 +3,21 @@ package io.nosqlbench.activitytype.cql.statements.core;
import io.nosqlbench.engine.api.activityimpl.ActivityInitializationError;
import io.nosqlbench.nb.api.content.Content;
import io.nosqlbench.nb.api.content.NBIO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.yaml.snakeyaml.TypeDescription;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.Constructor;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
@SuppressWarnings("ALL")
public class YamlCQLStatementLoader {
private final static Logger logger = LoggerFactory.getLogger(YamlCQLStatementLoader.class);
private final static Logger logger = LogManager.getLogger(YamlCQLStatementLoader.class);
List<Function<String, String>> transformers = new ArrayList<>();
public YamlCQLStatementLoader() {

View File

@ -4,8 +4,8 @@ import com.datastax.driver.core.ColumnDefinitions;
import com.datastax.driver.core.Row;
import io.nosqlbench.activitytype.cql.api.RowCycleOperator;
import io.nosqlbench.virtdata.library.basics.core.threadstate.SharedState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.Arrays;
import java.util.HashMap;
@ -16,7 +16,7 @@ import java.util.stream.Collectors;
* Save specific variables to the thread local object map
*/
public class Save implements RowCycleOperator {
private final static Logger logger = LoggerFactory.getLogger(Save.class);
private final static Logger logger = LogManager.getLogger(Save.class);
ThreadLocal<HashMap<String, Object>> tl_objectMap = SharedState.tl_ObjectMap;

View File

@ -2,15 +2,15 @@ package io.nosqlbench.activitytype.cql.statements.rsoperators;
import com.datastax.driver.core.*;
import io.nosqlbench.activitytype.cql.api.ResultSetCycleOperator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
/**
* Logs a trace-level event for the result set, including
* cycles, rows, fetched row count, and the statement.
*/
public class CqlResultSetLogger implements ResultSetCycleOperator {
private final static Logger logger = LoggerFactory.getLogger(CqlResultSetLogger.class);
private final static Logger logger = LogManager.getLogger(CqlResultSetLogger.class);
private static String getQueryString(Statement stmt) {
if (stmt instanceof PreparedStatement) {

View File

@ -7,8 +7,8 @@ import com.datastax.driver.core.Statement;
import io.nosqlbench.activitytype.cql.api.ResultSetCycleOperator;
import io.nosqlbench.activitytype.cql.statements.modifiers.StatementModifier;
import io.nosqlbench.engine.api.util.SimpleConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.io.FileDescriptor;
import java.io.FileWriter;
@ -18,9 +18,9 @@ import java.util.Date;
public class TraceLogger implements ResultSetCycleOperator, StatementModifier {
private final static Logger logger = LoggerFactory.getLogger(TraceLogger.class);
private final static Logger logger = LogManager.getLogger(TraceLogger.class);
private static SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss.SSS");
private static final SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss.SSS");
private final long modulo;
private final String filename;
private final FileWriter writer;

View File

@ -4,7 +4,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>3.12.141-SNAPSHOT</version>
<version>3.12.157-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -23,13 +23,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>3.12.141-SNAPSHOT</version>
<version>3.12.157-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>3.12.146-SNAPSHOT</version>
<version>3.12.157-SNAPSHOT</version>
</dependency>
@ -45,25 +45,6 @@
<version>4.7.2</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.datastax.dse</groupId>-->
<!-- <artifactId>dse-java-driver-core</artifactId>-->
<!-- <version>2.4.0</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.datastax.dse</groupId>-->
<!-- <artifactId>dse-java-driver-core</artifactId>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.datastax.dse</groupId>-->
<!-- <artifactId>dse-java-driver-extras</artifactId>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.datastax.dse</groupId>-->
<!-- <artifactId>dse-java-driver-mapping</artifactId>-->
<!-- </dependency>-->
<!-- For CQL compression option -->
<dependency>
@ -77,6 +58,13 @@
<artifactId>snappy-java</artifactId>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>virtdata-lib-basics</artifactId>
<version>3.12.157-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
</dependencies>
</project>

View File

@ -2,17 +2,17 @@ package io.nosqlbench.activitytype.cqld4.codecsupport;
import com.datastax.oss.driver.api.core.session.Session;
import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.ArrayList;
import java.util.List;
import java.util.ServiceLoader;
public class UDTCodecInjector {
private final static Logger logger = LoggerFactory.getLogger(UDTCodecInjector.class);
private final static Logger logger = LogManager.getLogger(UDTCodecInjector.class);
private List<UserCodecProvider> codecProviders = new ArrayList<>();
private final List<UserCodecProvider> codecProviders = new ArrayList<>();
public void injectUserProvidedCodecs(Session session, boolean allowAcrossKeyspaces) {

View File

@ -6,8 +6,8 @@ import com.datastax.oss.driver.api.core.type.UserDefinedType;
import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry;
import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.lang.reflect.Constructor;
import java.util.*;
@ -15,7 +15,7 @@ import java.util.stream.Collectors;
public abstract class UserCodecProvider {
private final static Logger logger = LoggerFactory.getLogger(UserCodecProvider.class);
private final static Logger logger = LogManager.getLogger(UserCodecProvider.class);
public List<UDTTransformCodec<?>> registerCodecsForCluster(
Session session,
@ -139,7 +139,7 @@ public abstract class UserCodecProvider {
.orElseThrow(
() -> new RuntimeException("Unable to find UDTJavaType annotation for " + codecClass.getCanonicalName())
);
return (Class<?>) javaType;
return javaType;
}

View File

@ -1,7 +1,7 @@
package io.nosqlbench.activitytype.cqld4.core;
public class CQLOptions {
// private final static Logger logger = LoggerFactory.getLogger(CQLOptions.class);
// private final static Logger logger = LogManager.getLogger(CQLOptions.class);
//
// private final static Pattern CORE_AND_MAX_RQ_PATTERN = Pattern.compile("(?<core>\\d+)(:(?<max>\\d+)(:(?<rq>\\d+))?)?(,(?<rcore>\\d+)(:(?<rmax>\\d+)(:(?<rrq>\\d+))?)?)?(,?heartbeat_interval_s:(?<heartbeatinterval>\\d+))?(,?idle_timeout_s:(?<idletimeout>\\d+))?(,?pool_timeout_ms:(?<pooltimeout>\\d+))?");
// private final static Pattern PERCENTILE_EAGER_PATTERN = Pattern.compile("^p(?<pctile>[^:]+)(:(?<executions>\\d+))?(:(?<tracked>\\d+)ms)?$");

View File

@ -17,8 +17,8 @@ import io.nosqlbench.engine.api.activityapi.core.MultiPhaseAction;
import io.nosqlbench.engine.api.activityapi.core.SyncAction;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.List;
import java.util.concurrent.CompletionStage;
@ -27,7 +27,7 @@ import java.util.concurrent.TimeUnit;
@SuppressWarnings("Duplicates")
public class CqlAction implements SyncAction, MultiPhaseAction, ActivityDefObserver {
private final static Logger logger = LoggerFactory.getLogger(CqlAction.class);
private final static Logger logger = LogManager.getLogger(CqlAction.class);
private final int slot;
private final CqlActivity cqlActivity;
private final ActivityDef activityDef;

View File

@ -45,8 +45,8 @@ import io.nosqlbench.engine.api.util.SimpleConfig;
import io.nosqlbench.engine.api.templating.StrInterpolator;
import io.nosqlbench.engine.api.util.TagFilter;
import io.nosqlbench.engine.api.util.Unit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.io.FileWriter;
import java.io.IOException;
@ -57,7 +57,7 @@ import java.util.*;
@SuppressWarnings("Duplicates")
public class CqlActivity extends SimpleActivity implements Activity, ActivityDefObserver {
private final static Logger logger = LoggerFactory.getLogger(CqlActivity.class);
private final static Logger logger = LogManager.getLogger(CqlActivity.class);
private final ExceptionCountMetrics exceptionCountMetrics;
private final ExceptionHistoMetrics exceptionHistoMetrics;
private final ActivityDef activityDef;

View File

@ -3,7 +3,6 @@ package io.nosqlbench.activitytype.cqld4.core;
import com.datastax.oss.driver.api.core.data.TupleValue;
import com.datastax.oss.driver.api.core.type.UserDefinedType;
import io.nosqlbench.activitytype.cqld4.codecsupport.UDTJavaType;
import io.nosqlbench.engine.api.activityapi.core.ActionDispenser;
import io.nosqlbench.engine.api.activityapi.core.ActivityType;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
@ -35,11 +34,6 @@ public class CqlActivityType implements ActivityType<CqlActivity> {
throw new RuntimeException("Currently, the cql activity type requires yaml/workload activity parameter.");
}
// allow shortcut: yaml parameter provide the default alias name
if (activityDef.getAlias().equals(ActivityDef.DEFAULT_ALIAS)) {
activityDef.getParams().set("alias",yaml.get());
}
return new CqlActivity(activityDef);
}

View File

@ -22,8 +22,8 @@ import io.nosqlbench.engine.api.activityapi.core.ops.fluent.opfacets.SucceededOp
import io.nosqlbench.engine.api.activityapi.core.ops.fluent.opfacets.TrackedOp;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.List;
import java.util.concurrent.CompletionStage;
@ -33,7 +33,7 @@ import java.util.function.LongFunction;
@SuppressWarnings("Duplicates")
public class CqlAsyncAction extends BaseAsyncAction<CqlOpData, CqlActivity> {
private final static Logger logger = LoggerFactory.getLogger(CqlAsyncAction.class);
private final static Logger logger = LogManager.getLogger(CqlAsyncAction.class);
private final ActivityDef activityDef;
private List<RowCycleOperator> rowOps;
@ -79,7 +79,7 @@ public class CqlAsyncAction extends BaseAsyncAction<CqlOpData, CqlActivity> {
@Override
public void startOpCycle(TrackedOp<CqlOpData> opc) {
CqlOpData cqlop = opc.getData();
CqlOpData cqlop = opc.getOpData();
long cycle = opc.getCycle();
// bind timer covers all statement selection and binding, skipping, transforming logic
@ -123,7 +123,7 @@ public class CqlAsyncAction extends BaseAsyncAction<CqlOpData, CqlActivity> {
public void onSuccess(StartedOp<CqlOpData> sop, AsyncResultSet resultSet) {
CqlOpData cqlop = sop.getData();
CqlOpData cqlop = sop.getOpData();
HashedCQLErrorHandler.resetThreadStatusCode();
if (cqlop.skipped) {
@ -218,7 +218,7 @@ public class CqlAsyncAction extends BaseAsyncAction<CqlOpData, CqlActivity> {
public void onFailure(StartedOp<CqlOpData> startedOp) {
CqlOpData cqlop = startedOp.getData();
CqlOpData cqlop = startedOp.getOpData();
long serviceTime = startedOp.getCurrentServiceTimeNanos();
// Even if this is retryable, we expose error events

View File

@ -1,18 +1,12 @@
package io.nosqlbench.activitytype.cqld4.errorhandling;
import com.datastax.dse.driver.api.core.servererrors.UnfitClientException;
import com.datastax.oss.driver.api.core.*;
import com.datastax.oss.driver.api.core.RequestThrottlingException;
import com.datastax.oss.driver.api.core.connection.ClosedConnectionException;
import com.datastax.oss.driver.api.core.connection.ConnectionInitException;
import com.datastax.oss.driver.api.core.connection.FrameTooLongException;
import com.datastax.oss.driver.api.core.connection.HeartbeatException;
import com.datastax.oss.driver.api.core.servererrors.*;
import com.datastax.oss.driver.internal.core.channel.ClusterNameMismatchException;
import com.datastax.oss.driver.shaded.guava.common.collect.ComputationException;
import io.nosqlbench.activitytype.cqld4.errorhandling.exceptions.CqlGenericCycleException;
import io.nosqlbench.engine.api.activityapi.cyclelog.buffers.results.ResultReadable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.HashMap;
@ -92,10 +86,10 @@ public enum CQLExceptionEnum implements ResultReadable {
RequestThrottlingException(RequestThrottlingException.class,57),
CqlGenericCycleException(CqlGenericCycleException.class,58);
private final static Logger logger = LoggerFactory.getLogger(CQLExceptionEnum.class);
private final static Logger logger = LogManager.getLogger(CQLExceptionEnum.class);
private static Map<String, Integer> codesByName = getCodesByName();
private static String[] namesByCode = getNamesByCode();
private static final String[] namesByCode = getNamesByCode();
private final Class<? extends Exception> exceptionClass;
private final int resultCode;

View File

@ -16,18 +16,18 @@ import io.nosqlbench.activitytype.cqld4.errorhandling.exceptions.RowVerification
import io.nosqlbench.engine.api.activityapi.errorhandling.CycleErrorHandler;
import io.nosqlbench.engine.api.activityapi.errorhandling.HashedErrorHandler;
import io.nosqlbench.engine.api.metrics.ExceptionCountMetrics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
public class HashedCQLErrorHandler extends HashedErrorHandler<Throwable, ErrorStatus> {
private static final Logger logger = LoggerFactory.getLogger(HashedCQLErrorHandler.class);
private static final Logger logger = LogManager.getLogger(HashedCQLErrorHandler.class);
// private static Set<Class<? extends Throwable>> UNVERIFIED_ERRORS = new HashSet<Class<? extends Throwable>>() {{
// add(RowVerificationException.class);
// add(ResultSetVerificationException.class);
// }};
private ExceptionCountMetrics exceptionCountMetrics;
private static ThreadLocal<Integer> tlResultCode = ThreadLocal.withInitial(() -> (0));
private final ExceptionCountMetrics exceptionCountMetrics;
private static final ThreadLocal<Integer> tlResultCode = ThreadLocal.withInitial(() -> (0));
public HashedCQLErrorHandler(ExceptionCountMetrics exceptionCountMetrics) {
this.exceptionCountMetrics = exceptionCountMetrics;

View File

@ -5,8 +5,8 @@ import io.nosqlbench.activitytype.cqld4.errorhandling.exceptions.CQLCycleWithSta
import io.nosqlbench.engine.api.activityapi.errorhandling.CycleErrorHandler;
import io.nosqlbench.engine.api.metrics.ExceptionCountMetrics;
import io.nosqlbench.engine.api.metrics.ExceptionHistoMetrics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
/**
* A contextualized error handler that can catch a cycle-specific error.
@ -41,7 +41,7 @@ import org.slf4j.LoggerFactory;
@SuppressWarnings("Duplicates")
public class NBCycleErrorHandler implements CycleErrorHandler<Throwable, ErrorStatus> {
private static final Logger logger = LoggerFactory.getLogger(NBCycleErrorHandler.class);
private static final Logger logger = LogManager.getLogger(NBCycleErrorHandler.class);
private final ErrorResponse errorResponse;
private final ExceptionCountMetrics exceptionCountMetrics;

View File

@ -5,8 +5,8 @@ import com.datastax.oss.driver.api.core.cql.*;
import com.datastax.oss.driver.api.core.type.DataType;
import io.nosqlbench.activitytype.cqld4.core.CQLBindHelper;
import io.nosqlbench.virtdata.core.bindings.ValuesArrayBinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.ArrayList;
import java.util.List;
@ -19,7 +19,7 @@ import java.util.List;
*/
public class DiagnosticPreparedBinder implements ValuesArrayBinder<PreparedStatement, Statement<?>> {
public static final Logger logger = LoggerFactory.getLogger(DiagnosticPreparedBinder.class);
public static final Logger logger = LogManager.getLogger(DiagnosticPreparedBinder.class);
private final CqlSession session;
public DiagnosticPreparedBinder(CqlSession session) {

View File

@ -4,8 +4,8 @@ import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.PreparedStatement;
import com.datastax.oss.driver.api.core.cql.Statement;
import io.nosqlbench.virtdata.core.bindings.ValuesArrayBinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.Arrays;
@ -20,7 +20,7 @@ import java.util.Arrays;
* will become the default.
*/
public class DirectArrayValuesBinder implements ValuesArrayBinder<PreparedStatement, Statement<?>> {
public final static Logger logger = LoggerFactory.getLogger(DirectArrayValuesBinder.class);
public final static Logger logger = LogManager.getLogger(DirectArrayValuesBinder.class);
private final CqlSession session;
public DirectArrayValuesBinder(CqlSession session) {

View File

@ -1,6 +1,5 @@
package io.nosqlbench.activitytype.cqld4.statements.binders;
import com.datastax.driver.core.*;
import com.datastax.oss.driver.api.core.ProtocolVersion;
import com.datastax.oss.driver.api.core.cql.*;
import com.datastax.oss.driver.api.core.session.Session;
@ -9,16 +8,15 @@ import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry;
import io.nosqlbench.virtdata.api.bindings.VALUE;
import io.nosqlbench.virtdata.core.bindings.ValuesArrayBinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public class UnsettableValuesBinder implements ValuesArrayBinder<PreparedStatement, Statement<?>> {
private final static Logger logger = LoggerFactory.getLogger(UnsettableValuesBinder.class);
private final static Logger logger = LogManager.getLogger(UnsettableValuesBinder.class);
private final Session session;
private final CodecRegistry codecRegistry;

View File

@ -19,12 +19,12 @@ import io.nosqlbench.activitytype.cqld4.core.ProxyTranslator;
import io.nosqlbench.engine.api.activityapi.core.Shutdownable;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.metrics.ActivityMetrics;
import io.nosqlbench.engine.api.scripting.NashornEvaluator;
import io.nosqlbench.engine.api.scripting.GraalJsEvaluator;
import io.nosqlbench.engine.api.util.SSLKsFactory;
import org.apache.tinkerpop.gremlin.driver.Cluster;
import org.graalvm.options.OptionMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.io.File;
import java.io.IOException;
@ -37,10 +37,10 @@ import java.util.concurrent.ConcurrentHashMap;
public class CQLSessionCache implements Shutdownable {
private final static Logger logger = LoggerFactory.getLogger(CQLSessionCache.class);
private final static Logger logger = LogManager.getLogger(CQLSessionCache.class);
private final static String DEFAULT_SESSION_ID = "default";
private static CQLSessionCache instance = new CQLSessionCache();
private Map<String, SessionConfig> sessionCache = new HashMap<>();
private static final CQLSessionCache instance = new CQLSessionCache();
private final Map<String, SessionConfig> sessionCache = new HashMap<>();
public final static class SessionConfig extends ConcurrentHashMap<String,String> {
@ -170,7 +170,7 @@ public class CQLSessionCache implements Shutdownable {
if (clusteropts.isPresent()) {
try {
logger.info("applying cbopts:" + clusteropts.get());
NashornEvaluator<CqlSessionBuilder> clusterEval = new NashornEvaluator<>(CqlSessionBuilder.class);
GraalJsEvaluator<CqlSessionBuilder> clusterEval = new GraalJsEvaluator<>(CqlSessionBuilder.class);
clusterEval.put("builder", builder);
String importEnv =
"load(\"nashorn:mozilla_compat.js\");\n" +

View File

@ -1,14 +1,14 @@
package io.nosqlbench.activitytype.cqld4.statements.core;
import com.datastax.oss.driver.api.core.ConsistencyLevel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.*;
import java.util.stream.Collectors;
public class CQLStatementDef {
private final static Logger logger = LoggerFactory.getLogger(CQLStatementDef.class);
private final static Logger logger = LogManager.getLogger(CQLStatementDef.class);
private Map<String,String> params = new HashMap<>();
private String name = "";

View File

@ -1,7 +1,7 @@
package io.nosqlbench.activitytype.cqld4.statements.core;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.*;
import java.util.regex.Matcher;
@ -9,7 +9,7 @@ import java.util.regex.Pattern;
import java.util.stream.Collectors;
public class CQLStatementDefParser {
private final static Logger logger = LoggerFactory.getLogger(CQLStatementDefParser.class);
private final static Logger logger = LogManager.getLogger(CQLStatementDefParser.class);
// private final static Pattern templateToken = Pattern.compile("<<(\\w+(:(.+?))?)>>");
private final static Pattern stmtToken = Pattern.compile("\\?(\\w+[-_\\d\\w]*)|\\{(\\w+[-_\\d\\w.]*)}");
private final static String UNSET_VALUE = "UNSET-VALUE";
@ -93,9 +93,7 @@ public class CQLStatementDefParser {
if (!namedBindings.contains(tokenName)) {
missingBindings.add(tokenName);
} else {
if (missingAnchors.contains(tokenName)) {
missingAnchors.remove(tokenName);
}
missingAnchors.remove(tokenName);
}
}
@ -114,11 +112,11 @@ public class CQLStatementDefParser {
}
public static class ParseResult {
private Set<String> missingGenerators;
private Set<String> missingAnchors;
private String statement;
private final Set<String> missingGenerators;
private final Set<String> missingAnchors;
private final String statement;
private Map<String,String> bindings;
private String name;
private final String name;
public ParseResult(String stmt, String name, Map<String,String> bindings, Set<String> missingGenerators, Set<String> missingAnchors) {
this.missingGenerators = missingGenerators;

View File

@ -16,15 +16,15 @@ import io.nosqlbench.engine.api.metrics.ActivityMetrics;
import io.nosqlbench.virtdata.core.bindings.BindingsTemplate;
import io.nosqlbench.virtdata.core.bindings.ContextualBindingsArrayTemplate;
import io.nosqlbench.virtdata.core.bindings.ValuesArrayBinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.io.Writer;
import java.util.Map;
public class ReadyCQLStatementTemplate {
private final static Logger logger = LoggerFactory.getLogger(ReadyCQLStatementTemplate.class);
private final static Logger logger = LogManager.getLogger(ReadyCQLStatementTemplate.class);
private final Session session;
private final ContextualBindingsArrayTemplate<?, Statement<?>> template;
private final long ratio;

View File

@ -3,8 +3,8 @@ package io.nosqlbench.activitytype.cqld4.statements.core;
import io.nosqlbench.engine.api.activityimpl.ActivityInitializationError;
import io.nosqlbench.nb.api.content.Content;
import io.nosqlbench.nb.api.content.NBIO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.yaml.snakeyaml.TypeDescription;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.Constructor;
@ -17,7 +17,7 @@ import java.util.function.Function;
@SuppressWarnings("ALL")
public class YamlCQLStatementLoader {
private final static Logger logger = LoggerFactory.getLogger(YamlCQLStatementLoader.class);
private final static Logger logger = LogManager.getLogger(YamlCQLStatementLoader.class);
List<Function<String, String>> transformers = new ArrayList<>();
public YamlCQLStatementLoader() {

View File

@ -1,12 +1,11 @@
package io.nosqlbench.activitytype.cqld4.statements.rowoperators;
import com.datastax.oss.driver.api.core.cql.ColumnDefinition;
import com.datastax.oss.driver.api.core.cql.ColumnDefinitions;
import com.datastax.oss.driver.api.core.cql.Row;
import io.nosqlbench.activitytype.cqld4.api.RowCycleOperator;
import io.nosqlbench.virtdata.library.basics.core.threadstate.SharedState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.Arrays;
import java.util.HashMap;
@ -18,11 +17,11 @@ import java.util.stream.StreamSupport;
* Save specific variables to the thread local object map
*/
public class Save implements RowCycleOperator {
private final static Logger logger = LoggerFactory.getLogger(Save.class);
private final static Logger logger = LogManager.getLogger(Save.class);
ThreadLocal<HashMap<String, Object>> tl_objectMap = SharedState.tl_ObjectMap;
private String[] varnames;
private final String[] varnames;
public Save(String... varnames) {
this.varnames = varnames;

View File

@ -2,15 +2,15 @@ package io.nosqlbench.activitytype.cqld4.statements.rsoperators;
import com.datastax.oss.driver.api.core.cql.*;
import io.nosqlbench.activitytype.cqld4.api.D4ResultSetCycleOperator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
/**
* Logs a trace-level event for the result set, including
* cycles, rows, fetched row count, and the statement.
*/
public class CqlD4ResultSetLogger implements D4ResultSetCycleOperator {
private final static Logger logger = LoggerFactory.getLogger(CqlD4ResultSetLogger.class);
private final static Logger logger = LogManager.getLogger(CqlD4ResultSetLogger.class);
private static String getQueryString(Statement stmt) {
if (stmt instanceof PreparedStatement) {

View File

@ -4,8 +4,8 @@ import com.datastax.oss.driver.api.core.cql.*;
import io.nosqlbench.activitytype.cqld4.api.D4ResultSetCycleOperator;
import io.nosqlbench.activitytype.cqld4.core.StatementModifier;
import io.nosqlbench.engine.api.util.SimpleConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.io.FileDescriptor;
import java.io.FileWriter;
@ -15,9 +15,9 @@ import java.util.Date;
public class TraceLogger implements D4ResultSetCycleOperator, StatementModifier {
private final static Logger logger = LoggerFactory.getLogger(TraceLogger.class);
private final static Logger logger = LogManager.getLogger(TraceLogger.class);
private static SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss.SSS");
private static final SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss.SSS");
private final long modulo;
private final String filename;
private final FileWriter writer;

View File

@ -4,7 +4,7 @@
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -24,13 +24,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-cql-shaded</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -5,8 +5,8 @@ import io.nosqlbench.activitytype.cql.statements.rsoperators.AssertSingleRowResu
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.virtdata.core.bindings.Bindings;
import io.nosqlbench.virtdata.core.bindings.BindingsTemplate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.ArrayList;
import java.util.List;
@ -15,7 +15,7 @@ import java.util.Optional;
public class CqlVerifyActivity extends CqlActivity {
private final static Logger logger = LoggerFactory.getLogger(CqlVerifyActivity.class);
private final static Logger logger = LogManager.getLogger(CqlVerifyActivity.class);
private BindingsTemplate expectedValuesTemplate;
private VerificationMetrics verificationMetrics;

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -20,18 +20,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>4.15.6-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -24,8 +24,8 @@ import io.nosqlbench.engine.api.activityapi.cyclelog.buffers.op_output.StrideOut
import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiter;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.activityimpl.ParameterMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.List;
import java.util.concurrent.BlockingQueue;
@ -38,7 +38,7 @@ import java.util.stream.Collectors;
public class AsyncDiagAction extends BaseAsyncAction<DiagOpData, DiagActivity> implements Thread.UncaughtExceptionHandler, StrideOutputConsumer<DiagOpData> {
private final static Logger logger = LoggerFactory.getLogger(AsyncDiagAction.class);
private final static Logger logger = LogManager.getLogger(AsyncDiagAction.class);
private long lastUpdate;
private long quantizedInterval;
@ -213,8 +213,8 @@ public class AsyncDiagAction extends BaseAsyncAction<DiagOpData, DiagActivity> i
private final AsyncDiagAction action;
AsyncDiagAction mainContext;
private volatile boolean running = true;
private Thread thread;
private String name;
private final Thread thread;
private final String name;
public OpFinisher(String name, BlockingQueue<StartedOp<DiagOpData>> queue, AsyncDiagAction action) {
this.queue = queue;

View File

@ -20,16 +20,16 @@ import io.nosqlbench.engine.api.activityapi.core.MultiPhaseAction;
import io.nosqlbench.engine.api.activityapi.core.SyncAction;
import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiter;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
public class DiagAction implements SyncAction, ActivityDefObserver, MultiPhaseAction {
private final static Logger logger = LoggerFactory.getLogger(DiagAction.class);
private final static Logger logger = LogManager.getLogger(DiagAction.class);
private final ActivityDef activityDef;
private final DiagActivity diagActivity;
private int slot;
private final int slot;
private long lastUpdate;
private long quantizedInterval;
private long reportModulo;

View File

@ -16,14 +16,13 @@ package io.nosqlbench.activitytype.diag;
import io.nosqlbench.engine.api.activityapi.core.Action;
import io.nosqlbench.engine.api.activityapi.core.ActionDispenser;
import org.slf4j.Logger;
import static org.slf4j.LoggerFactory.getLogger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class DiagActionDispenser implements ActionDispenser {
private final static Logger logger = getLogger(DiagActionDispenser.class);
private DiagActivity activity;
private final static Logger logger = LogManager.getLogger(DiagActionDispenser.class);
private final DiagActivity activity;
public DiagActionDispenser(DiagActivity activity) {
this.activity = activity;

View File

@ -19,8 +19,8 @@ import io.nosqlbench.engine.api.activityapi.core.ActionDispenser;
import io.nosqlbench.engine.api.activityapi.core.ActivityType;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.nb.annotations.Service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
/**
* The DiagActivity, aka "diag", is simply a diagnostic activity.
@ -33,7 +33,7 @@ import org.slf4j.LoggerFactory;
@Service(ActivityType.class)
public class DiagActivityType implements ActivityType<DiagActivity> {
private static final Logger logger = LoggerFactory.getLogger(DiagActivityType.class);
private static final Logger logger = LogManager.getLogger(DiagActivityType.class);
@Override
public String getName() {

View File

@ -17,14 +17,14 @@
package io.nosqlbench.activitytype.diag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.security.InvalidParameterException;
import java.util.concurrent.atomic.AtomicLong;
public class SequenceBlocker {
private final static Logger logger = LoggerFactory.getLogger(SequenceBlocker.class);
private final static Logger logger = LogManager.getLogger(SequenceBlocker.class);
private final AtomicLong sequence;
private final AtomicLong waiting=new AtomicLong(0L);
private final boolean errorsAreFatal;

View File

@ -1,266 +1,259 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>3.12.155-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<parent>
<groupId>io.nosqlbench</groupId>
<artifactId>mvn-defaults</artifactId>
<version>4.15.6-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<artifactId>driver-dsegraph-shaded</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<artifactId>driver-dsegraph-shaded</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<description>
A DSE Graph ActivityType driver for nosqlbench, based on http://nosqlbench.io/
</description>
<description>
A DSE Graph ActivityType driver for nosqlbench, based on http://nosqlbench.io/
</description>
<dependencies>
<dependencies>
<!-- core dependencies -->
<!-- core dependencies -->
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-graph</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-graph</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-core</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-core</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-extras</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-mapping</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-extras</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.datastax.dse</groupId>
<artifactId>dse-java-driver-mapping</artifactId>
<version>1.9.0</version>
</dependency>
<!-- For CQL compression option -->
<dependency>
<groupId>org.lz4</groupId>
<artifactId>lz4-java</artifactId>
</dependency>
<!-- For CQL compression option -->
<dependency>
<groupId>org.lz4</groupId>
<artifactId>lz4-java</artifactId>
</dependency>
<!-- For CQL compression option -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</dependency>
<!-- For CQL compression option -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr4-runtime</artifactId>
<version>4.8</version>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr4-runtime</artifactId>
<version>4.8</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>io.netty</groupId>-->
<!-- <artifactId>netty-transport-native-epoll</artifactId>-->
<!-- <version>4.1.47.Final</version>-->
<!-- <classifier>linux-x86_64</classifier>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>io.netty</groupId>-->
<!-- <artifactId>netty-transport-native-epoll</artifactId>-->
<!-- <version>4.1.47.Final</version>-->
<!-- <classifier>linux-x86_64</classifier>-->
<!-- </dependency>-->
<!-- test only scope -->
<!-- test only scope -->
<!-- This is added as shaded to satisfy old jmx reporting dependencies-->
<!-- This is added as shaded to satisfy old jmx reporting dependencies-->
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>3.2.2</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.apache.commons</groupId>-->
<!-- <artifactId>commons-lang3</artifactId>-->
<!-- <version>3.7</version>-->
<!-- </dependency>-->
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>3.2.2</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.apache.commons</groupId>-->
<!-- <artifactId>commons-lang3</artifactId>-->
<!-- <version>3.7</version>-->
<!-- </dependency>-->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
<!-- test only scope -->
<!-- test only scope -->
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.13.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.13.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core-java8</artifactId>
<version>1.0.0m1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core-java8</artifactId>
<version>1.0.0m1</version>
<scope>test</scope>
</dependency>
<!-- compile only scope -->
<!-- compile only scope -->
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.23</version>
</dependency>
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.23</version>
</dependency>
</dependencies>
</dependencies>
<build>
<plugins>
<build>
<plugins>
<!--
If this plugin is re-enabled, the local CQL grammar will
be overwritten. The grammar has some syntax issues, so
fixes will be made to it before it is submitted back.
<!--
If this plugin is re-enabled, the local CQL grammar will
be overwritten. The grammar has some syntax issues, so
fixes will be made to it before it is submitted back.
(lack of composite key syntax, nested type syntax, etc)
-->
<!-- <plugin>-->
<!-- <groupId>com.googlecode.maven-download-plugin</groupId>-->
<!-- <artifactId>download-maven-plugin</artifactId>-->
<!-- <version>1.4.0</version>-->
<!-- <executions>-->
<!-- <execution>-->
<!-- <id>get-cql-lexer</id>-->
<!-- <phase>generate-sources</phase>-->
<!-- <goals>-->
<!-- <goal>wget</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- <url>-->
<!-- https://raw.githubusercontent.com/antlr/grammars-v4/master/cql3/CqlLexer.g4-->
<!-- </url>-->
<!-- <outputFileName>CqlLexer.g4</outputFileName>-->
<!-- <outputDirectory>src/main/grammars/cql3/-->
<!-- </outputDirectory>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- <execution>-->
<!-- <id>get-cql-parser</id>-->
<!-- <phase>generate-sources</phase>-->
<!-- <goals>-->
<!-- <goal>wget</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- <url>-->
<!-- https://raw.githubusercontent.com/antlr/grammars-v4/master/cql3/CqlParser.g4-->
<!-- </url>-->
<!-- <outputFileName>CqlParser.g4</outputFileName>-->
<!-- <outputDirectory>src/main/grammars/cql3/-->
<!-- </outputDirectory>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- </executions>-->
<!-- </plugin>-->
(lack of composite key syntax, nested type syntax, etc)
-->
<!-- <plugin>-->
<!-- <groupId>com.googlecode.maven-download-plugin</groupId>-->
<!-- <artifactId>download-maven-plugin</artifactId>-->
<!-- <version>1.4.0</version>-->
<!-- <executions>-->
<!-- <execution>-->
<!-- <id>get-cql-lexer</id>-->
<!-- <phase>generate-sources</phase>-->
<!-- <goals>-->
<!-- <goal>wget</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- <url>-->
<!-- https://raw.githubusercontent.com/antlr/grammars-v4/master/cql3/CqlLexer.g4-->
<!-- </url>-->
<!-- <outputFileName>CqlLexer.g4</outputFileName>-->
<!-- <outputDirectory>src/main/grammars/cql3/-->
<!-- </outputDirectory>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- <execution>-->
<!-- <id>get-cql-parser</id>-->
<!-- <phase>generate-sources</phase>-->
<!-- <goals>-->
<!-- <goal>wget</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- <url>-->
<!-- https://raw.githubusercontent.com/antlr/grammars-v4/master/cql3/CqlParser.g4-->
<!-- </url>-->
<!-- <outputFileName>CqlParser.g4</outputFileName>-->
<!-- <outputDirectory>src/main/grammars/cql3/-->
<!-- </outputDirectory>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- </executions>-->
<!-- </plugin>-->
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr4-maven-plugin</artifactId>
<version>4.8</version>
<configuration>
<sourceDirectory>src/main/grammars/cql3
</sourceDirectory>
<arguments>
<argument>-package</argument>
<argument>io.nosqlbench.generators.cql.generated
</argument>
</arguments>
<outputDirectory>
src/main/java/io/nosqlbench/generators/cql/generated
</outputDirectory>
</configuration>
<executions>
<execution>
<id>antlr</id>
<goals>
<goal>antlr4</goal>
</goals>
<phase>generate-sources</phase>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr4-maven-plugin</artifactId>
<version>4.8</version>
<configuration>
<sourceDirectory>src/main/grammars/cql3
</sourceDirectory>
<arguments>
<argument>-package</argument>
<argument>io.nosqlbench.generators.cql.generated
</argument>
</arguments>
<outputDirectory>
src/main/java/io/nosqlbench/generators/cql/generated
</outputDirectory>
</configuration>
<executions>
<execution>
<id>antlr</id>
<goals>
<goal>antlr4</goal>
</goals>
<phase>generate-sources</phase>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.3</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
<promoteTransitiveDependencies>true</promoteTransitiveDependencies>
<createSourcesJar>true</createSourcesJar>
<!-- <shadedArtifactAttached>true</shadedArtifactAttached>-->
<!-- <shadedClassifierName>shaded</shadedClassifierName>-->
<relocations>
<relocation>
<pattern>com.google.common</pattern>
<shadedPattern>com.datastax.internal.com_google_common</shadedPattern>
</relocation>
<!-- <relocation>-->
<!-- <pattern>com.datastax</pattern>-->
<!-- <shadedPattern>dse19.com.datastax</shadedPattern>-->
<!-- </relocation>-->
<!-- <relocation>-->
<!-- <pattern>io.netty</pattern>-->
<!-- <shadedPattern>dse19.io.netty</shadedPattern>-->
<!-- </relocation>-->
</relocations>
<artifactSet>
<includes>
<include>*:*</include>
</includes>
</artifactSet>
<transformers combine.children="append">
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>io.nosqlbench.engine.cli.NBCLI</mainClass>
</transformer>
</transformers>
<!-- <finalName>${project.artifactId}</finalName>-->
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</plugin>
</plugins>
</build>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.3</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
<promoteTransitiveDependencies>true</promoteTransitiveDependencies>
<createSourcesJar>true</createSourcesJar>
<!-- <shadedArtifactAttached>true</shadedArtifactAttached>-->
<!-- <shadedClassifierName>shaded</shadedClassifierName>-->
<relocations>
<relocation>
<pattern>com.google.common</pattern>
<shadedPattern>com.datastax.internal.com_google_common</shadedPattern>
</relocation>
<!-- <relocation>-->
<!-- <pattern>com.datastax</pattern>-->
<!-- <shadedPattern>dse19.com.datastax</shadedPattern>-->
<!-- </relocation>-->
<!-- <relocation>-->
<!-- <pattern>io.netty</pattern>-->
<!-- <shadedPattern>dse19.io.netty</shadedPattern>-->
<!-- </relocation>-->
</relocations>
<artifactSet>
<includes>
<include>*:*</include>
</includes>
</artifactSet>
<transformers combine.children="append">
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>io.nosqlbench.engine.cli.NBCLI</mainClass>
</transformer>
</transformers>
<!-- <finalName>${project.artifactId}</finalName>-->
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -11,18 +11,18 @@ import io.nosqlbench.engine.api.activityapi.core.ActivityDefObserver;
import io.nosqlbench.engine.api.activityapi.core.SyncAction;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.List;
@SuppressWarnings("Duplicates")
public class GraphAction implements SyncAction, ActivityDefObserver {
private static final Logger logger = LoggerFactory.getLogger(GraphAction.class);
private static final Logger logger = LogManager.getLogger(GraphAction.class);
List<BindableGraphStatement> readyGraphStmts;
private int slot;
private GraphActivity activity;
private final int slot;
private final GraphActivity activity;
private int maxTries = 10;
private boolean showstmts;
private GraphErrorHandler graphErrorHandler;
@ -74,7 +74,7 @@ public class GraphAction implements SyncAction, ActivityDefObserver {
} catch (Exception e) {
if (!graphErrorHandler.HandleError(e, simpleGraphStatement, cycleValue)) {
e.printStackTrace();
logger.error(e.toString(),e);
logger.error(e.toString(), e);
break;
}
}

View File

@ -5,7 +5,7 @@ import io.nosqlbench.engine.api.activityapi.core.ActionDispenser;
public class GraphActionDispenser implements ActionDispenser {
private GraphActivity activity;
private final GraphActivity activity;
public GraphActionDispenser(GraphActivity activity) {
this.activity = activity;

View File

@ -17,18 +17,17 @@ import io.nosqlbench.engine.api.activityapi.planning.SequencerType;
import io.nosqlbench.engine.api.activityconfig.ParsedStmt;
import io.nosqlbench.engine.api.activityconfig.StatementsLoader;
import io.nosqlbench.engine.api.activityconfig.yaml.OpTemplate;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtDef;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtsDocList;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.activityimpl.ParameterMap;
import io.nosqlbench.engine.api.activityimpl.SimpleActivity;
import io.nosqlbench.engine.api.metrics.ActivityMetrics;
import io.nosqlbench.engine.api.metrics.ExceptionMeterMetrics;
import io.nosqlbench.engine.api.scripting.NashornEvaluator;
import io.nosqlbench.engine.api.scripting.GraalJsEvaluator;
import io.nosqlbench.engine.api.templating.StrInterpolator;
import io.nosqlbench.engine.api.util.TagFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.nio.file.Files;
@ -37,12 +36,10 @@ import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
@SuppressWarnings("Duplicates")
public class GraphActivity extends SimpleActivity implements ActivityDefObserver {
private final static Logger logger = LoggerFactory.getLogger(GraphActivity.class);
private final static Logger logger = LogManager.getLogger(GraphActivity.class);
public Timer bindTimer;
public Timer executeTimer;
@ -89,7 +86,7 @@ public class GraphActivity extends SimpleActivity implements ActivityDefObserver
);
SequencePlanner<ReadyGraphStatementTemplate> planner = new SequencePlanner<>(sequencerType);
String yaml_loc = activityDef.getParams().getOptionalString("yaml","workload").orElse("default");
String yaml_loc = activityDef.getParams().getOptionalString("yaml", "workload").orElse("default");
StrInterpolator interp = new StrInterpolator(activityDef);
StmtsDocList unfiltered = StatementsLoader.loadPath(logger, yaml_loc, interp, "activities");
@ -191,7 +188,7 @@ public class GraphActivity extends SimpleActivity implements ActivityDefObserver
if (clusteropts.isPresent()) {
try {
logger.info("applying cbopts:" + clusteropts.get());
NashornEvaluator<DseCluster.Builder> clusterEval = new NashornEvaluator<>(DseCluster.Builder.class);
GraalJsEvaluator<DseCluster.Builder> clusterEval = new GraalJsEvaluator<>(DseCluster.Builder.class);
clusterEval.put("builder", builder);
String importEnv =
"load(\"nashorn:mozilla_compat.js\");\n" +

View File

@ -17,10 +17,10 @@ public class GraphStmtParser {
Matcher m = stmtToken.matcher(statement);
while (m.find()) {
String namedAnchor = m.group(1);
if (namedAnchor==null) {
namedAnchor=m.group(2);
if (namedAnchor==null) {
throw new RuntimeException("Pattern '" + stmtToken.pattern() + "' failed to match '" + statement +"'");
if (namedAnchor == null) {
namedAnchor = m.group(2);
if (namedAnchor == null) {
throw new RuntimeException("Pattern '" + stmtToken.pattern() + "' failed to match '" + statement + "'");
}
}
if (!bindings.containsKey(namedAnchor)) {

View File

@ -3,19 +3,19 @@ package com.datastax.ebdrivers.dsegraph.errorhandling;
import com.datastax.driver.core.exceptions.*;
import com.datastax.driver.dse.graph.GraphStatement;
import io.nosqlbench.engine.api.metrics.ExceptionMeterMetrics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.concurrent.ExecutionException;
@SuppressWarnings("Duplicates")
public class GraphErrorHandler {
private final static Logger logger = LoggerFactory.getLogger(GraphErrorHandler.class);
private final static Logger logger = LogManager.getLogger(GraphErrorHandler.class);
private final ErrorResponse realErrorResponse;
// private final ErrorResponse unappliedResponse;
private final ErrorResponse retryableResponse;
private ExceptionMeterMetrics exceptionMeterMetrics;
private final ExceptionMeterMetrics exceptionMeterMetrics;
public GraphErrorHandler(
ErrorResponse realErrorResponse,
@ -41,9 +41,9 @@ public class GraphErrorHandler {
if (e != null) {
throw e;
}
} catch ( ExecutionException |
} catch (ExecutionException |
InvalidQueryException | ReadFailureException | WriteFailureException
| SyntaxError realerror) {
| SyntaxError realerror) {
if (e instanceof SyntaxError) {
logger.error("Syntax error:" + GraphQueryStringMapper.getQueryString(statement));

View File

@ -5,7 +5,7 @@ import java.util.List;
import java.util.stream.Collectors;
public class BindableGraphStatementsTemplate {
private List<ReadyGraphStatementTemplate> templateList = new ArrayList<>();
private final List<ReadyGraphStatementTemplate> templateList = new ArrayList<>();
public void addTemplate(ReadyGraphStatementTemplate template) {
this.templateList.add(template);

View File

@ -5,7 +5,7 @@ import io.nosqlbench.virtdata.core.bindings.ContextualBindings;
public class ReadyGraphStatement implements BindableGraphStatement {
private ContextualBindings<SimpleGraphStatement, SimpleGraphStatement> contextualBindings;
private final ContextualBindings<SimpleGraphStatement, SimpleGraphStatement> contextualBindings;
public ReadyGraphStatement(ContextualBindings<SimpleGraphStatement, SimpleGraphStatement> contextualBindings) {
this.contextualBindings = contextualBindings;

View File

@ -13,8 +13,8 @@ import java.util.Map;
public class ReadyGraphStatementTemplate {
private ContextualBindingsTemplate<SimpleGraphStatement,SimpleGraphStatement> contextualBindingsTemplate;
private String name;
private final ContextualBindingsTemplate<SimpleGraphStatement, SimpleGraphStatement> contextualBindingsTemplate;
private final String name;
private String[] fields;
public ReadyGraphStatementTemplate(String name, String stmtTemplate, List<BindPoint> bindPoints, String[] fields) {
@ -22,7 +22,7 @@ public class ReadyGraphStatementTemplate {
SimpleGraphStatement simpleGraphStatement = new SimpleGraphStatement(stmtTemplate);
BindingsTemplate bindingsTemplate = new BindingsTemplate(bindPoints);
contextualBindingsTemplate = new ContextualBindingsTemplate<>(
simpleGraphStatement, bindingsTemplate ,
simpleGraphStatement, bindingsTemplate,
new ParameterizedGraphStatementValuesBinder(fields)
);
}
@ -32,13 +32,13 @@ public class ReadyGraphStatementTemplate {
SimpleGraphStatement simpleGraphStatement = new SimpleGraphStatement(stmtTemplate);
BindingsTemplate bindingsTemplate = new BindingsTemplate(bindPoints);
contextualBindingsTemplate = new ContextualBindingsTemplate<>(simpleGraphStatement, bindingsTemplate , new ParameterizedIteratedGraphStatementValuesBinder(fields, repeat));
contextualBindingsTemplate = new ContextualBindingsTemplate<>(simpleGraphStatement, bindingsTemplate, new ParameterizedIteratedGraphStatementValuesBinder(fields, repeat));
}
public static class ParameterizedIteratedGraphStatementValuesBinder implements ValuesBinder<SimpleGraphStatement, SimpleGraphStatement> {
private final String[] fields;
private int repeat;
private final int repeat;
public ParameterizedIteratedGraphStatementValuesBinder(String[] fields, int repeat) {
this.fields = fields;
@ -58,13 +58,14 @@ public class ReadyGraphStatementTemplate {
private final Map valuesMap = new HashMap();
private final ThreadLocal<Map<String, Object>> mapTL;
public ParameterizedGraphStatementValuesBinder(String[] fields){
this.fields = fields;
public ParameterizedGraphStatementValuesBinder(String[] fields) {
this.fields = fields;
for (String field : fields) {
valuesMap.put(field, null);
}
mapTL = ThreadLocal.withInitial(() -> new HashMap<String, Object>(valuesMap));
}
@Override
public SimpleGraphStatement bindValues(SimpleGraphStatement context, Bindings bindings, long cycle) {
bindings.updateMap(mapTL.get(), cycle);

View File

@ -57,11 +57,11 @@ blocks:
name: prod-mode
- name: main
tags:
phase: main
phase: main
statements:
- main-add: >-
device = graph.addVertex(label, 'device','deviceid', {deviceid}, 'type', {type}, 'os', {os}, 'osversion', {osversion});
session = graph.addVertex(label, 'session', 'sessionid', {sessionid}, 'ipaddress', {ipaddress}, 'deviceid', {deviceid}, 'createdtime', {createdtime});
session.addEdge('using', device);
device = graph.addVertex(label, 'device','deviceid', {deviceid}, 'type', {type}, 'os', {os}, 'osversion', {osversion});
session = graph.addVertex(label, 'session', 'sessionid', {sessionid}, 'ipaddress', {ipaddress}, 'deviceid', {deviceid}, 'createdtime', {createdtime});
session.addEdge('using', device);
tags:
name: main-add

View File

@ -1,7 +1,7 @@
tags:
type: testtag
kind: somekind
oevure: bananas
type: testtag
kind: somekind
oevure: bananas
name: outerblock
statements:
- some foo

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -7,11 +7,10 @@ import io.nosqlbench.engine.api.activityapi.core.SyncAction;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.nb.api.errors.BasicError;
import io.nosqlbench.virtdata.core.templates.StringBindings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.*;
import java.io.FileNotFoundException;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
@ -23,19 +22,16 @@ import java.util.concurrent.TimeUnit;
public class HttpAction implements SyncAction {
private final static Logger logger = LoggerFactory.getLogger(HttpAction.class);
private final static Logger logger = LogManager.getLogger(HttpAction.class);
private final HttpActivity httpActivity;
private final int slot;
private final int maxTries = 1;
private boolean showstmts;
private OpSequence<ReadyHttpOp> sequencer;
private HttpClient client;
private final HttpResponse.BodyHandler<String> bodyreader = HttpResponse.BodyHandlers.ofString();
private final long timeoutMillis=30000L;
public HttpAction(ActivityDef activityDef, int slot, HttpActivity httpActivity) {
this.slot = slot;
@ -54,25 +50,15 @@ public class HttpAction implements SyncAction {
@Override
public int runCycle(long cycleValue) {
StringBindings stringBindings;
String statement = null;
InputStream result = null;
// The request to be used must be constructed from the template each time.
HttpOp httpOp = null;
// The bind timer captures all the time involved in preparing the
// operation for execution, including data generation as well as
// op construction
// The request to be used must be constructed from the template each time.
HttpOp httpOp=null;
// A specifier for what makes a response ok. If this is provided, then it is
// either a list of valid http status codes, or if non-numeric, a regex for the body
// which must match.
// If not provided, then status code 200 is the only thing required to be matched.
String ok;
try (Timer.Context bindTime = httpActivity.bindTimer.time()) {
ReadyHttpOp readHTTPOperation = httpActivity.getSequencer().get(cycleValue);
ReadyHttpOp readHTTPOperation = sequencer.get(cycleValue);
httpOp = readHTTPOperation.apply(cycleValue);
} catch (Exception e) {
if (httpActivity.isDiagnosticMode()) {
@ -135,98 +121,10 @@ public class HttpAction implements SyncAction {
}
}
// if (ok == null) {
// if (response.statusCode() != 200) {
// throw new ResponseError("Result had status code " +
// response.statusCode() + ", but 'ok' was not set for this statement," +
// "so it is considered an error.");
// }
// } else {
// String[] oks = ok.split(",");
// for (String ok_condition : oks) {
// if (ok_condition.charAt(0)>='0' && ok_condition.charAt(0)<='9') {
// int matching_status = Integer.parseInt(ok_condition);
// } else {
// Pattern successRegex = Pattern.compile(ok);
// }
// }
//// Matcher matcher = successRegex.matcher(String.valueOf(response.statusCode()));
//// if (!matcher.matches()) {
//// throw new BasicError("status code " + response.statusCode() + " did not match " + success);
//// }
// }
}
return 0;
}
// String body = future.body();
// String[] splitStatement = statement.split("\\?");
// String path, query;
//
// path = splitStatement[0];
// query = "";
//
// if (splitStatement.length >= 2) {
// query = splitStatement[1];
// }
//
// URI uri = new URI(
// "http",
// null,
// host,
// httpActivity.getPort(),
// path,
// query,
// null);
//
// statement = uri.toString();
//
// showstmts = httpActivity.getShowstmts();
// if (showstmts) {
// logger.info("STMT(cycle=" + cycleValue + "):\n" + statement);
// }
// } catch (URISyntaxException e) {
// e.printStackTrace();
// }
//
// long nanoStartTime=System.nanoTime();
//
// Timer.Context resultTime = httpActivity.resultTimer.time();
// try {
// StringBuilder res = new StringBuilder();
//
// BufferedReader rd = new BufferedReader(new InputStreamReader(result));
// String line;
// while ((line = rd.readLine()) != null) {
// res.append(line);
// }
// rd.close();
//
// } catch (Exception e) {
// long resultNanos = resultTime.stop();
// resultTime = null;
// } finally {
// if (resultTime != null) {
// resultTime.stop();
// }
//
// }
//
// }
// long resultNanos = System.nanoTime() - nanoStartTime;
// httpActivity.resultSuccessTimer.update(resultNanos, TimeUnit.NANOSECONDS);
// protected HttpActivity getHttpActivity () {
// return httpActivity;
// }
// }
private HttpRequest.BodyPublisher bodySourceFrom(Map<String, String> cmdMap) {
if (cmdMap.containsKey("body")) {
String body = cmdMap.remove("body");

View File

@ -10,14 +10,14 @@ import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.activityimpl.SimpleActivity;
import io.nosqlbench.engine.api.metrics.ActivityMetrics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.net.http.HttpClient;
import java.util.function.Function;
public class HttpActivity extends SimpleActivity implements Activity, ActivityDefObserver {
private final static Logger logger = LoggerFactory.getLogger(HttpActivity.class);
private final static Logger logger = LogManager.getLogger(HttpActivity.class);
private final ActivityDef activityDef;
public HttpConsoleFormats console;

View File

@ -4,13 +4,13 @@ import io.nosqlbench.engine.api.activityapi.core.ActionDispenser;
import io.nosqlbench.engine.api.activityapi.core.ActivityType;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.nb.annotations.Service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
@Service(ActivityType.class)
public class HttpActivityType implements ActivityType<HttpActivity> {
private static final Logger logger = LoggerFactory.getLogger(HttpActivityType.class);
private static final Logger logger = LogManager.getLogger(HttpActivityType.class);
@Override
public String getName() {

View File

@ -7,8 +7,8 @@ import io.nosqlbench.activitytype.http.HttpActivity;
import io.nosqlbench.engine.api.activityapi.core.BaseAsyncAction;
import io.nosqlbench.engine.api.activityapi.core.ops.fluent.opfacets.TrackedOp;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.net.http.HttpClient;
import java.net.http.HttpResponse;
@ -17,7 +17,7 @@ import java.util.function.LongFunction;
public class HttpAsyncAction extends BaseAsyncAction<HttpAsyncOp, HttpActivity> {
private final static Logger logger = LoggerFactory.getLogger(HttpAsyncAction.class);
private final static Logger logger = LogManager.getLogger(HttpAsyncAction.class);
private OpSequence<ReadyHttpOp> sequencer;
private HttpClient client;

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -4,12 +4,12 @@ import io.nosqlbench.driver.jmx.ops.JmxOp;
import io.nosqlbench.engine.api.activityapi.core.SyncAction;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
public class JMXAction implements SyncAction {
private final static Logger logger = LoggerFactory.getLogger(JMXAction.class);
private final static Logger logger = LogManager.getLogger(JMXAction.class);
private final ActivityDef activityDef;
private final int slot;

View File

@ -1,16 +1,15 @@
package io.nosqlbench.driver.jmx;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.Console;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
public class SecureUtils {
private final static Logger logger = LoggerFactory.getLogger(SecureUtils.class);
private final static Logger logger = LogManager.getLogger(SecureUtils.class);
public static String readSecret(String description, String source) {
if (source==null) {

View File

@ -1,7 +1,7 @@
package io.nosqlbench.driver.jmx.ops;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
@ -12,7 +12,7 @@ import javax.management.remote.JMXConnector;
*/
public abstract class JmxOp {
protected final static Logger logger = LoggerFactory.getLogger(JmxOp.class);
protected final static Logger logger = LogManager.getLogger(JmxOp.class);
protected JMXConnector connector;
protected ObjectName objectName;

View File

@ -4,7 +4,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -37,27 +37,27 @@
<!-- https://mvnrepository.com/artifact/io.confluent/kafka-avro-serializer -->
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-avro-serializer</artifactId>
<version>5.5.1</version>
<artifactId>kafka-avro-serializer</artifactId>
<version>5.5.1</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>3.12.155-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>driver-stdout</artifactId>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.slf4j</groupId>-->
<!-- <artifactId>slf4j-api</artifactId>-->
<!-- <version>1.7.25</version>-->
<!-- </dependency>-->
</dependencies>
<repositories>

View File

@ -2,13 +2,13 @@ package com.datastax.ebdrivers.kafkaproducer;
import io.nosqlbench.engine.api.activityapi.core.SyncAction;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
public class KafkaAction implements SyncAction {
private final static Logger logger = LoggerFactory.getLogger(KafkaAction.class);
private final static Logger logger = LogManager.getLogger(KafkaAction.class);
private final KafkaProducerActivity activity;
private final int slot;

View File

@ -4,7 +4,6 @@ import com.codahale.metrics.Timer;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityapi.planning.SequencePlanner;
import io.nosqlbench.engine.api.activityapi.planning.SequencerType;
import io.nosqlbench.engine.api.activityconfig.ParsedStmt;
import io.nosqlbench.engine.api.activityconfig.StatementsLoader;
import io.nosqlbench.engine.api.activityconfig.yaml.OpTemplate;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtsDocList;
@ -12,22 +11,15 @@ import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.activityimpl.SimpleActivity;
import io.nosqlbench.engine.api.metrics.ActivityMetrics;
import io.nosqlbench.engine.api.templating.StrInterpolator;
import io.nosqlbench.virtdata.core.bindings.BindingsTemplate;
import io.nosqlbench.virtdata.core.templates.StringBindings;
import io.nosqlbench.virtdata.core.templates.StringBindingsTemplate;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.*;
import java.util.concurrent.Future;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
public class KafkaProducerActivity extends SimpleActivity {
private final static Logger logger = LoggerFactory.getLogger(KafkaProducerActivity.class);
private final static Logger logger = LogManager.getLogger(KafkaProducerActivity.class);
private String yamlLoc;
private String clientId;
private String servers;

View File

@ -9,8 +9,8 @@ import io.nosqlbench.virtdata.core.templates.StringBindings;
import io.nosqlbench.virtdata.core.templates.StringBindingsTemplate;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.io.IOException;
import java.nio.file.Files;
@ -20,7 +20,7 @@ import java.util.Properties;
import java.util.concurrent.Future;
public class KafkaStatement {
private final static Logger logger = LoggerFactory.getLogger(KafkaStatement.class);
private final static Logger logger = LogManager.getLogger(KafkaStatement.class);
private Producer<Object,Object> producer = null;
private final StringBindings bindings;

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -21,13 +21,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -2,8 +2,8 @@ package io.nosqlbench.driver.mongodb;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import com.codahale.metrics.Timer;
import io.nosqlbench.engine.api.activityapi.core.SyncAction;
@ -13,7 +13,7 @@ import org.bson.conversions.Bson;
public class MongoAction implements SyncAction {
private final static Logger logger = LoggerFactory.getLogger(MongoAction.class);
private final static Logger logger = LogManager.getLogger(MongoAction.class);
private final MongoActivity activity;
private final int slot;

View File

@ -5,8 +5,8 @@ import java.util.Objects;
import java.util.function.Function;
import io.nosqlbench.engine.api.activityconfig.yaml.OpTemplate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Timer;
@ -37,7 +37,7 @@ import static org.bson.codecs.configuration.CodecRegistries.fromRegistries;
public class MongoActivity extends SimpleActivity implements ActivityDefObserver {
private final static Logger logger = LoggerFactory.getLogger(MongoActivity.class);
private final static Logger logger = LogManager.getLogger(MongoActivity.class);
private String yamlLoc;
private String connectionString;

View File

@ -1,28 +1,26 @@
package io.nosqlbench.driver.mongodb;
import io.nosqlbench.engine.api.activityconfig.ParsedStmt;
import io.nosqlbench.engine.api.activityconfig.StatementsLoader;
import io.nosqlbench.engine.api.activityconfig.yaml.OpTemplate;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtsDocList;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.templating.StrInterpolator;
import io.nosqlbench.virtdata.core.templates.BindPoint;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bson.conversions.Bson;
import org.junit.Before;
import org.junit.Test;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import io.nosqlbench.engine.api.activityconfig.yaml.OpTemplate;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.nosqlbench.engine.api.activityconfig.ParsedStmt;
import io.nosqlbench.engine.api.activityconfig.StatementsLoader;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtDef;
import io.nosqlbench.engine.api.activityconfig.yaml.StmtsDocList;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.templating.StrInterpolator;
import io.nosqlbench.virtdata.core.templates.BindPoint;
import org.bson.conversions.Bson;
import static org.assertj.core.api.Assertions.assertThat;
public class ReadyMongoStatementTest {
private final static Logger logger = LoggerFactory.getLogger(ReadyMongoStatementTest.class);
private final static Logger logger = LogManager.getLogger(ReadyMongoStatementTest.class);
private ActivityDef activityDef;
private StmtsDocList stmtsDocList;

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
@ -22,13 +22,13 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>engine-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>drivers-api</artifactId>
<version>3.12.155-SNAPSHOT</version>
<version>4.15.6-SNAPSHOT</version>
</dependency>
</dependencies>

View File

@ -7,14 +7,14 @@ import io.nosqlbench.engine.api.activityapi.core.ops.fluent.opfacets.TrackedOp;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityimpl.ActivityDef;
import io.nosqlbench.virtdata.core.templates.StringBindings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.function.LongFunction;
@SuppressWarnings("Duplicates")
public class AsyncStdoutAction extends BaseAsyncAction<StdoutOpContext, StdoutActivity> {
private final static Logger logger = LoggerFactory.getLogger(AsyncStdoutAction.class);
private final static Logger logger = LogManager.getLogger(AsyncStdoutAction.class);
private OpSequence<StringBindings> sequencer;

Some files were not shown because too many files have changed in this diff Show More