mirror of
https://github.com/nosqlbench/nosqlbench.git
synced 2025-02-25 18:55:28 -06:00
Merge remote-tracking branch 'origin/main' into jk-test-eng-95-expected-result-verification
# Conflicts: # adapters-api/src/main/java/io/nosqlbench/engine/api/activityimpl/BaseOpDispenser.java # engine-api/src/test/java/io/nosqlbench/engine/api/activityapi/errorhandling/modular/NBErrorHandlerTest.java # engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLIOptions.java
This commit is contained in:
@@ -50,7 +50,7 @@
|
||||
<dependency>
|
||||
<groupId>org.apache.tinkerpop</groupId>
|
||||
<artifactId>gremlin-core</artifactId>
|
||||
<version>3.6.2</version>
|
||||
<version>3.6.3</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.tinkerpop</groupId>
|
||||
@@ -100,7 +100,7 @@
|
||||
<plugin>
|
||||
<groupId>org.antlr</groupId>
|
||||
<artifactId>antlr4-maven-plugin</artifactId>
|
||||
<version>4.11.1</version>
|
||||
<version>4.12.0</version>
|
||||
<configuration>
|
||||
<sourceDirectory>src/main/java/io/nosqlbench/cqlgen/grammars
|
||||
</sourceDirectory>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,11 +16,12 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.binders;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.cqlgen.model.CqlColumnBase;
|
||||
import io.nosqlbench.cqlgen.model.CqlModel;
|
||||
import io.nosqlbench.cqlgen.model.CqlTable;
|
||||
import io.nosqlbench.cqlgen.core.CGElementNamer;
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
@@ -40,9 +41,9 @@ import java.util.*;
|
||||
*/
|
||||
public class NamingFolio {
|
||||
|
||||
private final Map<String, Labeled> graph = new LinkedHashMap<>();
|
||||
private final Map<String, NBLabeledElement> graph = new LinkedHashMap<>();
|
||||
private final CGElementNamer namer;
|
||||
public final static String DEFAULT_NAMER_SPEC = "[BLOCKNAME-][OPTYPE-][COLUMN]-[TYPEDEF-][TABLE][-KEYSPACE]";
|
||||
public static final String DEFAULT_NAMER_SPEC = "[BLOCKNAME-][OPTYPE-][COLUMN]-[TYPEDEF-][TABLE][-KEYSPACE]";
|
||||
NamingStyle namingStyle = NamingStyle.SymbolicType;
|
||||
|
||||
public NamingFolio(String namerspec) {
|
||||
@@ -58,7 +59,7 @@ public class NamingFolio {
|
||||
|
||||
public void addFieldRef(Map<String, String> labels) {
|
||||
String name = namer.apply(labels);
|
||||
graph.put(name, Labeled.forMap(labels));
|
||||
graph.put(name, NBLabeledElement.forMap(labels));
|
||||
}
|
||||
|
||||
public void addFieldRef(String column, String typedef, String table, String keyspace) {
|
||||
@@ -69,15 +70,15 @@ public class NamingFolio {
|
||||
* This will eventually elide extraneous fields according to knowledge of all known names
|
||||
* by name, type, table, keyspace. For now it just returns everything in fully qualified form.
|
||||
*/
|
||||
public String nameFor(Labeled labeled, String... fields) {
|
||||
Map<String, String> labelsPlus = labeled.getLabelsAnd(fields);
|
||||
String name = namer.apply(labelsPlus);
|
||||
public String nameFor(NBLabeledElement labeled, String... fields) {
|
||||
NBLabels labelsPlus = labeled.getLabels().and(fields);
|
||||
String name = namer.apply(labelsPlus.asMap());
|
||||
return name;
|
||||
}
|
||||
|
||||
public String nameFor(Labeled labeled, Map<String,String> fields) {
|
||||
Map<String, String> labelsPlus = labeled.getLabelsAnd(fields);
|
||||
String name = namer.apply(labelsPlus);
|
||||
public String nameFor(NBLabeledElement labeled, Map<String,String> fields) {
|
||||
NBLabels labelsPlus = labeled.getLabels().and(fields);
|
||||
String name = namer.apply(labelsPlus.asMap());
|
||||
return name;
|
||||
|
||||
}
|
||||
@@ -85,7 +86,7 @@ public class NamingFolio {
|
||||
public void informNamerOfAllKnownNames(CqlModel model) {
|
||||
for (CqlTable table : model.getTableDefs()) {
|
||||
for (CqlColumnBase coldef : table.getColumnDefs()) {
|
||||
addFieldRef(coldef.getLabels());
|
||||
addFieldRef(coldef.getLabels().asMap());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.bindspecs;
|
||||
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
|
||||
public interface BindingSpec {
|
||||
|
||||
@@ -35,7 +35,7 @@ public interface BindingSpec {
|
||||
* This is
|
||||
* @return
|
||||
*/
|
||||
Labeled getTarget();
|
||||
NBLabeledElement getTarget();
|
||||
|
||||
String getTypedef();
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,25 +16,25 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.bindspecs;
|
||||
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
|
||||
public class BindingSpecImpl implements BindingSpec {
|
||||
private Labeled target;
|
||||
private NBLabeledElement target;
|
||||
private double cardinality;
|
||||
private String typedef;
|
||||
|
||||
public BindingSpecImpl(Labeled target) {
|
||||
public BindingSpecImpl(final NBLabeledElement target) {
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Labeled getTarget() {
|
||||
return target;
|
||||
public NBLabeledElement getTarget() {
|
||||
return this.target;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTypedef() {
|
||||
return typedef;
|
||||
return this.typedef;
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -42,7 +42,7 @@ public class BindingSpecImpl implements BindingSpec {
|
||||
return BindingSpec.super.getCardinality();
|
||||
}
|
||||
|
||||
public void setTarget(Labeled target) {
|
||||
public void setTarget(final NBLabeledElement target) {
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.core;
|
||||
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
@@ -28,14 +28,14 @@ import java.util.regex.Pattern;
|
||||
|
||||
public class CGElementNamer implements Function<Map<String, String>, String> {
|
||||
|
||||
public final static String _DEFAULT_TEMPLATE = "[PREFIX-][OPTYPE-][KEYSPACE__][TABLE][-DATATYPE]";
|
||||
public static final String _DEFAULT_TEMPLATE = "[PREFIX-][OPTYPE-][KEYSPACE__][TABLE][-DATATYPE]";
|
||||
|
||||
// for convenient reference
|
||||
public final static String PREFIX = "PREFIX";
|
||||
public final static String OPTYPE = "OPTYPE";
|
||||
public final static String KEYSPACE = "KEYSPACE";
|
||||
public final static String TABLE = "TABLE";
|
||||
public final static String DATATYPE = "DATATYPE";
|
||||
public static final String PREFIX = "PREFIX";
|
||||
public static final String OPTYPE = "OPTYPE";
|
||||
public static final String KEYSPACE = "KEYSPACE";
|
||||
public static final String TABLE = "TABLE";
|
||||
public static final String DATATYPE = "DATATYPE";
|
||||
|
||||
private final List<Section> sections = new ArrayList<>();
|
||||
private final String spec;
|
||||
@@ -47,17 +47,17 @@ public class CGElementNamer implements Function<Map<String, String>, String> {
|
||||
Pattern pattern = Pattern.compile("(?<prefix>[^\\]]+)?\\[(?<section>(?<pre>.*?)(?<name>[A-Z]+)(?<required>!)?(?<post>.*?))?]");
|
||||
Matcher scanner = pattern.matcher(template);
|
||||
while (scanner.find()) {
|
||||
if (scanner.group("prefix")!=null) {
|
||||
if (null != scanner.group("prefix")) {
|
||||
String prefix = scanner.group("prefix");
|
||||
sections.add(new Section(null, prefix, true));
|
||||
}
|
||||
if (scanner.group("section")!=null) {
|
||||
if (null != scanner.group("section")) {
|
||||
Section section = new Section(
|
||||
scanner.group("name").toLowerCase(),
|
||||
scanner.group("pre") +
|
||||
scanner.group("name")
|
||||
+ scanner.group("post"),
|
||||
scanner.group("required") != null);
|
||||
null != scanner.group("required"));
|
||||
sections.add(section);
|
||||
}
|
||||
}
|
||||
@@ -94,39 +94,42 @@ public class CGElementNamer implements Function<Map<String, String>, String> {
|
||||
return value;
|
||||
}
|
||||
|
||||
public String apply(Labeled element, String... keysAndValues) {
|
||||
public String apply(NBLabeledElement element, String... keysAndValues) {
|
||||
|
||||
LinkedHashMap<String, String> mylabels = new LinkedHashMap<>();
|
||||
for (int idx = 0; idx < keysAndValues.length; idx += 2) {
|
||||
mylabels.put(keysAndValues[idx], keysAndValues[idx + 1]);
|
||||
}
|
||||
mylabels.putAll(element.getLabels());
|
||||
mylabels.putAll(element.getLabels().asMap());
|
||||
return apply(mylabels);
|
||||
}
|
||||
|
||||
private final static class Section implements Function<Map<String, String>, String> {
|
||||
private static final class Section implements Function<Map<String, String>, String> {
|
||||
String name;
|
||||
String template;
|
||||
boolean required;
|
||||
|
||||
public Section(String name, String template, boolean required) {
|
||||
this.name = (name!=null ? name.toLowerCase() : null);
|
||||
this.name = null != name ? name.toLowerCase() : null;
|
||||
this.template = template.toLowerCase();
|
||||
this.required = required;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String apply(Map<String, String> labels) {
|
||||
if (name==null) {
|
||||
if (null == this.name) {
|
||||
return template;
|
||||
} else if (labels.containsKey(name)) {
|
||||
return template.replace(name, labels.get(name));
|
||||
} else if (labels.containsKey(name.toUpperCase())) {
|
||||
return template.replace(name, labels.get(name.toUpperCase()));
|
||||
} else if (required) {
|
||||
throw new RuntimeException("Section label '" + name + "' was not provided for template, but it is required.");
|
||||
} else {
|
||||
return "";
|
||||
}
|
||||
if (labels.containsKey(name)) {
|
||||
return template.replace(name, labels.get(name));
|
||||
}
|
||||
if (labels.containsKey(name.toUpperCase())) {
|
||||
return template.replace(name, labels.get(name.toUpperCase()));
|
||||
}
|
||||
if (required) {
|
||||
throw new RuntimeException("Section label '" + name + "' was not provided for template, but it is required.");
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,12 +16,13 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.model;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
public abstract class CqlColumnBase implements NBNamedElement, Labeled {
|
||||
public abstract class CqlColumnBase implements NBNamedElement, NBLabeledElement {
|
||||
|
||||
private String name;
|
||||
private String typedef;
|
||||
@@ -43,6 +44,7 @@ public abstract class CqlColumnBase implements NBNamedElement, Labeled {
|
||||
this.typedef = type;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
@@ -61,15 +63,12 @@ public abstract class CqlColumnBase implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getLabels() {
|
||||
return Map.of(
|
||||
"name", name,
|
||||
"type", "column"
|
||||
);
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV("name", name, "type", "column");
|
||||
}
|
||||
|
||||
public boolean isCounter() {
|
||||
return getTrimmedTypedef().equalsIgnoreCase("counter");
|
||||
return "counter".equalsIgnoreCase(this.getTrimmedTypedef());
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
@@ -77,11 +76,11 @@ public abstract class CqlColumnBase implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
public String getSyntax() {
|
||||
return getName() + " " + getTrimmedTypedef();
|
||||
return this.name + ' ' + getTrimmedTypedef();
|
||||
}
|
||||
|
||||
public String getFullName() {
|
||||
return getParentFullName() + "." + getName();
|
||||
return getParentFullName() + '.' + this.name;
|
||||
}
|
||||
|
||||
protected abstract String getParentFullName();
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,13 +17,14 @@
|
||||
package io.nosqlbench.cqlgen.model;
|
||||
|
||||
import com.datastax.oss.driver.internal.core.util.Strings;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.cqlgen.core.CGKeyspaceStats;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
public class CqlKeyspaceDef implements NBNamedElement, Labeled {
|
||||
public class CqlKeyspaceDef implements NBNamedElement, NBLabeledElement {
|
||||
String keyspaceName= "";
|
||||
CGKeyspaceStats stats;
|
||||
private boolean isDurableWrites;
|
||||
@@ -34,19 +35,20 @@ public class CqlKeyspaceDef implements NBNamedElement, Labeled {
|
||||
* Has this been populated by keyspace definition? If false, it is only
|
||||
* here because it was vivified by a reference.
|
||||
*/
|
||||
private transient boolean defined;
|
||||
private boolean defined;
|
||||
|
||||
public CqlKeyspaceDef() {
|
||||
}
|
||||
|
||||
public CqlKeyspaceDef(String ksname) {
|
||||
setKeyspaceName(ksname);
|
||||
this.keyspaceName = ksname;
|
||||
}
|
||||
|
||||
public void setKeyspaceName(String newname) {
|
||||
this.keyspaceName=newname;
|
||||
this.keyspaceName =newname;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return this.keyspaceName;
|
||||
}
|
||||
@@ -62,15 +64,15 @@ public class CqlKeyspaceDef implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getLabels() {
|
||||
return Map.of(
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV(
|
||||
"name", keyspaceName,
|
||||
"type","keyspace"
|
||||
);
|
||||
}
|
||||
|
||||
public void setStats(CGKeyspaceStats ksstats) {
|
||||
this.stats=ksstats;
|
||||
this.stats =ksstats;
|
||||
}
|
||||
|
||||
public boolean isDurableWrites() {
|
||||
@@ -113,7 +115,7 @@ public class CqlKeyspaceDef implements NBNamedElement, Labeled {
|
||||
|
||||
public void getReferenceErrors(List<String> errors) {
|
||||
if (!defined) {
|
||||
errors.add("keyspace " + this.getName() + " was referenced but not defined.");
|
||||
errors.add("keyspace " + this.keyspaceName + " was referenced but not defined.");
|
||||
}
|
||||
for (CqlType typedef : typeDefs) {
|
||||
typedef.getReferenceErrors(errors);
|
||||
@@ -124,10 +126,10 @@ public class CqlKeyspaceDef implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
public void setDefined() {
|
||||
if (this.keyspaceName==null) {
|
||||
if (null == keyspaceName) {
|
||||
throw new RuntimeException("nuh uh");
|
||||
}
|
||||
this.defined=true;
|
||||
this.defined =true;
|
||||
}
|
||||
|
||||
public void validate() {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,8 +16,9 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.model;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.cqlgen.core.CGTableStats;
|
||||
import io.nosqlbench.cqlgen.transformers.ComputedTableStats;
|
||||
|
||||
@@ -25,10 +26,10 @@ import java.util.*;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class CqlTable implements NBNamedElement, Labeled {
|
||||
public class CqlTable implements NBNamedElement, NBLabeledElement {
|
||||
private CqlKeyspaceDef keyspace;
|
||||
String name = "";
|
||||
CGTableStats tableAttributes = null;
|
||||
CGTableStats tableAttributes;
|
||||
int[] partitioning = new int[0];
|
||||
int[] clustering = new int[0];
|
||||
List<String> clusteringOrders = new ArrayList<>();
|
||||
@@ -73,6 +74,7 @@ public class CqlTable implements NBNamedElement, Labeled {
|
||||
return this.coldefs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return this.name;
|
||||
}
|
||||
@@ -82,8 +84,8 @@ public class CqlTable implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getLabels() {
|
||||
return Map.of(
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV(
|
||||
"keyspace", this.keyspace.getName(),
|
||||
"name", this.name,
|
||||
"type", "table"
|
||||
@@ -101,11 +103,10 @@ public class CqlTable implements NBNamedElement, Labeled {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (new_partitioning==partitioning) {
|
||||
if (new_partitioning== partitioning) {
|
||||
throw new RuntimeException("Unable to assign partition key '" + pkey + "' to a known column of the same name.");
|
||||
} else {
|
||||
this.partitioning = new_partitioning;
|
||||
}
|
||||
this.partitioning = new_partitioning;
|
||||
|
||||
}
|
||||
|
||||
@@ -115,7 +116,7 @@ public class CqlTable implements NBNamedElement, Labeled {
|
||||
for (int i = 0; i < coldefs.size(); i++) {
|
||||
if (coldefs.get(i).getName().equals(ccol)) {
|
||||
coldefs.get(i).setPosition(ColumnPosition.Clustering);
|
||||
new_clustering= new int[clustering.length + 1];
|
||||
new_clustering = new int[clustering.length + 1];
|
||||
System.arraycopy(clustering, 0, new_clustering, 0, clustering.length);
|
||||
new_clustering[new_clustering.length - 1] = i;
|
||||
break;
|
||||
@@ -123,9 +124,8 @@ public class CqlTable implements NBNamedElement, Labeled {
|
||||
}
|
||||
if (new_clustering == clustering) {
|
||||
throw new RuntimeException("Unable to assign clustering field '" + ccol + " to a known column of the same name.");
|
||||
} else {
|
||||
this.clustering = new_clustering;
|
||||
}
|
||||
this.clustering = new_clustering;
|
||||
}
|
||||
|
||||
public void addTableClusteringOrder(String colname, String order) {
|
||||
@@ -152,7 +152,7 @@ public class CqlTable implements NBNamedElement, Labeled {
|
||||
.findFirst();
|
||||
if (!def.isPresent()) {
|
||||
throw new RuntimeException("Unable to find column definition in table '" +
|
||||
this.getName() + "' for column '" + colname + "'");
|
||||
this.name + "' for column '" + colname + '\'');
|
||||
}
|
||||
return def.orElseThrow();
|
||||
}
|
||||
@@ -165,7 +165,7 @@ public class CqlTable implements NBNamedElement, Labeled {
|
||||
|
||||
public List<CqlTableColumn> getNonKeyColumnDefinitions() {
|
||||
int last = partitioning[partitioning.length - 1];
|
||||
last = (clustering.length > 0 ? clustering[clustering.length - 1] : last);
|
||||
last = 0 < this.clustering.length ? clustering[clustering.length - 1] : last;
|
||||
List<CqlTableColumn> nonkeys = new ArrayList<>();
|
||||
for (int nonkey = last; nonkey < coldefs.size(); nonkey++) {
|
||||
nonkeys.add(coldefs.get(nonkey));
|
||||
@@ -178,7 +178,7 @@ public class CqlTable implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
public String getFullName() {
|
||||
return (this.keyspace != null ? this.keyspace.getName() + "." : "") + this.name;
|
||||
return (null != keyspace ? this.keyspace.getName() + '.' : "") + this.name;
|
||||
}
|
||||
|
||||
public boolean isPartitionKey(int position) {
|
||||
@@ -190,11 +190,11 @@ public class CqlTable implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
public boolean isClusteringColumn(int position) {
|
||||
return clustering.length > 0 && position < clustering[clustering.length - 1] && position >= clustering[0];
|
||||
return 0 < this.clustering.length && position < clustering[clustering.length - 1] && position >= clustering[0];
|
||||
}
|
||||
|
||||
public boolean isLastClusteringColumn(int position) {
|
||||
return clustering.length > 0 && position == clustering[clustering.length - 1];
|
||||
return 0 < this.clustering.length && position == clustering[clustering.length - 1];
|
||||
}
|
||||
|
||||
public ComputedTableStats getComputedStats() {
|
||||
@@ -206,7 +206,7 @@ public class CqlTable implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
public boolean hasStats() {
|
||||
return this.computedTableStats!=null;
|
||||
return null != computedTableStats;
|
||||
}
|
||||
|
||||
public CqlKeyspaceDef getKeyspace() {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,35 +16,32 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.model;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
|
||||
public class CqlTableColumn extends CqlColumnBase {
|
||||
|
||||
private CqlTable table;
|
||||
|
||||
public CqlTableColumn(String colname, String typedef, CqlTable table) {
|
||||
public CqlTableColumn(final String colname, final String typedef, final CqlTable table) {
|
||||
super(colname, typedef);
|
||||
setTable(table);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getParentFullName() {
|
||||
return table.getFullName();
|
||||
}
|
||||
|
||||
public CqlTable getTable() {
|
||||
return table;
|
||||
}
|
||||
|
||||
public void setTable(CqlTable table) {
|
||||
this.table = table;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getLabels() {
|
||||
HashMap<String, String> map = new HashMap<>(super.getLabels());
|
||||
map.put("table",getTable().getName());
|
||||
return map;
|
||||
protected String getParentFullName() {
|
||||
return this.table.getFullName();
|
||||
}
|
||||
|
||||
public CqlTable getTable() {
|
||||
return this.table;
|
||||
}
|
||||
|
||||
public void setTable(final CqlTable table) {
|
||||
this.table = table;
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBLabels getLabels() {
|
||||
return super.getLabels().and("table", table.getName());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,15 +16,15 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.model;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public class CqlType implements NBNamedElement, Labeled {
|
||||
public class CqlType implements NBNamedElement, NBLabeledElement {
|
||||
|
||||
private String name;
|
||||
private CqlKeyspaceDef keyspace;
|
||||
@@ -42,6 +42,7 @@ public class CqlType implements NBNamedElement, Labeled {
|
||||
return keyspace;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return this.name;
|
||||
}
|
||||
@@ -56,11 +57,11 @@ public class CqlType implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getLabels() {
|
||||
return Map.of(
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV(
|
||||
"keyspace", keyspace.getName(),
|
||||
"type","type",
|
||||
"name",name
|
||||
"name", name
|
||||
);
|
||||
}
|
||||
|
||||
@@ -73,12 +74,12 @@ public class CqlType implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
public String getFullName() {
|
||||
return keyspace.getName()+"."+getName();
|
||||
return keyspace.getName()+ '.' + this.name;
|
||||
}
|
||||
|
||||
public void getReferenceErrors(List<String> errors) {
|
||||
if (!defined) {
|
||||
errors.add("type " + this.getName() + " was referenced but not defined.");
|
||||
errors.add("type " + this.name + " was referenced but not defined.");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -88,6 +89,6 @@ public class CqlType implements NBNamedElement, Labeled {
|
||||
}
|
||||
|
||||
public void setDefined() {
|
||||
this.defined=true;
|
||||
this.defined =true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,35 +16,32 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.model;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
|
||||
public class CqlTypeColumn extends CqlColumnBase {
|
||||
|
||||
CqlType type;
|
||||
|
||||
public CqlTypeColumn(String colname, String typedef, CqlType usertype) {
|
||||
public CqlTypeColumn(final String colname, final String typedef, final CqlType usertype) {
|
||||
super(colname, typedef);
|
||||
this.setType(usertype);
|
||||
type = usertype;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getParentFullName() {
|
||||
return type.getFullName();
|
||||
return this.type.getFullName();
|
||||
}
|
||||
|
||||
public CqlType getType() {
|
||||
return type;
|
||||
return this.type;
|
||||
}
|
||||
|
||||
public void setType(CqlType type) {
|
||||
public void setType(final CqlType type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getLabels() {
|
||||
Map<String,String> map = new LinkedHashMap<>(super.getLabels());
|
||||
map.put("name",type.getName());
|
||||
return map;
|
||||
public NBLabels getLabels() {
|
||||
return super.getLabels().and("name", this.type.getName());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,8 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.transformers;
|
||||
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.virtdata.library.basics.shared.from_long.to_string.Combinations;
|
||||
|
||||
import java.util.HashMap;
|
||||
@@ -60,19 +61,19 @@ public class CGCachingNameRemapper {
|
||||
Objects.requireNonNull(type);
|
||||
String name = labels.get("name");
|
||||
Objects.requireNonNull(name);
|
||||
String canonical = type+"-"+name;
|
||||
String canonical = type+ '-' +name;
|
||||
String prefix = prefixmap.getOrDefault(type,"");
|
||||
if (!remapped.containsKey(canonical)) {
|
||||
long indexForType=indexforType(type);
|
||||
String newname = (prefix!=null?prefix:"")+namefunc.apply(indexForType);
|
||||
long indexForType= indexforType(type);
|
||||
String newname = (null != prefix ?prefix:"")+ namefunc.apply(indexForType);
|
||||
remapped.put(canonical,newname);
|
||||
}
|
||||
return remapped.get(canonical);
|
||||
}
|
||||
|
||||
public synchronized String nameFor(Labeled element) {
|
||||
Map<String, String> labels = element.getLabels();
|
||||
return nameFor(labels);
|
||||
public synchronized String nameFor(NBLabeledElement element) {
|
||||
NBLabels labels = element.getLabels();
|
||||
return nameFor(labels.asMap());
|
||||
}
|
||||
|
||||
// public Function<String, String> mapperForType(Labeled cqlTable, String prefix) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.transformers.namecache;
|
||||
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
@@ -25,26 +25,22 @@ public class NamedColumn{
|
||||
private final String name;
|
||||
private String alias;
|
||||
|
||||
public NamedColumn(String name) {
|
||||
public NamedColumn(final String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public void alias(String alias) {
|
||||
public void alias(final String alias) {
|
||||
this.alias = alias;
|
||||
}
|
||||
|
||||
public String computeAlias(Labeled labeled, Function<Labeled, String> namer) {
|
||||
if (this.alias==null) {
|
||||
this.alias = namer.apply(labeled);
|
||||
}
|
||||
return this.alias;
|
||||
public String computeAlias(final NBLabeledElement labeled, final Function<NBLabeledElement, String> namer) {
|
||||
if (null == this.alias) alias = namer.apply(labeled);
|
||||
return alias;
|
||||
}
|
||||
|
||||
public String computeAlias(Map<String,String> labels, Function<Map<String,String>,String> namer) {
|
||||
if (this.alias==null) {
|
||||
this.alias= namer.apply(labels);
|
||||
}
|
||||
return this.alias;
|
||||
public String computeAlias(final Map<String,String> labels, final Function<Map<String,String>,String> namer) {
|
||||
if (null == this.alias) alias = namer.apply(labels);
|
||||
return alias;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.transformers.namecache;
|
||||
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedHashMap;
|
||||
@@ -29,35 +29,33 @@ public class NamedKeyspace {
|
||||
private final Map<String, NamedType> types = new LinkedHashMap<>();
|
||||
private String alias;
|
||||
|
||||
public NamedKeyspace(String ksname) {
|
||||
public NamedKeyspace(final String ksname) {
|
||||
this.ksname = ksname;
|
||||
}
|
||||
|
||||
public NamedType type(String typename) {
|
||||
return types.computeIfAbsent(typename, NamedType::new);
|
||||
public NamedType type(final String typename) {
|
||||
return this.types.computeIfAbsent(typename, NamedType::new);
|
||||
}
|
||||
|
||||
public NamedTable table(String tablename) {
|
||||
return tables.computeIfAbsent(tablename, NamedTable::new);
|
||||
public NamedTable table(final String tablename) {
|
||||
return this.tables.computeIfAbsent(tablename, NamedTable::new);
|
||||
}
|
||||
|
||||
public NamedKeyspace alias(String alias) {
|
||||
public NamedKeyspace alias(final String alias) {
|
||||
this.alias = alias;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String computeAlias(Labeled labeled, Function<Labeled,String> namer) {
|
||||
if (this.alias==null) {
|
||||
this.alias = namer.apply(labeled);
|
||||
}
|
||||
return this.alias;
|
||||
public String computeAlias(final NBLabeledElement labeled, final Function<NBLabeledElement,String> namer) {
|
||||
if (null == this.alias) alias = namer.apply(labeled);
|
||||
return alias;
|
||||
}
|
||||
|
||||
public Collection<NamedTable> tables() {
|
||||
return tables.values();
|
||||
return this.tables.values();
|
||||
}
|
||||
|
||||
public Collection<NamedType> types() {
|
||||
return types.values();
|
||||
return this.types.values();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.transformers.namecache;
|
||||
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedHashMap;
|
||||
@@ -28,31 +28,29 @@ public class NamedTable {
|
||||
private final Map<String, NamedColumn> columns = new LinkedHashMap<>();
|
||||
private String alias;
|
||||
|
||||
public NamedTable(String tablename) {
|
||||
public NamedTable(final String tablename) {
|
||||
this.tablename = tablename;
|
||||
}
|
||||
|
||||
public NamedColumn column(String name) {
|
||||
return this.columns.computeIfAbsent(name, NamedColumn::new);
|
||||
public NamedColumn column(final String name) {
|
||||
return columns.computeIfAbsent(name, NamedColumn::new);
|
||||
}
|
||||
|
||||
public NamedTable alias(String alias) {
|
||||
public NamedTable alias(final String alias) {
|
||||
this.alias = alias;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String computeAlias(Labeled labeled, Function<Labeled,String> namer) {
|
||||
if (this.alias==null) {
|
||||
this.alias = namer.apply(labeled);
|
||||
}
|
||||
return this.alias;
|
||||
public String computeAlias(final NBLabeledElement labeled, final Function<NBLabeledElement,String> namer) {
|
||||
if (null == this.alias) alias = namer.apply(labeled);
|
||||
return alias;
|
||||
}
|
||||
|
||||
public String getAlias() {
|
||||
return this.alias;
|
||||
return alias;
|
||||
}
|
||||
|
||||
public Collection<NamedColumn> columns() {
|
||||
return columns.values();
|
||||
return this.columns.values();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
package io.nosqlbench.cqlgen.transformers.namecache;
|
||||
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
@@ -29,29 +29,27 @@ public class NamedType {
|
||||
private String alias;
|
||||
private final Map<String,NamedColumn> columns = new LinkedHashMap<>();
|
||||
|
||||
public NamedType(String typename) {
|
||||
this.name = typename;
|
||||
public NamedType(final String typename) {
|
||||
name = typename;
|
||||
}
|
||||
|
||||
public void alias(String alias) {
|
||||
public void alias(final String alias) {
|
||||
this.alias = alias;
|
||||
}
|
||||
|
||||
public NamedColumn column(String key) {
|
||||
return this.columns.computeIfAbsent(key, NamedColumn::new);
|
||||
public NamedColumn column(final String key) {
|
||||
return columns.computeIfAbsent(key, NamedColumn::new);
|
||||
}
|
||||
public List<NamedColumn> getColumnDefs() {
|
||||
return new ArrayList<>(columns.values());
|
||||
return new ArrayList<>(this.columns.values());
|
||||
}
|
||||
|
||||
public String computeAlias(Labeled labeled, Function<Labeled, String> namer) {
|
||||
if (this.alias==null) {
|
||||
this.alias = namer.apply(labeled);
|
||||
}
|
||||
return this.alias;
|
||||
public String computeAlias(final NBLabeledElement labeled, final Function<NBLabeledElement, String> namer) {
|
||||
if (null == this.alias) alias = namer.apply(labeled);
|
||||
return alias;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
public void setName(final String name) {
|
||||
this.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,8 @@
|
||||
|
||||
package io.nosqlbench.converters.cql.exporters;
|
||||
|
||||
import io.nosqlbench.api.labels.Labeled;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.cqlgen.core.CGElementNamer;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
@@ -50,10 +51,10 @@ public class CGElementNamerTest {
|
||||
@Test
|
||||
public void testLabeledFields() {
|
||||
CGElementNamer namer = new CGElementNamer("[ABC---][,deFGH][__IJ__]");
|
||||
Labeled mylabeled = new Labeled() {
|
||||
NBLabeledElement mylabeled = new NBLabeledElement() {
|
||||
@Override
|
||||
public Map<String, String> getLabels() {
|
||||
return Map.of("ij", "eyejay");
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV("ij", "eyejay");
|
||||
}
|
||||
};
|
||||
assertThat(namer.apply(mylabeled, "abc", "base")).isEqualTo("base---__eyejay__");
|
||||
@@ -74,10 +75,10 @@ public class CGElementNamerTest {
|
||||
@Test
|
||||
public void testRequiredFieldsPresent() {
|
||||
CGElementNamer namer = new CGElementNamer("[ABC!---!]");
|
||||
Labeled mylabeled = new Labeled() {
|
||||
NBLabeledElement mylabeled = new NBLabeledElement() {
|
||||
@Override
|
||||
public Map<String, String> getLabels() {
|
||||
return Map.of("ij", "eyejay");
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV("ij", "eyejay");
|
||||
}
|
||||
};
|
||||
assertThat(namer.apply(Map.of(
|
||||
@@ -89,10 +90,10 @@ public class CGElementNamerTest {
|
||||
@Test
|
||||
public void testRequiredFieldsMissing() {
|
||||
CGElementNamer namer = new CGElementNamer("[ABC!---!]");
|
||||
Labeled mylabeled = new Labeled() {
|
||||
NBLabeledElement mylabeled = new NBLabeledElement() {
|
||||
@Override
|
||||
public Map<String, String> getLabels() {
|
||||
return Map.of("ij", "eyejay");
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV("ij", "eyejay");
|
||||
}
|
||||
};
|
||||
assertThatThrownBy(() -> namer.apply(Map.of(
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,6 +16,8 @@
|
||||
|
||||
package io.nosqlbench.adapter.diag.optasks;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.config.standard.*;
|
||||
import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiter;
|
||||
import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiters;
|
||||
@@ -25,7 +27,7 @@ import io.nosqlbench.nb.annotations.Service;
|
||||
import java.util.Map;
|
||||
|
||||
@Service(value = DiagTask.class, selector = "diagrate")
|
||||
public class DiagTask_diagrate implements DiagTask, NBReconfigurable {
|
||||
public class DiagTask_diagrate implements DiagTask, NBReconfigurable, NBLabeledElement {
|
||||
private String name;
|
||||
private RateLimiter rateLimiter;
|
||||
private RateSpec rateSpec;
|
||||
@@ -77,4 +79,9 @@ public class DiagTask_diagrate implements DiagTask, NBReconfigurable {
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV("diagop", name);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -43,7 +43,7 @@
|
||||
<dependency>
|
||||
<groupId>com.amazonaws</groupId>
|
||||
<artifactId>aws-java-sdk-dynamodb</artifactId>
|
||||
<version>1.12.447</version>
|
||||
<version>1.12.454</version>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
@@ -37,7 +37,7 @@
|
||||
|
||||
<dependency>
|
||||
<groupId>io.nosqlbench</groupId>
|
||||
<artifactId>engine-api</artifactId>
|
||||
<artifactId>adapters-api</artifactId>
|
||||
<version>${revision}</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,10 +17,11 @@
|
||||
package io.nosqlbench.adapter.http.core;
|
||||
|
||||
import com.codahale.metrics.Histogram;
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
|
||||
|
||||
public class HttpMetrics implements NBNamedElement {
|
||||
public class HttpMetrics implements NBLabeledElement {
|
||||
private final HttpSpace space;
|
||||
final Histogram statusCodeHistogram;
|
||||
|
||||
@@ -29,8 +30,12 @@ public class HttpMetrics implements NBNamedElement {
|
||||
statusCodeHistogram = ActivityMetrics.histogram(this, "statuscode",space.getHdrDigits());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "http"+(space.getName().equals("default")?"":"-"+space.getName());
|
||||
return "http"+("default".equals(this.space.getSpaceName())?"": '-' + space.getSpaceName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBLabels getLabels() {
|
||||
return space.getLabels();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,8 @@
|
||||
|
||||
package io.nosqlbench.adapter.http.core;
|
||||
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.config.standard.ConfigModel;
|
||||
import io.nosqlbench.api.config.standard.NBConfigModel;
|
||||
import io.nosqlbench.api.config.standard.NBConfiguration;
|
||||
@@ -34,7 +35,7 @@ import java.util.Locale;
|
||||
* HTTP client implementation is meant to be immutable. If shared-state issues
|
||||
* occur, thread-local support will be re-added.
|
||||
*/
|
||||
public class HttpSpace implements NBNamedElement {
|
||||
public class HttpSpace implements NBLabeledElement {
|
||||
private final static Logger logger = LogManager.getLogger(HttpSpace.class);
|
||||
|
||||
private final String name;
|
||||
@@ -93,7 +94,11 @@ public class HttpSpace implements NBNamedElement {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV("space", getSpaceName());
|
||||
}
|
||||
|
||||
public String getSpaceName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
|
||||
@@ -18,6 +18,7 @@ package io.nosqlbench.adapter.http;
|
||||
|
||||
import io.nosqlbench.adapter.http.core.HttpOpMapper;
|
||||
import io.nosqlbench.adapter.http.core.HttpSpace;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.standard.NBConfiguration;
|
||||
import io.nosqlbench.engine.api.activityconfig.OpsLoader;
|
||||
import io.nosqlbench.engine.api.activityconfig.yaml.OpTemplate;
|
||||
@@ -37,30 +38,30 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class HttpOpMapperTest {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger(HttpOpMapperTest.class);
|
||||
private static final Logger logger = LogManager.getLogger(HttpOpMapperTest.class);
|
||||
static NBConfiguration cfg;
|
||||
static HttpDriverAdapter adapter;
|
||||
static HttpOpMapper mapper;
|
||||
|
||||
@BeforeAll
|
||||
public static void initializeTestMapper() {
|
||||
cfg = HttpSpace.getConfigModel().apply(Map.of());
|
||||
adapter = new HttpDriverAdapter();
|
||||
adapter.applyConfig(cfg);
|
||||
DriverSpaceCache<? extends HttpSpace> cache = adapter.getSpaceCache();
|
||||
mapper = new HttpOpMapper(adapter,cfg, cache);
|
||||
HttpOpMapperTest.cfg = HttpSpace.getConfigModel().apply(Map.of());
|
||||
HttpOpMapperTest.adapter = new HttpDriverAdapter();
|
||||
HttpOpMapperTest.adapter.applyConfig(HttpOpMapperTest.cfg);
|
||||
final DriverSpaceCache<? extends HttpSpace> cache = HttpOpMapperTest.adapter.getSpaceCache();
|
||||
HttpOpMapperTest.mapper = new HttpOpMapper(HttpOpMapperTest.adapter, HttpOpMapperTest.cfg, cache);
|
||||
}
|
||||
|
||||
private static ParsedOp parsedOpFor(String yaml) {
|
||||
OpsDocList docs = OpsLoader.loadString(yaml, OpTemplateFormat.yaml, Map.of(), null);
|
||||
OpTemplate opTemplate = docs.getOps().get(0);
|
||||
ParsedOp parsedOp = new ParsedOp(opTemplate, cfg, List.of(adapter.getPreprocessor()));
|
||||
private static ParsedOp parsedOpFor(final String yaml) {
|
||||
final OpsDocList docs = OpsLoader.loadString(yaml, OpTemplateFormat.yaml, Map.of(), null);
|
||||
final OpTemplate opTemplate = docs.getOps().get(0);
|
||||
final ParsedOp parsedOp = new ParsedOp(opTemplate, HttpOpMapperTest.cfg, List.of(HttpOpMapperTest.adapter.getPreprocessor()), NBLabeledElement.forMap(Map.of()));
|
||||
return parsedOp;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOnelineSpec() {
|
||||
ParsedOp pop = parsedOpFor("""
|
||||
final ParsedOp pop = HttpOpMapperTest.parsedOpFor("""
|
||||
ops:
|
||||
- s1: method=get uri=http://localhost/
|
||||
""");
|
||||
@@ -70,7 +71,7 @@ public class HttpOpMapperTest {
|
||||
|
||||
@Test
|
||||
public void testRFCFormMinimal() {
|
||||
ParsedOp pop = parsedOpFor("""
|
||||
final ParsedOp pop = HttpOpMapperTest.parsedOpFor("""
|
||||
ops:
|
||||
- s1: get http://localhost/
|
||||
""");
|
||||
@@ -81,7 +82,7 @@ public class HttpOpMapperTest {
|
||||
|
||||
@Test
|
||||
public void testRFCFormVersioned() {
|
||||
ParsedOp pop = parsedOpFor("""
|
||||
final ParsedOp pop = HttpOpMapperTest.parsedOpFor("""
|
||||
ops:
|
||||
- s1: get http://localhost/ HTTP/1.1
|
||||
""");
|
||||
@@ -90,7 +91,7 @@ public class HttpOpMapperTest {
|
||||
|
||||
@Test
|
||||
public void testRFCFormHeaders() {
|
||||
ParsedOp pop = parsedOpFor("""
|
||||
final ParsedOp pop = HttpOpMapperTest.parsedOpFor("""
|
||||
ops:
|
||||
- s1: |
|
||||
get http://localhost/
|
||||
@@ -101,7 +102,7 @@ public class HttpOpMapperTest {
|
||||
|
||||
@Test
|
||||
public void testRFCFormBody() {
|
||||
ParsedOp pop = parsedOpFor("""
|
||||
final ParsedOp pop = HttpOpMapperTest.parsedOpFor("""
|
||||
ops:
|
||||
- s1: |
|
||||
get http://localhost/
|
||||
@@ -117,7 +118,7 @@ public class HttpOpMapperTest {
|
||||
|
||||
// This can not be fully resolved in the unit testing context, but it could be
|
||||
// in the integrated testing context. It is sufficient to verify parsing here.
|
||||
ParsedOp pop = parsedOpFor("""
|
||||
final ParsedOp pop = HttpOpMapperTest.parsedOpFor("""
|
||||
ops:
|
||||
- s1: |
|
||||
{method} {scheme}://{host}/{path}?{query} {version}
|
||||
@@ -136,7 +137,7 @@ public class HttpOpMapperTest {
|
||||
body: StaticStringMapper('test')
|
||||
""");
|
||||
|
||||
logger.debug(pop);
|
||||
HttpOpMapperTest.logger.debug(pop);
|
||||
assertThat(pop.getDefinedNames()).containsAll(List.of(
|
||||
"method","uri","version","Header1","body"
|
||||
));
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
package io.nosqlbench.adapter.kafka.dispensers;
|
||||
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,13 +14,14 @@ package io.nosqlbench.adapter.kafka.dispensers;
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.nosqlbench.adapter.kafka.dispensers;
|
||||
|
||||
import io.nosqlbench.adapter.kafka.KafkaSpace;
|
||||
import io.nosqlbench.adapter.kafka.exception.KafkaAdapterInvalidParamException;
|
||||
import io.nosqlbench.adapter.kafka.ops.KafkaOp;
|
||||
import io.nosqlbench.adapter.kafka.util.KafkaAdapterMetrics;
|
||||
import io.nosqlbench.adapter.kafka.util.KafkaAdapterUtil;
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.adapter.kafka.util.KafkaAdapterUtil.DOC_LEVEL_PARAMS;
|
||||
import io.nosqlbench.engine.api.activityimpl.BaseOpDispenser;
|
||||
import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
|
||||
import io.nosqlbench.engine.api.templating.ParsedOp;
|
||||
@@ -31,13 +30,14 @@ import org.apache.commons.lang3.math.NumberUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.function.LongFunction;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public abstract class KafkaBaseOpDispenser extends BaseOpDispenser<KafkaOp, KafkaSpace> implements NBNamedElement {
|
||||
public abstract class KafkaBaseOpDispenser extends BaseOpDispenser<KafkaOp, KafkaSpace> {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger("KafkaBaseOpDispenser");
|
||||
private static final Logger logger = LogManager.getLogger("KafkaBaseOpDispenser");
|
||||
|
||||
protected final ParsedOp parsedOp;
|
||||
protected final KafkaAdapterMetrics kafkaAdapterMetrics;
|
||||
@@ -58,83 +58,80 @@ public abstract class KafkaBaseOpDispenser extends BaseOpDispenser<KafkaOp, Kaf
|
||||
protected final int totalThreadNum;
|
||||
protected final long totalCycleNum;
|
||||
|
||||
public KafkaBaseOpDispenser(DriverAdapter adapter,
|
||||
ParsedOp op,
|
||||
LongFunction<String> topicNameStrFunc,
|
||||
KafkaSpace kafkaSpace) {
|
||||
protected KafkaBaseOpDispenser(final DriverAdapter adapter,
|
||||
final ParsedOp op,
|
||||
final LongFunction<String> topicNameStrFunc,
|
||||
final KafkaSpace kafkaSpace) {
|
||||
|
||||
super(adapter, op);
|
||||
|
||||
this.parsedOp = op;
|
||||
parsedOp = op;
|
||||
this.kafkaSpace = kafkaSpace;
|
||||
|
||||
String defaultMetricsPrefix = getDefaultMetricsPrefix(this.parsedOp);
|
||||
this.kafkaAdapterMetrics = new KafkaAdapterMetrics(this, defaultMetricsPrefix);
|
||||
kafkaAdapterMetrics.initS4JAdapterInstrumentation();
|
||||
kafkaAdapterMetrics = new KafkaAdapterMetrics(this, this);
|
||||
this.kafkaAdapterMetrics.initS4JAdapterInstrumentation();
|
||||
|
||||
this.asyncAPI =
|
||||
parsedOp.getStaticConfigOr(KafkaAdapterUtil.DOC_LEVEL_PARAMS.ASYNC_API.label, Boolean.TRUE);
|
||||
asyncAPI =
|
||||
this.parsedOp.getStaticConfigOr(DOC_LEVEL_PARAMS.ASYNC_API.label, Boolean.TRUE);
|
||||
|
||||
this.topicNameStrFunc = topicNameStrFunc;
|
||||
this.topicConfMap.putAll(kafkaSpace.getKafkaClientConf().getTopicConfMap());
|
||||
topicConfMap.putAll(kafkaSpace.getKafkaClientConf().getTopicConfMap());
|
||||
|
||||
this.totalCycleNum = NumberUtils.toLong(parsedOp.getStaticConfig("cycles", String.class));
|
||||
kafkaSpace.setTotalCycleNum(totalCycleNum);
|
||||
totalCycleNum = NumberUtils.toLong(this.parsedOp.getStaticConfig("cycles", String.class));
|
||||
kafkaSpace.setTotalCycleNum(this.totalCycleNum);
|
||||
|
||||
this.kafkaClntCnt = kafkaSpace.getKafkaClntNum();
|
||||
this.consumerGrpCnt = kafkaSpace.getConsumerGrpNum();
|
||||
this.totalThreadNum = NumberUtils.toInt(parsedOp.getStaticConfig("threads", String.class));
|
||||
kafkaClntCnt = kafkaSpace.getKafkaClntNum();
|
||||
consumerGrpCnt = kafkaSpace.getConsumerGrpNum();
|
||||
totalThreadNum = NumberUtils.toInt(this.parsedOp.getStaticConfig("threads", String.class));
|
||||
|
||||
assert (kafkaClntCnt > 0);
|
||||
assert (consumerGrpCnt > 0);
|
||||
assert 0 < kafkaClntCnt;
|
||||
assert 0 < consumerGrpCnt;
|
||||
|
||||
boolean validThreadNum =
|
||||
( ((this instanceof MessageProducerOpDispenser) && (totalThreadNum == kafkaClntCnt)) ||
|
||||
((this instanceof MessageConsumerOpDispenser) && (totalThreadNum == kafkaClntCnt*consumerGrpCnt)) );
if (!validThreadNum) {
throw new KafkaAdapterInvalidParamException(
"Incorrect settings of 'threads', 'num_clnt', or 'num_cons_grp' -- " +
totalThreadNum + ", " + kafkaClntCnt + ", " + consumerGrpCnt);
}
final boolean validThreadNum =
this instanceof MessageProducerOpDispenser && this.totalThreadNum == this.kafkaClntCnt ||
this instanceof MessageConsumerOpDispenser && this.totalThreadNum == this.kafkaClntCnt * this.consumerGrpCnt;
if (!validThreadNum) throw new KafkaAdapterInvalidParamException(
"Incorrect settings of 'threads', 'num_clnt', or 'num_cons_grp' -- " +
this.totalThreadNum + ", " + this.kafkaClntCnt + ", " + this.consumerGrpCnt);
}

public KafkaSpace getKafkaSpace() { return kafkaSpace; }
public KafkaAdapterMetrics getKafkaAdapterMetrics() { return kafkaAdapterMetrics; }
public KafkaSpace getKafkaSpace() { return this.kafkaSpace; }
public KafkaAdapterMetrics getKafkaAdapterMetrics() { return this.kafkaAdapterMetrics; }

protected LongFunction<Boolean> lookupStaticBoolConfigValueFunc(String paramName, boolean defaultValue) {
LongFunction<Boolean> booleanLongFunction;
booleanLongFunction = (l) -> parsedOp.getOptionalStaticConfig(paramName, String.class)
protected LongFunction<Boolean> lookupStaticBoolConfigValueFunc(final String paramName, final boolean defaultValue) {
final LongFunction<Boolean> booleanLongFunction;
booleanLongFunction = l -> this.parsedOp.getOptionalStaticConfig(paramName, String.class)
.filter(Predicate.not(String::isEmpty))
.map(value -> BooleanUtils.toBoolean(value))
.orElse(defaultValue);
logger.info("{}: {}", paramName, booleanLongFunction.apply(0));
KafkaBaseOpDispenser.logger.info("{}: {}", paramName, booleanLongFunction.apply(0));
return booleanLongFunction;
}

// If the corresponding Op parameter is not provided, use the specified default value
protected LongFunction<String> lookupOptionalStrOpValueFunc(String paramName, String defaultValue) {
LongFunction<String> stringLongFunction;
stringLongFunction = parsedOp.getAsOptionalFunction(paramName, String.class)
.orElse((l) -> defaultValue);
logger.info("{}: {}", paramName, stringLongFunction.apply(0));
protected LongFunction<String> lookupOptionalStrOpValueFunc(final String paramName, final String defaultValue) {
final LongFunction<String> stringLongFunction;
stringLongFunction = this.parsedOp.getAsOptionalFunction(paramName, String.class)
.orElse(l -> defaultValue);
KafkaBaseOpDispenser.logger.info("{}: {}", paramName, stringLongFunction.apply(0));

return stringLongFunction;
}
protected LongFunction<String> lookupOptionalStrOpValueFunc(String paramName) {
return lookupOptionalStrOpValueFunc(paramName, "");
protected LongFunction<String> lookupOptionalStrOpValueFunc(final String paramName) {
return this.lookupOptionalStrOpValueFunc(paramName, "");
}

// Mandatory Op parameter. Throw an error if not specified or having empty value
protected LongFunction<String> lookupMandtoryStrOpValueFunc(String paramName) {
LongFunction<String> stringLongFunction;
stringLongFunction = parsedOp.getAsRequiredFunction(paramName, String.class);
logger.info("{}: {}", paramName, stringLongFunction.apply(0));
protected LongFunction<String> lookupMandtoryStrOpValueFunc(final String paramName) {
final LongFunction<String> stringLongFunction;
stringLongFunction = this.parsedOp.getAsRequiredFunction(paramName, String.class);
KafkaBaseOpDispenser.logger.info("{}: {}", paramName, stringLongFunction.apply(0));

return stringLongFunction;
}

@Override
public String getName() {
return "KafkaBaseOpDispenser";
}

}
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -23,6 +23,7 @@ import io.nosqlbench.adapter.kafka.ops.OpTimeTrackKafkaClient;
import io.nosqlbench.adapter.kafka.ops.OpTimeTrackKafkaConsumer;
import io.nosqlbench.adapter.kafka.util.EndToEndStartingTimeSource;
import io.nosqlbench.adapter.kafka.util.KafkaAdapterUtil;
import io.nosqlbench.adapter.kafka.util.KafkaAdapterUtil.DOC_LEVEL_PARAMS;
import io.nosqlbench.engine.api.metrics.ReceivedMessageSequenceTracker;
import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
import io.nosqlbench.engine.api.templating.ParsedOp;
@@ -39,7 +40,7 @@ import java.util.stream.Collectors;

public class MessageConsumerOpDispenser extends KafkaBaseOpDispenser {

private final static Logger logger = LogManager.getLogger("MessageConsumerOpDispenser");
private static final Logger logger = LogManager.getLogger("MessageConsumerOpDispenser");

private final Map<String, String> consumerClientConfMap = new HashMap<>();

@@ -60,108 +61,102 @@ public class MessageConsumerOpDispenser extends KafkaBaseOpDispenser {
receivedMessageSequenceTrackersForTopicThreadLocal = ThreadLocal.withInitial(HashMap::new);
protected final LongFunction<Boolean> seqTrackingFunc;

public MessageConsumerOpDispenser(DriverAdapter adapter,
ParsedOp op,
LongFunction<String> tgtNameFunc,
KafkaSpace kafkaSpace) {
public MessageConsumerOpDispenser(final DriverAdapter adapter,
final ParsedOp op,
final LongFunction<String> tgtNameFunc,
final KafkaSpace kafkaSpace) {
super(adapter, op, tgtNameFunc, kafkaSpace);

this.consumerClientConfMap.putAll(kafkaSpace.getKafkaClientConf().getConsumerConfMap());
consumerClientConfMap.put("bootstrap.servers", kafkaSpace.getBootstrapSvr());
consumerClientConfMap.putAll(kafkaSpace.getKafkaClientConf().getConsumerConfMap());
this.consumerClientConfMap.put("bootstrap.servers", kafkaSpace.getBootstrapSvr());

this.msgPollIntervalInSec =
NumberUtils.toInt(parsedOp.getStaticConfigOr("msg_poll_interval", "0"));
msgPollIntervalInSec =
NumberUtils.toInt(this.parsedOp.getStaticConfigOr("msg_poll_interval", "0"));

this.maxMsgCntPerCommit =
NumberUtils.toInt(parsedOp.getStaticConfig("manual_commit_batch_num", String.class));
maxMsgCntPerCommit =
NumberUtils.toInt(this.parsedOp.getStaticConfig("manual_commit_batch_num", String.class));

this.autoCommitEnabled = true;
if (maxMsgCntPerCommit > 0) {
this.autoCommitEnabled = false;
consumerClientConfMap.put("enable.auto.commit", "false");
} else {
if (consumerClientConfMap.containsKey("enable.auto.commit")) {
this.autoCommitEnabled = BooleanUtils.toBoolean(consumerClientConfMap.get("enable.auto.commit"));
}
}
this.e2eStartTimeSrcParamStrFunc = lookupOptionalStrOpValueFunc(
KafkaAdapterUtil.DOC_LEVEL_PARAMS.E2E_STARTING_TIME_SOURCE.label, "none");
this.seqTrackingFunc = lookupStaticBoolConfigValueFunc(
KafkaAdapterUtil.DOC_LEVEL_PARAMS.SEQ_TRACKING.label, false);
;
autoCommitEnabled = true;
if (0 < maxMsgCntPerCommit) {
autoCommitEnabled = false;
this.consumerClientConfMap.put("enable.auto.commit", "false");
} else if (this.consumerClientConfMap.containsKey("enable.auto.commit"))
autoCommitEnabled = BooleanUtils.toBoolean(this.consumerClientConfMap.get("enable.auto.commit"));
e2eStartTimeSrcParamStrFunc = this.lookupOptionalStrOpValueFunc(
DOC_LEVEL_PARAMS.E2E_STARTING_TIME_SOURCE.label, "none");
seqTrackingFunc = this.lookupStaticBoolConfigValueFunc(
DOC_LEVEL_PARAMS.SEQ_TRACKING.label, false);
}

private String getEffectiveGroupId(long cycle) {
int grpIdx = (int) (cycle % consumerGrpCnt);
private String getEffectiveGroupId(final long cycle) {
final int grpIdx = (int) (cycle % this.consumerGrpCnt);
String defaultGrpNamePrefix = KafkaAdapterUtil.DFT_CONSUMER_GROUP_NAME_PREFIX;
if (consumerClientConfMap.containsKey("group.id")) {
defaultGrpNamePrefix = consumerClientConfMap.get("group.id");
}
if (this.consumerClientConfMap.containsKey("group.id"))
defaultGrpNamePrefix = this.consumerClientConfMap.get("group.id");

return defaultGrpNamePrefix + "-" + grpIdx;
return defaultGrpNamePrefix + '-' + grpIdx;
}
private OpTimeTrackKafkaClient getOrCreateOpTimeTrackKafkaConsumer(
long cycle,
List<String> topicNameList,
String groupId)
final long cycle,
final List<String> topicNameList,
final String groupId)
{
String topicNameListStr = topicNameList.stream()
final String topicNameListStr = topicNameList.stream()
.collect(Collectors.joining("::"));

String cacheKey = KafkaAdapterUtil.buildCacheKey(
"consumer-" + String.valueOf(cycle % kafkaClntCnt), topicNameListStr, groupId );
final String cacheKey = KafkaAdapterUtil.buildCacheKey(
"consumer-" + cycle % this.kafkaClntCnt, topicNameListStr, groupId );

OpTimeTrackKafkaClient opTimeTrackKafkaClient = kafkaSpace.getOpTimeTrackKafkaClient(cacheKey);
if (opTimeTrackKafkaClient == null) {
Properties consumerConfProps = new Properties();
consumerConfProps.putAll(consumerClientConfMap);
OpTimeTrackKafkaClient opTimeTrackKafkaClient = this.kafkaSpace.getOpTimeTrackKafkaClient(cacheKey);
if (null == opTimeTrackKafkaClient) {
final Properties consumerConfProps = new Properties();
consumerConfProps.putAll(this.consumerClientConfMap);
consumerConfProps.put("group.id", groupId);

KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerConfProps);
final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerConfProps);
synchronized (this) {
consumer.subscribe(topicNameList);
}
if (logger.isDebugEnabled()) {
logger.debug("Kafka consumer created: {}/{} -- {}, {}, {}",
if (MessageConsumerOpDispenser.logger.isDebugEnabled())
MessageConsumerOpDispenser.logger.debug("Kafka consumer created: {}/{} -- {}, {}, {}",
cacheKey,
consumer,
topicNameList,
autoCommitEnabled,
maxMsgCntPerCommit);
}
this.autoCommitEnabled,
this.maxMsgCntPerCommit);

opTimeTrackKafkaClient = new OpTimeTrackKafkaConsumer(
kafkaSpace,
asyncAPI,
msgPollIntervalInSec,
autoCommitEnabled,
maxMsgCntPerCommit,
this.kafkaSpace,
this.asyncAPI,
this.msgPollIntervalInSec,
this.autoCommitEnabled,
this.maxMsgCntPerCommit,
consumer,
kafkaAdapterMetrics,
EndToEndStartingTimeSource.valueOf(e2eStartTimeSrcParamStrFunc.apply(cycle).toUpperCase()),
this.kafkaAdapterMetrics,
EndToEndStartingTimeSource.valueOf(this.e2eStartTimeSrcParamStrFunc.apply(cycle).toUpperCase()),
this::getReceivedMessageSequenceTracker,
seqTrackingFunc.apply(cycle));
kafkaSpace.addOpTimeTrackKafkaClient(cacheKey, opTimeTrackKafkaClient);
this.seqTrackingFunc.apply(cycle));
this.kafkaSpace.addOpTimeTrackKafkaClient(cacheKey, opTimeTrackKafkaClient);
}

return opTimeTrackKafkaClient;
}

private ReceivedMessageSequenceTracker getReceivedMessageSequenceTracker(String topicName) {
return receivedMessageSequenceTrackersForTopicThreadLocal.get()
.computeIfAbsent(topicName, k -> createReceivedMessageSequenceTracker());
private ReceivedMessageSequenceTracker getReceivedMessageSequenceTracker(final String topicName) {
return this.receivedMessageSequenceTrackersForTopicThreadLocal.get()
.computeIfAbsent(topicName, k -> this.createReceivedMessageSequenceTracker());
}

private ReceivedMessageSequenceTracker createReceivedMessageSequenceTracker() {
return new ReceivedMessageSequenceTracker(kafkaAdapterMetrics.getMsgErrOutOfSeqCounter(),
kafkaAdapterMetrics.getMsgErrDuplicateCounter(),
kafkaAdapterMetrics.getMsgErrLossCounter());
return new ReceivedMessageSequenceTracker(this.kafkaAdapterMetrics.getMsgErrOutOfSeqCounter(),
this.kafkaAdapterMetrics.getMsgErrDuplicateCounter(),
this.kafkaAdapterMetrics.getMsgErrLossCounter());
}

protected List<String> getEffectiveTopicNameList(long cycle) {
String explicitTopicListStr = topicNameStrFunc.apply(cycle);
assert (StringUtils.isNotBlank(explicitTopicListStr));
protected List<String> getEffectiveTopicNameList(final long cycle) {
final String explicitTopicListStr = this.topicNameStrFunc.apply(cycle);
assert StringUtils.isNotBlank(explicitTopicListStr);

return Arrays.stream(StringUtils.split(explicitTopicListStr, ','))
.filter(s -> StringUtils.isNotBlank(s))
@@ -169,20 +164,18 @@ public class MessageConsumerOpDispenser extends KafkaBaseOpDispenser {
}

@Override
public KafkaOp apply(long cycle) {
List<String> topicNameList = getEffectiveTopicNameList(cycle);
String groupId = getEffectiveGroupId(cycle);
if (topicNameList.size() ==0 || StringUtils.isBlank(groupId)) {
throw new KafkaAdapterInvalidParamException(
"Effective consumer group name and/or topic names are needed for creating a consumer!");
}
public KafkaOp apply(final long cycle) {
final List<String> topicNameList = this.getEffectiveTopicNameList(cycle);
final String groupId = this.getEffectiveGroupId(cycle);
if ((0 == topicNameList.size()) || StringUtils.isBlank(groupId)) throw new KafkaAdapterInvalidParamException(
"Effective consumer group name and/or topic names are needed for creating a consumer!");

OpTimeTrackKafkaClient opTimeTrackKafkaConsumer =
getOrCreateOpTimeTrackKafkaConsumer(cycle, topicNameList, groupId);
final OpTimeTrackKafkaClient opTimeTrackKafkaConsumer =
this.getOrCreateOpTimeTrackKafkaConsumer(cycle, topicNameList, groupId);

return new KafkaOp(
kafkaAdapterMetrics,
kafkaSpace,
this.kafkaAdapterMetrics,
this.kafkaSpace,
opTimeTrackKafkaConsumer,
null);
}
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,6 +22,7 @@ import io.nosqlbench.adapter.kafka.ops.KafkaOp;
import io.nosqlbench.adapter.kafka.ops.OpTimeTrackKafkaClient;
import io.nosqlbench.adapter.kafka.ops.OpTimeTrackKafkaProducer;
import io.nosqlbench.adapter.kafka.util.KafkaAdapterUtil;
import io.nosqlbench.adapter.kafka.util.KafkaAdapterUtil.DOC_LEVEL_PARAMS;
import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
import io.nosqlbench.engine.api.metrics.EndToEndMetricsAdapterUtil;
import io.nosqlbench.engine.api.templating.ParsedOp;
@@ -31,8 +32,10 @@ import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.function.LongFunction;
import java.util.function.Predicate;
@@ -46,7 +49,7 @@ import java.util.LinkedHashSet;

public class MessageProducerOpDispenser extends KafkaBaseOpDispenser {

private final static Logger logger = LogManager.getLogger("MessageProducerOpDispenser");
private static final Logger logger = LogManager.getLogger("MessageProducerOpDispenser");

public static final String MSG_HEADER_OP_PARAM = "msg_header";
public static final String MSG_KEY_OP_PARAM = "msg_key";
@@ -61,133 +64,126 @@ public class MessageProducerOpDispenser extends KafkaBaseOpDispenser {
protected final LongFunction<Boolean> seqTrackingFunc;
protected final LongFunction<Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE>> msgSeqErrSimuTypeSetFunc;

public MessageProducerOpDispenser(DriverAdapter adapter,
ParsedOp op,
LongFunction<String> tgtNameFunc,
KafkaSpace kafkaSpace) {
public MessageProducerOpDispenser(final DriverAdapter adapter,
final ParsedOp op,
final LongFunction<String> tgtNameFunc,
final KafkaSpace kafkaSpace) {
super(adapter, op, tgtNameFunc, kafkaSpace);
this.producerClientConfMap.putAll(kafkaSpace.getKafkaClientConf().getProducerConfMap());
producerClientConfMap.put("bootstrap.servers", kafkaSpace.getBootstrapSvr());
producerClientConfMap.putAll(kafkaSpace.getKafkaClientConf().getProducerConfMap());
this.producerClientConfMap.put("bootstrap.servers", kafkaSpace.getBootstrapSvr());

this.txnBatchNum = parsedOp.getStaticConfigOr("txn_batch_num", Integer.valueOf(0));
txnBatchNum = this.parsedOp.getStaticConfigOr("txn_batch_num", Integer.valueOf(0));

this.msgHeaderJsonStrFunc = lookupOptionalStrOpValueFunc(MSG_HEADER_OP_PARAM);
this.msgKeyStrFunc = lookupOptionalStrOpValueFunc(MSG_KEY_OP_PARAM);
this.msgValueStrFunc = lookupMandtoryStrOpValueFunc(MSG_BODY_OP_PARAM);
msgHeaderJsonStrFunc = this.lookupOptionalStrOpValueFunc(MessageProducerOpDispenser.MSG_HEADER_OP_PARAM);
msgKeyStrFunc = this.lookupOptionalStrOpValueFunc(MessageProducerOpDispenser.MSG_KEY_OP_PARAM);
msgValueStrFunc = this.lookupMandtoryStrOpValueFunc(MessageProducerOpDispenser.MSG_BODY_OP_PARAM);

this.msgSeqErrSimuTypeSetFunc = getStaticErrSimuTypeSetOpValueFunc();
msgSeqErrSimuTypeSetFunc = this.getStaticErrSimuTypeSetOpValueFunc();
// Doc-level parameter: seq_tracking
this.seqTrackingFunc = lookupStaticBoolConfigValueFunc(
KafkaAdapterUtil.DOC_LEVEL_PARAMS.SEQ_TRACKING.label, false);
seqTrackingFunc = this.lookupStaticBoolConfigValueFunc(
DOC_LEVEL_PARAMS.SEQ_TRACKING.label, false);
}

private String getEffectiveClientId(long cycle) {
if (producerClientConfMap.containsKey("client.id")) {
String defaultClientIdPrefix = producerClientConfMap.get("client.id");
int clntIdx = (int) (cycle % kafkaClntCnt);
private String getEffectiveClientId(final long cycle) {
if (this.producerClientConfMap.containsKey("client.id")) {
final String defaultClientIdPrefix = this.producerClientConfMap.get("client.id");
final int clntIdx = (int) (cycle % this.kafkaClntCnt);

return defaultClientIdPrefix + "-" + clntIdx;
}
else {
return "";
return defaultClientIdPrefix + '-' + clntIdx;
}
return "";
}
private OpTimeTrackKafkaClient getOrCreateOpTimeTrackKafkaProducer(long cycle,
String topicName,
String clientId)
private OpTimeTrackKafkaClient getOrCreateOpTimeTrackKafkaProducer(final long cycle,
final String topicName,
final String clientId)
{
String cacheKey = KafkaAdapterUtil.buildCacheKey(
"producer-" + String.valueOf(cycle % kafkaClntCnt), topicName);
final String cacheKey = KafkaAdapterUtil.buildCacheKey(
"producer-" + cycle % this.kafkaClntCnt, topicName);

OpTimeTrackKafkaClient opTimeTrackKafkaClient = kafkaSpace.getOpTimeTrackKafkaClient(cacheKey);
if (opTimeTrackKafkaClient == null) {
Properties producerConfProps = new Properties();
producerConfProps.putAll(producerClientConfMap);
OpTimeTrackKafkaClient opTimeTrackKafkaClient = this.kafkaSpace.getOpTimeTrackKafkaClient(cacheKey);
if (null == opTimeTrackKafkaClient) {
final Properties producerConfProps = new Properties();
producerConfProps.putAll(this.producerClientConfMap);

if (StringUtils.isNotBlank(clientId))
if (StringUtils.isNotBlank(clientId)) {
producerConfProps.put("client.id", clientId);
else
} else {
producerConfProps.remove("client.id");
}

// When transaction batch number is less than 2, it is treated effectively as no-transaction
if (txnBatchNum < 2)
if (2 > txnBatchNum) {
producerConfProps.remove("transactional.id");
}

String baseTransactId = "";
boolean transactionEnabled = false;
if (producerConfProps.containsKey("transactional.id")) {
baseTransactId = producerConfProps.get("transactional.id").toString();
producerConfProps.put("transactional.id", baseTransactId + "-" + cacheKey);
transactionEnabled = StringUtils.isNotBlank(producerConfProps.get("transactional.id").toString());
baseTransactId = producerConfProps.getProperty("transactional.id").toString();
producerConfProps.put("transactional.id", baseTransactId + '-' + cacheKey);
transactionEnabled = StringUtils.isNotBlank(producerConfProps.getProperty("transactional.id").toString());
}

KafkaProducer<String, String> producer = new KafkaProducer<>(producerConfProps);
if (transactionEnabled) {
producer.initTransactions();
}
final KafkaProducer<String, String> producer = new KafkaProducer<>(producerConfProps);
if (transactionEnabled) producer.initTransactions();

if (logger.isDebugEnabled()) {
logger.debug("Producer created: {}/{} -- ({}, {}, {})",
if (MessageProducerOpDispenser.logger.isDebugEnabled())
MessageProducerOpDispenser.logger.debug("Producer created: {}/{} -- ({}, {}, {})",
cacheKey,
producer,
topicName,
transactionEnabled,
clientId);
}

opTimeTrackKafkaClient = new OpTimeTrackKafkaProducer(
kafkaSpace,
asyncAPI,
this.kafkaSpace,
this.asyncAPI,
transactionEnabled,
txnBatchNum,
seqTrackingFunc.apply(cycle),
msgSeqErrSimuTypeSetFunc.apply(cycle),
this.txnBatchNum,
this.seqTrackingFunc.apply(cycle),
this.msgSeqErrSimuTypeSetFunc.apply(cycle),
producer);
kafkaSpace.addOpTimeTrackKafkaClient(cacheKey, opTimeTrackKafkaClient);
this.kafkaSpace.addOpTimeTrackKafkaClient(cacheKey, opTimeTrackKafkaClient);
}

return opTimeTrackKafkaClient;
}

private ProducerRecord<String, String> createKafkaMessage(
long curCycle,
String topicName,
String msgHeaderRawJsonStr,
String msgKey,
String msgValue
final long curCycle,
final String topicName,
final String msgHeaderRawJsonStr,
final String msgKey,
final String msgValue
) {
if (StringUtils.isAllBlank(msgKey, msgValue)) {
if (StringUtils.isAllBlank(msgKey, msgValue))
throw new KafkaAdapterInvalidParamException("Message key and value can't both be empty!");
}

int messageSize = KafkaAdapterUtil.getStrObjSize(msgKey) + KafkaAdapterUtil.getStrObjSize(msgValue);

ProducerRecord<String, String> record = new ProducerRecord<>(topicName, msgKey, msgValue);
final ProducerRecord<String, String> record = new ProducerRecord<>(topicName, msgKey, msgValue);

// Check if msgHeaderRawJsonStr is a valid JSON string with a collection of key/value pairs
// - if Yes, convert it to a map
// - otherwise, log an error message and ignore message headers without throwing a runtime exception
Map<String, String> msgHeaderProperties = new HashMap<>();
if (!StringUtils.isBlank(msgHeaderRawJsonStr)) {
try {
msgHeaderProperties = KafkaAdapterUtil.convertJsonToMap(msgHeaderRawJsonStr);
} catch (Exception e) {
logger.warn(
"Error parsing message property JSON string {}, ignore message properties!",
msgHeaderRawJsonStr);
}
if (!StringUtils.isBlank(msgHeaderRawJsonStr)) try {
msgHeaderProperties = KafkaAdapterUtil.convertJsonToMap(msgHeaderRawJsonStr);
} catch (final Exception e) {
MessageProducerOpDispenser.logger.warn(
"Error parsing message property JSON string {}, ignore message properties!",
msgHeaderRawJsonStr);
}

for (Map.Entry<String, String> entry : msgHeaderProperties.entrySet()) {
String headerKey = entry.getKey();
String headerValue = entry.getValue();
for (final Entry<String, String> entry : msgHeaderProperties.entrySet()) {
final String headerKey = entry.getKey();
final String headerValue = entry.getValue();

messageSize += KafkaAdapterUtil.getStrObjSize(headerKey) + KafkaAdapterUtil.getStrObjSize(headerValue);

if (! StringUtils.isAnyBlank(headerKey, headerValue)) {
record.headers().add(headerKey, headerValue.getBytes());
}
if (! StringUtils.isAnyBlank(headerKey, headerValue))
record.headers().add(headerKey, headerValue.getBytes(StandardCharsets.UTF_8));

}

@@ -197,56 +193,52 @@ public class MessageProducerOpDispenser extends KafkaBaseOpDispenser {
messageSize += KafkaAdapterUtil.getStrObjSize(KafkaAdapterUtil.NB_MSG_SIZE_PROP);
messageSize += 6;

record.headers().add(KafkaAdapterUtil.NB_MSG_SEQ_PROP, String.valueOf(curCycle).getBytes());
record.headers().add(KafkaAdapterUtil.NB_MSG_SIZE_PROP, String.valueOf(messageSize).getBytes());
record.headers().add(KafkaAdapterUtil.NB_MSG_SEQ_PROP, String.valueOf(curCycle).getBytes(StandardCharsets.UTF_8));
record.headers().add(KafkaAdapterUtil.NB_MSG_SIZE_PROP, String.valueOf(messageSize).getBytes(StandardCharsets.UTF_8));

return record;
}

@Override
public KafkaOp apply(long cycle) {
String topicName = topicNameStrFunc.apply(cycle);
String clientId = getEffectiveClientId(cycle);
public KafkaOp apply(final long cycle) {
final String topicName = this.topicNameStrFunc.apply(cycle);
final String clientId = this.getEffectiveClientId(cycle);

OpTimeTrackKafkaClient opTimeTrackKafkaProducer =
getOrCreateOpTimeTrackKafkaProducer(cycle, topicName, clientId);
final OpTimeTrackKafkaClient opTimeTrackKafkaProducer =
this.getOrCreateOpTimeTrackKafkaProducer(cycle, topicName, clientId);

ProducerRecord<String, String> message = createKafkaMessage(
final ProducerRecord<String, String> message = this.createKafkaMessage(
cycle,
topicName,
msgHeaderJsonStrFunc.apply(cycle),
msgKeyStrFunc.apply(cycle),
msgValueStrFunc.apply(cycle)
this.msgHeaderJsonStrFunc.apply(cycle),
this.msgKeyStrFunc.apply(cycle),
this.msgValueStrFunc.apply(cycle)
);

return new KafkaOp(
kafkaAdapterMetrics,
kafkaSpace,
this.kafkaAdapterMetrics,
this.kafkaSpace,
opTimeTrackKafkaProducer,
message);
}

protected LongFunction<Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE>> getStaticErrSimuTypeSetOpValueFunc() {
LongFunction<Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE>> setStringLongFunction;
setStringLongFunction = (l) ->
parsedOp.getOptionalStaticValue(KafkaAdapterUtil.DOC_LEVEL_PARAMS.SEQERR_SIMU.label, String.class)
final LongFunction<Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE>> setStringLongFunction;
setStringLongFunction = l ->
this.parsedOp.getOptionalStaticValue(DOC_LEVEL_PARAMS.SEQERR_SIMU.label, String.class)
.filter(Predicate.not(String::isEmpty))
.map(value -> {
Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE> set = new HashSet<>();

if (StringUtils.contains(value,',')) {
set = Arrays.stream(value.split(","))
.map(EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE::parseSimuType)
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.toCollection(LinkedHashSet::new));
}
if (StringUtils.contains(value,',')) set = Arrays.stream(value.split(","))
.map(EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE::parseSimuType)
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.toCollection(LinkedHashSet::new));

return set;
}).orElse(Collections.emptySet());
logger.info(
KafkaAdapterUtil.DOC_LEVEL_PARAMS.SEQERR_SIMU.label + ": {}",
setStringLongFunction.apply(0));
MessageProducerOpDispenser.logger.info("{}: {}", DOC_LEVEL_PARAMS.SEQERR_SIMU.label, setStringLongFunction.apply(0));
return setStringLongFunction;
}
}
@@ -1,25 +1,24 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package io.nosqlbench.adapter.kafka.exception;

public class KafkaAdapterUnsupportedOpException extends RuntimeException {

public KafkaAdapterUnsupportedOpException(String kafkaOpType) {
super("Unsupported Kafka adapter operation type: \"" + kafkaOpType + "\"");
public KafkaAdapterUnsupportedOpException(final String kafkaOpType) {
super("Unsupported Kafka adapter operation type: \"" + kafkaOpType + '"');
}
}
@@ -1,18 +1,17 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package io.nosqlbench.adapter.kafka.ops;
@@ -31,11 +30,13 @@ import org.apache.kafka.common.header.Headers;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;

public class OpTimeTrackKafkaConsumer extends OpTimeTrackKafkaClient {
private final static Logger logger = LogManager.getLogger("OpTimeTrackKafkaConsumer");
private static final Logger logger = LogManager.getLogger("OpTimeTrackKafkaConsumer");
private final EndToEndStartingTimeSource e2eStartingTimeSrc;
private final int msgPoolIntervalInMs;
private final boolean asyncMsgCommit;
@@ -46,20 +47,20 @@ public class OpTimeTrackKafkaConsumer extends OpTimeTrackKafkaClient {
private final ThreadLocal<Integer> manualCommitTrackingCnt = ThreadLocal.withInitial(() -> 0);

private final KafkaConsumer<String, String> consumer;
private Histogram e2eMsgProcLatencyHistogram;
private final Histogram e2eMsgProcLatencyHistogram;
private final Function<String, ReceivedMessageSequenceTracker> receivedMessageSequenceTrackerForTopic;
private final boolean seqTracking;

public OpTimeTrackKafkaConsumer(KafkaSpace kafkaSpace,
boolean asyncMsgCommit,
int msgPoolIntervalInMs,
boolean autoCommitEnabled,
int maxMsgCntPerCommit,
KafkaConsumer<String, String> consumer,
KafkaAdapterMetrics kafkaAdapterMetrics,
EndToEndStartingTimeSource e2eStartingTimeSrc,
Function<String, ReceivedMessageSequenceTracker> receivedMessageSequenceTrackerForTopic,
boolean seqTracking) {
public OpTimeTrackKafkaConsumer(final KafkaSpace kafkaSpace,
final boolean asyncMsgCommit,
final int msgPoolIntervalInMs,
final boolean autoCommitEnabled,
final int maxMsgCntPerCommit,
final KafkaConsumer<String, String> consumer,
final KafkaAdapterMetrics kafkaAdapterMetrics,
final EndToEndStartingTimeSource e2eStartingTimeSrc,
final Function<String, ReceivedMessageSequenceTracker> receivedMessageSequenceTrackerForTopic,
final boolean seqTracking) {
super(kafkaSpace);
this.msgPoolIntervalInMs = msgPoolIntervalInMs;
this.asyncMsgCommit = asyncMsgCommit;
@@ -67,53 +68,49 @@ public class OpTimeTrackKafkaConsumer extends OpTimeTrackKafkaClient {
this.maxMsgCntPerCommit = maxMsgCntPerCommit;
this.consumer = consumer;
this.e2eStartingTimeSrc = e2eStartingTimeSrc;
this.e2eMsgProcLatencyHistogram = kafkaAdapterMetrics.getE2eMsgProcLatencyHistogram();
e2eMsgProcLatencyHistogram = kafkaAdapterMetrics.getE2eMsgProcLatencyHistogram();
this.receivedMessageSequenceTrackerForTopic = receivedMessageSequenceTrackerForTopic;
this.seqTracking = seqTracking;
}

public int getManualCommitTrackingCnt() { return manualCommitTrackingCnt.get(); }
public int getManualCommitTrackingCnt() { return this.manualCommitTrackingCnt.get(); }
public void incManualCommitTrackingCnt() {
int curVal = getManualCommitTrackingCnt();
manualCommitTrackingCnt.set(curVal + 1);
final int curVal = this.getManualCommitTrackingCnt();
this.manualCommitTrackingCnt.set(curVal + 1);
}
public void resetManualCommitTrackingCnt() {
manualCommitTrackingCnt.set(0);
this.manualCommitTrackingCnt.set(0);
}

private boolean msgCommitNeeded(long cycle) {
private boolean msgCommitNeeded(final long cycle) {
// Whether to commit the transaction which happens when:
// - "txn_batch_num" has been reached since last reset
boolean commitNeeded = !autoCommitEnabled;
boolean commitNeeded = !this.autoCommitEnabled;

if (commitNeeded) {
int msgCommitTackingCnt = manualCommitTrackingCnt.get();
final int msgCommitTackingCnt = this.manualCommitTrackingCnt.get();

if ( ( (msgCommitTackingCnt > 0) && ((msgCommitTackingCnt % maxMsgCntPerCommit) == 0) ) ||
( cycle >= (kafkaSpace.getTotalCycleNum() - 1) ) ) {
if ( 0 < msgCommitTackingCnt && 0 == msgCommitTackingCnt % maxMsgCntPerCommit ||
cycle >= this.kafkaSpace.getTotalCycleNum() - 1) {
commitNeeded = true;

if (logger.isDebugEnabled()) {
logger.debug("Manually commit message ({}, {}, {})",
manualCommitTrackingCnt, msgCommitTackingCnt, cycle);
}
}
else {
commitNeeded = false;
if (OpTimeTrackKafkaConsumer.logger.isDebugEnabled())
OpTimeTrackKafkaConsumer.logger.debug("Manually commit message ({}, {}, {})",
this.manualCommitTrackingCnt, msgCommitTackingCnt, cycle);
}
else commitNeeded = false;
}

return commitNeeded;
}

private String printRecvedMsg(ConsumerRecord<String, String> record) {
Headers headers = record.headers();
Header nbMsgSeqHeader = headers.lastHeader(KafkaAdapterUtil.NB_MSG_SEQ_PROP);
private String printRecvedMsg(final ConsumerRecord<String, String> record) {
final Headers headers = record.headers();
final Header nbMsgSeqHeader = headers.lastHeader(KafkaAdapterUtil.NB_MSG_SEQ_PROP);

StringBuilder sb = new StringBuilder();
if (nbMsgSeqHeader != null) {
sb.append("Header (MsgSeq): " + new String(nbMsgSeqHeader.value()) + "; ");
}
final StringBuilder sb = new StringBuilder();
if (null != nbMsgSeqHeader)
sb.append("Header (MsgSeq): " + new String(nbMsgSeqHeader.value(), StandardCharsets.UTF_8) + "; ");
sb.append("Key: " + record.key() + "; ");
sb.append("Value: " + record.value() + "; ");
@@ -122,123 +119,113 @@ public class OpTimeTrackKafkaConsumer extends OpTimeTrackKafkaClient {
}

@Override
void cycleMsgProcess(long cycle, Object cycleObj) {
if (kafkaSpace.isShuttigDown()) {
return;
}
void cycleMsgProcess(final long cycle, final Object cycleObj) {
if (this.kafkaSpace.isShuttigDown()) return;

synchronized (this) {
ConsumerRecords<String, String> records = consumer.poll(msgPoolIntervalInMs);
for (ConsumerRecord<String, String> record : records) {
if (record != null) {
if (logger.isDebugEnabled()) {
Header msg_seq_header = record.headers().lastHeader(KafkaAdapterUtil.MSG_SEQUENCE_NUMBER);
logger.debug(
final ConsumerRecords<String, String> records = this.consumer.poll(this.msgPoolIntervalInMs);
for (final ConsumerRecord<String, String> record : records)
if (null != record) {
if (OpTimeTrackKafkaConsumer.logger.isDebugEnabled()) {
final Header msg_seq_header = record.headers().lastHeader(KafkaAdapterUtil.MSG_SEQUENCE_NUMBER);
OpTimeTrackKafkaConsumer.logger.debug(
"Receiving message is successful: [{}] - offset({}), cycle ({}), e2e_latency_ms({}), e2e_seq_number({})",
printRecvedMsg(record),
this.printRecvedMsg(record),
record.offset(),
cycle,
System.currentTimeMillis() - record.timestamp(),
(msg_seq_header != null ? new String(msg_seq_header.value()) : "null"));
null != msg_seq_header ? new String(msg_seq_header.value(), StandardCharsets.UTF_8) : "null");
}

if (!autoCommitEnabled) {
boolean bCommitMsg = msgCommitNeeded(cycle);
if (!this.autoCommitEnabled) {
final boolean bCommitMsg = this.msgCommitNeeded(cycle);
if (bCommitMsg) {
if (!asyncMsgCommit) {
consumer.commitSync();
checkAndUpdateMessageE2EMetrics(record);
if (logger.isDebugEnabled()) {
logger.debug(
if (!this.asyncMsgCommit) {
this.consumer.commitSync();
this.checkAndUpdateMessageE2EMetrics(record);
if (OpTimeTrackKafkaConsumer.logger.isDebugEnabled())
OpTimeTrackKafkaConsumer.logger.debug(
"Sync message commit is successful: cycle ({}), maxMsgCntPerCommit ({})",
cycle,
maxMsgCntPerCommit);
this.maxMsgCntPerCommit);
} else this.consumer.commitAsync(new OffsetCommitCallback() {
@Override
public void onComplete(final Map<TopicPartition, OffsetAndMetadata> map, final Exception e) {
if (OpTimeTrackKafkaConsumer.logger.isDebugEnabled()) if (null == e) {
OpTimeTrackKafkaConsumer.logger.debug(
"Async message commit succeeded: cycle({}), maxMsgCntPerCommit ({})",
cycle,
OpTimeTrackKafkaConsumer.this.maxMsgCntPerCommit);
OpTimeTrackKafkaConsumer.this.checkAndUpdateMessageE2EMetrics(record);
} else OpTimeTrackKafkaConsumer.logger.debug(
"Async message commit failed: cycle ({}), maxMsgCntPerCommit ({}), error ({})",
cycle,
OpTimeTrackKafkaConsumer.this.maxMsgCntPerCommit,
e.getMessage());
}
} else {
consumer.commitAsync(new OffsetCommitCallback() {
@Override
public void onComplete(Map<TopicPartition, OffsetAndMetadata> map, Exception e) {
if (logger.isDebugEnabled()) {
if (e == null) {
logger.debug(
"Async message commit succeeded: cycle({}), maxMsgCntPerCommit ({})",
cycle,
maxMsgCntPerCommit);
checkAndUpdateMessageE2EMetrics(record);
} else {
logger.debug(
"Async message commit failed: cycle ({}), maxMsgCntPerCommit ({}), error ({})",
cycle,
maxMsgCntPerCommit,
e.getMessage());
}
}
}
});
}
});

resetManualCommitTrackingCnt();
} else {
checkAndUpdateMessageE2EMetrics(record);
incManualCommitTrackingCnt();
this.resetManualCommitTrackingCnt();
} else {
this.checkAndUpdateMessageE2EMetrics(record);
this.incManualCommitTrackingCnt();
}
}
checkAndUpdateMessageE2EMetrics(record);
this.checkAndUpdateMessageE2EMetrics(record);
}
}
}
}

private void checkAndUpdateMessageE2EMetrics(ConsumerRecord<String, String> record) {
private void checkAndUpdateMessageE2EMetrics(final ConsumerRecord<String, String> record) {
// keep track of message errors and update error counters
if(seqTracking) checkAndUpdateMessageErrorCounter(record);
updateE2ELatencyMetric(record);
if(this.seqTracking) {
this.checkAndUpdateMessageErrorCounter(record);
}
this.updateE2ELatencyMetric(record);
}

private void updateE2ELatencyMetric(ConsumerRecord<String, String> record) {
private void updateE2ELatencyMetric(final ConsumerRecord<String, String> record) {
long startTimeStamp = 0L;
switch (e2eStartingTimeSrc) {
case MESSAGE_PUBLISH_TIME:
startTimeStamp = record.timestamp();
break;
if (Objects.requireNonNull(this.e2eStartingTimeSrc) == EndToEndStartingTimeSource.MESSAGE_PUBLISH_TIME) {
startTimeStamp = record.timestamp();
}
if (startTimeStamp != 0L) {
long e2eMsgLatency = System.currentTimeMillis() - startTimeStamp;
e2eMsgProcLatencyHistogram.update(e2eMsgLatency);
if (0L != startTimeStamp) {
final long e2eMsgLatency = System.currentTimeMillis() - startTimeStamp;
this.e2eMsgProcLatencyHistogram.update(e2eMsgLatency);
}
}

private void checkAndUpdateMessageErrorCounter(ConsumerRecord<String, String> record) {
Header msg_seq_number_header = record.headers().lastHeader(KafkaAdapterUtil.MSG_SEQUENCE_NUMBER);
String msgSeqIdStr = msg_seq_number_header != null ? new String(msg_seq_number_header.value()) : StringUtils.EMPTY;
private void checkAndUpdateMessageErrorCounter(final ConsumerRecord<String, String> record) {
final Header msg_seq_number_header = record.headers().lastHeader(KafkaAdapterUtil.MSG_SEQUENCE_NUMBER);
final String msgSeqIdStr = (null != msg_seq_number_header) ? new String(msg_seq_number_header.value(), StandardCharsets.UTF_8) : StringUtils.EMPTY;
if (!StringUtils.isBlank(msgSeqIdStr)) {
long sequenceNumber = Long.parseLong(msgSeqIdStr);
ReceivedMessageSequenceTracker receivedMessageSequenceTracker =
receivedMessageSequenceTrackerForTopic.apply(record.topic());
final long sequenceNumber = Long.parseLong(msgSeqIdStr);
final ReceivedMessageSequenceTracker receivedMessageSequenceTracker =
this.receivedMessageSequenceTrackerForTopic.apply(record.topic());
receivedMessageSequenceTracker.sequenceNumberReceived(sequenceNumber);
} else {
logger.warn("Message sequence number header is null, skipping e2e message error metrics generation.");
}
} else
OpTimeTrackKafkaConsumer.logger.warn("Message sequence number header is null, skipping e2e message error metrics generation.");
}

@Override
public void close() {
try {
if (consumer != null) {
if (!asyncMsgCommit)
consumer.commitSync();
else
consumer.commitAsync();
if (null != consumer) {
if (!this.asyncMsgCommit) {
this.consumer.commitSync();
} else {
this.consumer.commitAsync();
}

consumer.close();
this.consumer.close();
}

this.manualCommitTrackingCnt.remove();
manualCommitTrackingCnt.remove();
}
catch (IllegalStateException ise) {
catch (final IllegalStateException ise) {
// If a consumer is already closed, that's fine.
}
catch (Exception e) {
catch (final Exception e) {
e.printStackTrace();
}
}
@@ -1,18 +1,17 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package io.nosqlbench.adapter.kafka.ops;
@@ -33,6 +32,7 @@ import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
@@ -43,7 +43,7 @@ import org.apache.kafka.common.errors.InterruptException;

public class OpTimeTrackKafkaProducer extends OpTimeTrackKafkaClient {

private final static Logger logger = LogManager.getLogger("OpTimeTrackKafkaProducer");
private static final Logger logger = LogManager.getLogger("OpTimeTrackKafkaProducer");

private final boolean transactionEnabled;

@@ -64,224 +64,201 @@ public class OpTimeTrackKafkaProducer extends OpTimeTrackKafkaClient {

// Keep track the transaction count per thread
private static ThreadLocal<Integer>
private static final ThreadLocal<Integer>
txnBatchTrackingCntTL = ThreadLocal.withInitial(() -> 0);

private static ThreadLocal<TxnProcResult>
private static final ThreadLocal<TxnProcResult>
txnProcResultTL = ThreadLocal.withInitial(() -> TxnProcResult.SUCCESS);

private final KafkaProducer<String, String> producer;

public OpTimeTrackKafkaProducer(KafkaSpace kafkaSpace,
boolean asyncMsgAck,
boolean transactEnabledConfig,
int txnBatchNum,
boolean seqTracking,
Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE> errSimuTypeSet,
KafkaProducer<String, String> producer) {
public OpTimeTrackKafkaProducer(final KafkaSpace kafkaSpace,
final boolean asyncMsgAck,
final boolean transactEnabledConfig,
final int txnBatchNum,
final boolean seqTracking,
final Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE> errSimuTypeSet,
final KafkaProducer<String, String> producer) {
super(kafkaSpace);
this.asyncMsgAck = asyncMsgAck;
this.transactEnabledConfig = transactEnabledConfig;
this.txnBatchNum = txnBatchNum;
this.seqTracking = seqTracking;
this.errSimuTypeSet = errSimuTypeSet;
this.transactionEnabled = transactEnabledConfig && (txnBatchNum > 2);
transactionEnabled = transactEnabledConfig && 2 < txnBatchNum;
this.producer = producer;
}

public static int getTxnBatchTrackingCntTL() {
return txnBatchTrackingCntTL.get();
return OpTimeTrackKafkaProducer.txnBatchTrackingCntTL.get();
}
public static void incTxnBatchTrackingCnt() {
txnBatchTrackingCntTL.set(getTxnBatchTrackingCntTL() + 1);
OpTimeTrackKafkaProducer.txnBatchTrackingCntTL.set(OpTimeTrackKafkaProducer.getTxnBatchTrackingCntTL() + 1);
}
public static void resetTxnBatchTrackingCnt() {
txnBatchTrackingCntTL.set(0);
OpTimeTrackKafkaProducer.txnBatchTrackingCntTL.set(0);
}

public static TxnProcResult getTxnProcResultTL() {
return txnProcResultTL.get();
return OpTimeTrackKafkaProducer.txnProcResultTL.get();
}
public static void setTxnProcResultTL(TxnProcResult result) {
txnProcResultTL.set(result);
public static void setTxnProcResultTL(final TxnProcResult result) {
OpTimeTrackKafkaProducer.txnProcResultTL.set(result);
}
public static void resetTxnProcResultTL(TxnProcResult result) {
txnProcResultTL.set(TxnProcResult.SUCCESS);
public static void resetTxnProcResultTL(final TxnProcResult result) {
OpTimeTrackKafkaProducer.txnProcResultTL.set(TxnProcResult.SUCCESS);
}
private void processMsgTransaction(long cycle, KafkaProducer<String, String> producer) {
private void processMsgTransaction(final long cycle, final KafkaProducer<String, String> producer) {
TxnProcResult result = TxnProcResult.SUCCESS;

if (transactionEnabled) {
int txnBatchTackingCnt = getTxnBatchTrackingCntTL();
if (this.transactionEnabled) {
final int txnBatchTackingCnt = OpTimeTrackKafkaProducer.getTxnBatchTrackingCntTL();

try {
if (txnBatchTackingCnt == 0) {
if (0 == txnBatchTackingCnt) {
// Start a new transaction when first starting the processing
producer.beginTransaction();
if (logger.isDebugEnabled()) {
logger.debug("New transaction started ( {}, {}, {}, {}, {} )",
cycle, producer, transactEnabledConfig, txnBatchNum, getTxnBatchTrackingCntTL());
}
} else if ( (txnBatchTackingCnt % (txnBatchNum - 1) == 0) ||
(cycle == (kafkaSpace.getTotalCycleNum() - 1)) ) {
if (OpTimeTrackKafkaProducer.logger.isDebugEnabled())
OpTimeTrackKafkaProducer.logger.debug("New transaction started ( {}, {}, {}, {}, {} )",
cycle, producer, this.transactEnabledConfig, this.txnBatchNum, OpTimeTrackKafkaProducer.getTxnBatchTrackingCntTL());
} else if ((0 == (txnBatchTackingCnt % (txnBatchNum - 1))) ||
(cycle == (this.kafkaSpace.getTotalCycleNum() - 1))) synchronized (this) {
// Commit the current transaction
if (OpTimeTrackKafkaProducer.logger.isDebugEnabled())
OpTimeTrackKafkaProducer.logger.debug("Start committing transaction ... ( {}, {}, {}, {}, {} )",
cycle, producer, this.transactEnabledConfig, this.txnBatchNum, OpTimeTrackKafkaProducer.getTxnBatchTrackingCntTL());
producer.commitTransaction();
if (OpTimeTrackKafkaProducer.logger.isDebugEnabled())
OpTimeTrackKafkaProducer.logger.debug("Transaction committed ( {}, {}, {}, {}, {} )",
cycle, producer, this.transactEnabledConfig, this.txnBatchNum, OpTimeTrackKafkaProducer.getTxnBatchTrackingCntTL());

synchronized (this) {
// Commit the current transaction
if (logger.isDebugEnabled()) {
logger.debug("Start committing transaction ... ( {}, {}, {}, {}, {} )",
cycle, producer, transactEnabledConfig, txnBatchNum, getTxnBatchTrackingCntTL());
}
producer.commitTransaction();
if (logger.isDebugEnabled()) {
logger.debug("Transaction committed ( {}, {}, {}, {}, {} )",
cycle, producer, transactEnabledConfig, txnBatchNum, getTxnBatchTrackingCntTL());
}

// Start a new transaction
producer.beginTransaction();
if (logger.isDebugEnabled()) {
logger.debug("New transaction started ( {}, {}, {}, {}, {} )",
cycle, producer, transactEnabledConfig, txnBatchNum, getTxnBatchTrackingCntTL());
}
}
// Start a new transaction
producer.beginTransaction();
if (OpTimeTrackKafkaProducer.logger.isDebugEnabled())
OpTimeTrackKafkaProducer.logger.debug("New transaction started ( {}, {}, {}, {}, {} )",
cycle, producer, this.transactEnabledConfig, this.txnBatchNum, OpTimeTrackKafkaProducer.getTxnBatchTrackingCntTL());
}
}
catch (Exception e) {
catch (final Exception e) {
e.printStackTrace();
if ( (e instanceof IllegalStateException) ||
(e instanceof ProducerFencedException) ||
(e instanceof UnsupportedOperationException) ||
(e instanceof AuthorizationException) ) {
result = TxnProcResult.FATAL_ERROR;
}
else if ( (e instanceof TimeoutException ) ||
(e instanceof InterruptException)) {
result = TxnProcResult.RECOVERABLE_ERROR;
}
else {
result = TxnProcResult.UNKNOWN_ERROR;
}
if ( e instanceof IllegalStateException ||
e instanceof ProducerFencedException ||
e instanceof UnsupportedOperationException ||
e instanceof AuthorizationException) result = TxnProcResult.FATAL_ERROR;
else if ( e instanceof TimeoutException ||
e instanceof InterruptException) result = TxnProcResult.RECOVERABLE_ERROR;
else result = TxnProcResult.UNKNOWN_ERROR;
}
}

setTxnProcResultTL(result);
OpTimeTrackKafkaProducer.setTxnProcResultTL(result);
}
@Override
|
||||
void cycleMsgProcess(long cycle, Object cycleObj) {
|
||||
void cycleMsgProcess(final long cycle, final Object cycleObj) {
|
||||
// For producer, cycleObj represents a "message" (ProducerRecord)
|
||||
assert (cycleObj != null);
|
||||
assert null != cycleObj;
|
||||
|
||||
if (kafkaSpace.isShuttigDown()) {
|
||||
if (transactionEnabled) {
|
||||
try {
|
||||
producer.abortTransaction();
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Abort open transaction while shutting down ( {}, {}, {}, {}, {} )",
|
||||
cycle, producer, transactEnabledConfig, txnBatchNum, getTxnBatchTrackingCntTL());
|
||||
}
|
||||
}
|
||||
catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
if (this.kafkaSpace.isShuttigDown()) {
|
||||
if (this.transactionEnabled) try {
|
||||
this.producer.abortTransaction();
|
||||
if (OpTimeTrackKafkaProducer.logger.isDebugEnabled())
OpTimeTrackKafkaProducer.logger.debug("Abort open transaction while shutting down ( {}, {}, {}, {}, {} )",
cycle, this.producer, this.transactEnabledConfig, this.txnBatchNum, OpTimeTrackKafkaProducer.getTxnBatchTrackingCntTL());
} catch (final Exception e) {
e.printStackTrace();
}
return;
}

processMsgTransaction(cycle, producer);
TxnProcResult result = getTxnProcResultTL();
this.processMsgTransaction(cycle, this.producer);
final TxnProcResult result = OpTimeTrackKafkaProducer.getTxnProcResultTL();

if (result == TxnProcResult.RECOVERABLE_ERROR) {
try {
producer.abortTransaction();
}
catch (Exception e) {
throw new KafkaAdapterUnexpectedException("Aborting transaction failed!");
}
} else if (result == TxnProcResult.FATAL_ERROR) {
throw new KafkaAdapterUnexpectedException("Fatal error when initializing or committing transactions!");
} else if (result == TxnProcResult.UNKNOWN_ERROR) {
logger.debug("Unexpected error when initializing or committing transactions!");
if (TxnProcResult.RECOVERABLE_ERROR == result) try {
this.producer.abortTransaction();
} catch (final Exception e) {
throw new KafkaAdapterUnexpectedException("Aborting transaction failed!");
}
else if (TxnProcResult.FATAL_ERROR == result)
throw new KafkaAdapterUnexpectedException("Fatal error when initializing or committing transactions!");
else if (TxnProcResult.UNKNOWN_ERROR == result)
OpTimeTrackKafkaProducer.logger.debug("Unexpected error when initializing or committing transactions!");

ProducerRecord<String, String> message = (ProducerRecord<String, String>) cycleObj;
if (seqTracking) {
long nextSequenceNumber = getMessageSequenceNumberSendingHandler(message.topic())
.getNextSequenceNumber(errSimuTypeSet);
message.headers().add(KafkaAdapterUtil.MSG_SEQUENCE_NUMBER, String.valueOf(nextSequenceNumber).getBytes());
final ProducerRecord<String, String> message = (ProducerRecord<String, String>) cycleObj;
if (this.seqTracking) {
final long nextSequenceNumber = this.getMessageSequenceNumberSendingHandler(message.topic())
.getNextSequenceNumber(this.errSimuTypeSet);
message.headers().add(KafkaAdapterUtil.MSG_SEQUENCE_NUMBER, String.valueOf(nextSequenceNumber).getBytes(StandardCharsets.UTF_8));
}
try {
if (result == TxnProcResult.SUCCESS) {
Future<RecordMetadata> responseFuture = producer.send(message, new Callback() {
if (TxnProcResult.SUCCESS == result) {
final Future<RecordMetadata> responseFuture = this.producer.send(message, new Callback() {
@Override
public void onCompletion(RecordMetadata recordMetadata, Exception e) {
if (asyncMsgAck) {
if (logger.isDebugEnabled()) {
logger.debug("Message sending with async ack. is successful ({}) - {}, {}",
cycle, producer, recordMetadata);
}
}
public void onCompletion(final RecordMetadata recordMetadata, final Exception e) {
if (OpTimeTrackKafkaProducer.this.asyncMsgAck)
if (OpTimeTrackKafkaProducer.logger.isDebugEnabled())
OpTimeTrackKafkaProducer.logger.debug("Message sending with async ack. is successful ({}) - {}, {}",
cycle, OpTimeTrackKafkaProducer.this.producer, recordMetadata);
}
});

if (!asyncMsgAck) {
try {
RecordMetadata recordMetadata = responseFuture.get();
if (logger.isDebugEnabled()) {
logger.debug("Message sending with sync ack. is successful ({}) - {}, {}",
cycle, producer, recordMetadata);
}
} catch (InterruptedException | ExecutionException e) {
KafkaAdapterUtil.messageErrorHandling(
e,
kafkaSpace.isStrictMsgErrorHandling(),
"Unexpected error when waiting to receive message-send ack from the Kafka cluster." +
"\n-----\n" + e);
}
if (!this.asyncMsgAck) try {
final RecordMetadata recordMetadata = responseFuture.get();
if (OpTimeTrackKafkaProducer.logger.isDebugEnabled())
OpTimeTrackKafkaProducer.logger.debug("Message sending with sync ack. is successful ({}) - {}, {}",
cycle, this.producer, recordMetadata);
} catch (final InterruptedException | ExecutionException e) {
KafkaAdapterUtil.messageErrorHandling(
e,
this.kafkaSpace.isStrictMsgErrorHandling(),
"Unexpected error when waiting to receive message-send ack from the Kafka cluster." +
"\n-----\n" + e);
}

incTxnBatchTrackingCnt();
OpTimeTrackKafkaProducer.incTxnBatchTrackingCnt();
}

}
catch ( ProducerFencedException | OutOfOrderSequenceException |
UnsupportedOperationException | AuthorizationException e) {
if (logger.isDebugEnabled()) {
logger.debug("Fatal error when sending a message ({}) - {}, {}",
cycle, producer, message);
}
catch ( final ProducerFencedException | OutOfOrderSequenceException |
UnsupportedOperationException | AuthorizationException e) {
if (OpTimeTrackKafkaProducer.logger.isDebugEnabled())
OpTimeTrackKafkaProducer.logger.debug("Fatal error when sending a message ({}) - {}, {}",
cycle, this.producer, message);
throw new KafkaAdapterUnexpectedException(e);
}
catch (IllegalStateException | KafkaException e) {
if (transactionEnabled) {
catch (final IllegalStateException | KafkaException e) {
if (this.transactionEnabled) {

}
}
catch (Exception e) {
catch (final Exception e) {
throw new KafkaAdapterUnexpectedException(e);
}
}

@Override
public void close() {
try {
if (producer != null) {
if (transactionEnabled) producer.commitTransaction();
producer.close();
if (null != producer) {
if (this.transactionEnabled) {
this.producer.commitTransaction();
}
this.producer.close();
}

this.txnBatchTrackingCntTL.remove();
txnBatchTrackingCntTL.remove();
}
catch (IllegalStateException ise) {
catch (final IllegalStateException ise) {
// If a producer is already closed, that's fine.
}
catch (Exception e) {
catch (final Exception e) {
e.printStackTrace();
}
}

private MessageSequenceNumberSendingHandler getMessageSequenceNumberSendingHandler(String topicName) {
return MessageSequenceNumberSendingHandlersThreadLocal.get()
private MessageSequenceNumberSendingHandler getMessageSequenceNumberSendingHandler(final String topicName) {
return this.MessageSequenceNumberSendingHandlersThreadLocal.get()
.computeIfAbsent(topicName, k -> new MessageSequenceNumberSendingHandler());
}
}

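The producer code above keeps one sequence-number handler per topic in a thread-local map. A minimal, self-contained sketch of that caching pattern follows; class and method names here are illustrative only, not the adapter's API.

import java.util.HashMap;
import java.util.Map;

public class PerTopicSequencer {
    // One map per thread, one counter per topic within that map.
    private final ThreadLocal<Map<String, Long>> perThreadCounters =
        ThreadLocal.withInitial(HashMap::new);

    public long nextSequenceNumber(String topicName) {
        Map<String, Long> counters = perThreadCounters.get();
        long next = counters.getOrDefault(topicName, 0L) + 1L;
        counters.put(topicName, next);
        return next;
    }

    public static void main(String[] args) {
        PerTopicSequencer seq = new PerTopicSequencer();
        System.out.println(seq.nextSequenceNumber("topic-a")); // 1
        System.out.println(seq.nextSequenceNumber("topic-a")); // 2
        System.out.println(seq.nextSequenceNumber("topic-b")); // 1
    }
}

Because the map is thread-local, no synchronization is needed; each worker thread numbers its own messages independently, which matches how the dispenser threads operate.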
@@ -15,20 +15,21 @@
*/

package io.nosqlbench.adapter.kafka.util;

import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Timer;
import io.nosqlbench.adapter.kafka.dispensers.KafkaBaseOpDispenser;
import io.nosqlbench.api.config.NBNamedElement;
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.config.NBLabels;
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class KafkaAdapterMetrics implements NBNamedElement {
public class KafkaAdapterMetrics {

private final static Logger logger = LogManager.getLogger("S4JAdapterMetrics");

private final String defaultAdapterMetricsPrefix;
private static final Logger logger = LogManager.getLogger("S4JAdapterMetrics");
private final NBLabels labels;

private Histogram messageSizeHistogram;
private Timer bindTimer;
@@ -41,63 +42,43 @@ public class KafkaAdapterMetrics implements NBNamedElement {
private Counter msgErrDuplicateCounter;

public Histogram getE2eMsgProcLatencyHistogram() {
return e2eMsgProcLatencyHistogram;
return this.e2eMsgProcLatencyHistogram;
}

// end-to-end latency
private Histogram e2eMsgProcLatencyHistogram;
private KafkaBaseOpDispenser kafkaBaseOpDispenser;
private final KafkaBaseOpDispenser kafkaBaseOpDispenser;

public KafkaAdapterMetrics(KafkaBaseOpDispenser kafkaBaseOpDispenser, String defaultMetricsPrefix) {
public KafkaAdapterMetrics(final KafkaBaseOpDispenser kafkaBaseOpDispenser, final NBLabeledElement labeledParent) {
this.kafkaBaseOpDispenser = kafkaBaseOpDispenser;
this.defaultAdapterMetricsPrefix = defaultMetricsPrefix;
}

@Override
public String getName() {
return "KafkaAdapterMetrics";
labels=labeledParent.getLabels().and("name",KafkaAdapterMetrics.class.getSimpleName());
}

public void initS4JAdapterInstrumentation() {
// Histogram metrics
this.messageSizeHistogram =
ActivityMetrics.histogram(
this,
defaultAdapterMetricsPrefix + "message_size",
ActivityMetrics.DEFAULT_HDRDIGITS);
messageSizeHistogram =
ActivityMetrics.histogram(this.kafkaBaseOpDispenser,
"message_size", ActivityMetrics.DEFAULT_HDRDIGITS);

// Timer metrics
this.bindTimer =
ActivityMetrics.timer(
this,
defaultAdapterMetricsPrefix + "bind",
ActivityMetrics.DEFAULT_HDRDIGITS);
this.executeTimer =
ActivityMetrics.timer(
this,
defaultAdapterMetricsPrefix + "execute",
ActivityMetrics.DEFAULT_HDRDIGITS);
bindTimer =
ActivityMetrics.timer(this.kafkaBaseOpDispenser,
"bind", ActivityMetrics.DEFAULT_HDRDIGITS);
executeTimer =
ActivityMetrics.timer(this.kafkaBaseOpDispenser,
"execute", ActivityMetrics.DEFAULT_HDRDIGITS);

// End-to-end metrics
// Latency
this.e2eMsgProcLatencyHistogram =
ActivityMetrics.histogram(
kafkaBaseOpDispenser,
defaultAdapterMetricsPrefix + "e2e_msg_latency",
ActivityMetrics.DEFAULT_HDRDIGITS);
e2eMsgProcLatencyHistogram =
ActivityMetrics.histogram(this.kafkaBaseOpDispenser, "e2e_msg_latency", ActivityMetrics.DEFAULT_HDRDIGITS);
// Error metrics
this.msgErrOutOfSeqCounter =
ActivityMetrics.counter(
kafkaBaseOpDispenser,
defaultAdapterMetricsPrefix + "err_msg_oos");
this.msgErrLossCounter =
ActivityMetrics.counter(
kafkaBaseOpDispenser,
defaultAdapterMetricsPrefix + "err_msg_loss");
this.msgErrDuplicateCounter =
ActivityMetrics.counter(
kafkaBaseOpDispenser,
defaultAdapterMetricsPrefix + "err_msg_dup");
msgErrOutOfSeqCounter =
ActivityMetrics.counter(this.kafkaBaseOpDispenser, "err_msg_oos");
msgErrLossCounter =
ActivityMetrics.counter(this.kafkaBaseOpDispenser, "err_msg_loss");
msgErrDuplicateCounter =
ActivityMetrics.counter(this.kafkaBaseOpDispenser, "err_msg_dup");
}

public Timer getBindTimer() { return bindTimer; }

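The metrics hunk above drops the string prefix (defaultAdapterMetricsPrefix + "bind") in favor of attaching metrics to a labeled parent element. A rough illustration of label composition using plain maps follows; it is not the NBLabels API itself, just the idea of deriving a child's labels from its parent plus a "name" entry.

import java.util.LinkedHashMap;
import java.util.Map;

public class LabelComposition {
    // Compose parent labels with a child "name" label, analogous to how the new
    // KafkaAdapterMetrics constructor composes labeledParent.getLabels().
    static Map<String, String> childLabels(Map<String, String> parent, String name) {
        Map<String, String> out = new LinkedHashMap<>(parent);
        out.put("name", name);
        return out;
    }

    public static void main(String[] args) {
        Map<String, String> parent = Map.of("adapter", "kafka", "op", "producer");
        System.out.println(childLabels(parent, "KafkaAdapterMetrics"));
    }
}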
@@ -1,22 +1,21 @@
package io.nosqlbench.adapter.kafka.util;

/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package io.nosqlbench.adapter.kafka.util;

import com.amazonaws.util.Base64;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -30,9 +29,10 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class KafkaAdapterUtil {
public enum KafkaAdapterUtil {
;
public static final String MSG_SEQUENCE_NUMBER = "sequence_number";
private final static Logger logger = LogManager.getLogger(KafkaAdapterUtil.class);
private static final Logger logger = LogManager.getLogger(KafkaAdapterUtil.class);

public static String DFT_CONSUMER_GROUP_NAME_PREFIX = "nbKafkaGrp";
public static String DFT_TOPIC_NAME_PREFIX = "nbKafkaTopic";
@@ -47,74 +47,67 @@ public class KafkaAdapterUtil {
SEQ_TRACKING("seq_tracking");
public final String label;

DOC_LEVEL_PARAMS(String label) {
DOC_LEVEL_PARAMS(final String label) {
this.label = label;
}
}
public static boolean isValidDocLevelParam(String param) {
public static boolean isValidDocLevelParam(final String param) {
return Arrays.stream(DOC_LEVEL_PARAMS.values()).anyMatch(t -> t.label.equals(param));
}
public static String getValidDocLevelParamList() {
return Arrays.stream(DOC_LEVEL_PARAMS.values()).map(t -> t.label).collect(Collectors.joining(", "));
}

public final static String NB_MSG_SEQ_PROP = "NBMsgSeqProp";
public final static String NB_MSG_SIZE_PROP = "NBMsgSize";
public static final String NB_MSG_SEQ_PROP = "NBMsgSeqProp";
public static final String NB_MSG_SIZE_PROP = "NBMsgSize";

// Get simplified NB thread name
public static String getSimplifiedNBThreadName(String fullThreadName) {
assert (StringUtils.isNotBlank(fullThreadName));
public static String getSimplifiedNBThreadName(final String fullThreadName) {
assert StringUtils.isNotBlank(fullThreadName);

if (StringUtils.contains(fullThreadName, '/'))
if (StringUtils.contains(fullThreadName, '/')) {
return StringUtils.substringAfterLast(fullThreadName, "/");
else
return fullThreadName;
}
return fullThreadName;
}

public static Map<String, String> convertJsonToMap(String jsonStr) throws Exception {
ObjectMapper mapper = new ObjectMapper();
public static Map<String, String> convertJsonToMap(final String jsonStr) throws Exception {
final ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(jsonStr, new TypeReference<Map<String, String>>(){});
}

public static List<Object> convertJsonToObjList(String jsonStr) throws Exception {
ObjectMapper mapper = new ObjectMapper();
public static List<Object> convertJsonToObjList(final String jsonStr) throws Exception {
final ObjectMapper mapper = new ObjectMapper();
return Arrays.asList(mapper.readValue(jsonStr, Object[].class));
}

public static String buildCacheKey(String... keyParts) {
String combinedStr = Arrays.stream(keyParts)
public static String buildCacheKey(final String... keyParts) {
final String combinedStr = Arrays.stream(keyParts)
.filter(StringUtils::isNotBlank)
.collect(Collectors.joining("::"));
return Base64.encodeAsString(combinedStr.getBytes());
return Base64.encodeAsString(combinedStr.getBytes(StandardCharsets.UTF_8));
}

public static void pauseCurThreadExec(int pauseInSec) {
if (pauseInSec > 0) {
try {
Thread.sleep(pauseInSec * 1000);
}
catch (InterruptedException ie) {
ie.printStackTrace();
}
public static void pauseCurThreadExec(final int pauseInSec) {
if (0 < pauseInSec) try {
Thread.sleep(pauseInSec * 1000L);
} catch (final InterruptedException ie) {
ie.printStackTrace();
}
}

public static int getStrObjSize(String strObj) {
public static int getStrObjSize(final String strObj) {
// << https://docs.oracle.com/javase/6/docs/api/java/lang/String.html >>
// A String represents a string in the UTF-16 format ...
return strObj.getBytes(StandardCharsets.UTF_16).length;
}

public static void messageErrorHandling(Exception exception, boolean strictErrorHandling, String errorMsg) {
public static void messageErrorHandling(final Exception exception, final boolean strictErrorHandling, final String errorMsg) {
exception.printStackTrace();

if (strictErrorHandling) {
throw new RuntimeException(errorMsg + " [ " + exception.getMessage() + " ]");
}
else {
KafkaAdapterUtil.pauseCurThreadExec(1);
}
if (strictErrorHandling) throw new RuntimeException(errorMsg + " [ " + exception.getMessage() + " ]");
pauseCurThreadExec(1);
}
}

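Several changes above pin an explicit charset when converting strings to bytes, so behavior no longer depends on the JVM default charset. A small standalone example of the same cache-key pattern using only JDK classes follows; java.util.Base64 stands in here for the AWS Base64 helper imported in the file.

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Base64;
import java.util.stream.Collectors;

public class CacheKeyExample {
    static String buildCacheKey(String... keyParts) {
        // Join the non-blank parts with "::", as the adapter utility does.
        String combined = Arrays.stream(keyParts)
            .filter(s -> s != null && !s.isBlank())
            .collect(Collectors.joining("::"));
        // Explicit UTF-8 avoids depending on the platform default charset.
        return Base64.getEncoder().encodeToString(combined.getBytes(StandardCharsets.UTF_8));
    }

    public static void main(String[] args) {
        System.out.println(buildCacheKey("nbKafkaGrp", "topic-1", null));
    }
}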
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -34,23 +34,23 @@ import java.util.Iterator;
import java.util.Map;

public class KafkaClientConf {
private final static Logger logger = LogManager.getLogger(KafkaClientConf.class);
private static final Logger logger = LogManager.getLogger(KafkaClientConf.class);

public static final String TOPIC_CONF_PREFIX = "topic";
public static final String PRODUCER_CONF_PREFIX = "producer";
public static final String CONSUMER_CONF_PREFIX = "consumer";

// https://kafka.apache.org/documentation/#topicconfigs
private Map<String, String> topicConfMap = new HashMap<>();
private Map<String, String> producerConfMap = new HashMap<>();
private Map<String, String> consumerConfMap = new HashMap<>();
private final Map<String, String> topicConfMap = new HashMap<>();
private final Map<String, String> producerConfMap = new HashMap<>();
private final Map<String, String> consumerConfMap = new HashMap<>();


public KafkaClientConf(String clientConfFileName) {
public KafkaClientConf(final String clientConfFileName) {

//////////////////
// Read related Kafka client configuration settings from a file
readRawConfFromFile(clientConfFileName);
this.readRawConfFromFile(clientConfFileName);


//////////////////
@@ -61,67 +61,63 @@ public class KafkaClientConf {
// <<< https://kafka.apache.org/documentation/#producerconfigs >>>
// producer config
// * bootstrap.servers
producerConfMap.remove("bootstrap.servers");
this.producerConfMap.remove("bootstrap.servers");

// <<< https://kafka.apache.org/documentation/#consumerconfigs >>>
// consumer config
// * bootstrap.servers
consumerConfMap.remove("bootstrap.servers");
this.consumerConfMap.remove("bootstrap.servers");

}

public void readRawConfFromFile(String fileName) {
File file = new File(fileName);
public void readRawConfFromFile(final String fileName) {
final File file = new File(fileName);

try {
String canonicalFilePath = file.getCanonicalPath();
final String canonicalFilePath = file.getCanonicalPath();

Parameters params = new Parameters();
final Parameters params = new Parameters();

FileBasedConfigurationBuilder<FileBasedConfiguration> builder =
final FileBasedConfigurationBuilder<FileBasedConfiguration> builder =
new FileBasedConfigurationBuilder<FileBasedConfiguration>(PropertiesConfiguration.class)
.configure(params.properties()
.setFileName(fileName));

Configuration config = builder.getConfiguration();
final Configuration config = builder.getConfiguration();

for (Iterator<String> it = config.getKeys(); it.hasNext(); ) {
String confKey = it.next();
String confVal = config.getProperty(confKey).toString();
for (final Iterator<String> it = config.getKeys(); it.hasNext(); ) {
final String confKey = it.next();
final String confVal = config.getProperty(confKey).toString();

if (!StringUtils.isBlank(confVal)) {
// Get client connection specific configuration settings, removing "topic." prefix
if (StringUtils.startsWith(confKey, TOPIC_CONF_PREFIX)) {
topicConfMap.put(confKey.substring(TOPIC_CONF_PREFIX.length() + 1), confVal);
}
// Get producer specific configuration settings, removing "producer." prefix
else if (StringUtils.startsWith(confKey, PRODUCER_CONF_PREFIX)) {
producerConfMap.put(confKey.substring(PRODUCER_CONF_PREFIX.length() + 1), confVal);
}
// Get consumer specific configuration settings, removing "consumer." prefix
else if (StringUtils.startsWith(confKey, CONSUMER_CONF_PREFIX)) {
consumerConfMap.put(confKey.substring(CONSUMER_CONF_PREFIX.length() + 1), confVal);
}
}
// Get client connection specific configuration settings, removing "topic." prefix
if (!StringUtils.isBlank(confVal))
if (StringUtils.startsWith(confKey, KafkaClientConf.TOPIC_CONF_PREFIX))
this.topicConfMap.put(confKey.substring(KafkaClientConf.TOPIC_CONF_PREFIX.length() + 1), confVal);
// Get producer specific configuration settings, removing "producer." prefix
else if (StringUtils.startsWith(confKey, KafkaClientConf.PRODUCER_CONF_PREFIX))
this.producerConfMap.put(confKey.substring(KafkaClientConf.PRODUCER_CONF_PREFIX.length() + 1), confVal);
// Get consumer specific configuration settings, removing "consumer." prefix
else if (StringUtils.startsWith(confKey, KafkaClientConf.CONSUMER_CONF_PREFIX))
this.consumerConfMap.put(confKey.substring(KafkaClientConf.CONSUMER_CONF_PREFIX.length() + 1), confVal);
}
} catch (IOException ioe) {
logger.error("Can't read the specified config properties file: " + fileName);
} catch (final IOException ioe) {
KafkaClientConf.logger.error("Can't read the specified config properties file: {}", fileName);
ioe.printStackTrace();
} catch (ConfigurationException cex) {
logger.error("Error loading configuration items from the specified config properties file: " + fileName + ":" + cex.getMessage());
} catch (final ConfigurationException cex) {
KafkaClientConf.logger.error("Error loading configuration items from the specified config properties file: {}:{}", fileName, cex.getMessage());
cex.printStackTrace();
}
}

public Map<String, String> getTopicConfMap() { return topicConfMap; }
public Map<String, String> getProducerConfMap() { return producerConfMap; }
public Map<String, String> getConsumerConfMap() { return consumerConfMap; }
public Map<String, String> getTopicConfMap() { return this.topicConfMap; }
public Map<String, String> getProducerConfMap() { return this.producerConfMap; }
public Map<String, String> getConsumerConfMap() { return this.consumerConfMap; }

public String toString() {
return new ToStringBuilder(this).
append("topicConfMap", topicConfMap).
append("producerConfMap", producerConfMap).
append("consumerConfMap", consumerConfMap).
append("topicConfMap", this.topicConfMap).
append("producerConfMap", this.producerConfMap).
append("consumerConfMap", this.consumerConfMap).
toString();
}
}

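The config loader above splits one properties file into topic/producer/consumer maps by key prefix. A minimal sketch of the same idea with plain java.util.Properties follows; the real class uses Apache Commons Configuration, so this is only an illustration of the prefix-splitting logic.

import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PrefixedConfSplit {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.load(new StringReader(
            "topic.compression.type=lz4\n" +
            "producer.acks=all\n" +
            "consumer.group.id=nbKafkaGrp\n"));

        Map<String, String> topicConf = new HashMap<>();
        Map<String, String> producerConf = new HashMap<>();
        Map<String, String> consumerConf = new HashMap<>();

        // Route each key to its map and strip the category prefix, as readRawConfFromFile() does.
        for (String key : props.stringPropertyNames()) {
            String val = props.getProperty(key);
            if (key.startsWith("topic.")) topicConf.put(key.substring("topic.".length()), val);
            else if (key.startsWith("producer.")) producerConf.put(key.substring("producer.".length()), val);
            else if (key.startsWith("consumer.")) consumerConf.put(key.substring("consumer.".length()), val);
        }
        System.out.println(topicConf + " " + producerConf + " " + consumerConf);
    }
}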
@@ -1,4 +1,20 @@
#!/usr/local/bin/bash
#
# Copyright (c) 2023 nosqlbench
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

: "${SKIP_TESTS:=1}"
(
cd "$(git rev-parse --show-toplevel)" && \

@@ -1,3 +1,19 @@
#
# Copyright (c) 2023 nosqlbench
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#####
# Topic related configurations (global) - topic.***
# - Valid settings: https://kafka.apache.org/documentation/#topicconfigs

@@ -1,4 +1,20 @@
#!/usr/local/bin/bash
#
# Copyright (c) 2023 nosqlbench
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

: "${REBUILD:=1}"
: "${CYCLES:=1000000000}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

@@ -1,4 +1,20 @@
#!/usr/local/bin/bash
#
# Copyright (c) 2023 nosqlbench
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

: "${REBUILD:=1}"
: "${CYCLES:=1000000000}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

@@ -34,7 +34,7 @@
</description>

<properties>
<pulsar.version>2.11.0</pulsar.version>
<pulsar.version>2.11.1</pulsar.version>
</properties>

<dependencies>

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,6 +20,9 @@ import io.nosqlbench.adapter.pulsar.PulsarSpace;
import io.nosqlbench.adapter.pulsar.ops.MessageConsumerOp;
import io.nosqlbench.adapter.pulsar.util.EndToEndStartingTimeSource;
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil;
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.CONSUMER_CONF_CUSTOM_KEY;
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.CONSUMER_CONF_STD_KEY;
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.DOC_LEVEL_PARAMS;
import io.nosqlbench.engine.api.metrics.ReceivedMessageSequenceTracker;
import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
import io.nosqlbench.engine.api.templating.ParsedOp;
@@ -33,7 +36,7 @@ import java.util.function.LongFunction;

public class MessageConsumerOpDispenser extends PulsarClientOpDispenser {

private final static Logger logger = LogManager.getLogger("MessageConsumerOpDispenser");
private static final Logger logger = LogManager.getLogger("MessageConsumerOpDispenser");

private final LongFunction<String> topicPatternFunc;
private final LongFunction<String> subscriptionNameFunc;
@@ -46,59 +49,59 @@ public class MessageConsumerOpDispenser extends PulsarClientOpDispenser {
private final ThreadLocal<Map<String, ReceivedMessageSequenceTracker>>
receivedMessageSequenceTrackersForTopicThreadLocal = ThreadLocal.withInitial(HashMap::new);

public MessageConsumerOpDispenser(DriverAdapter adapter,
ParsedOp op,
LongFunction<String> tgtNameFunc,
PulsarSpace pulsarSpace) {
public MessageConsumerOpDispenser(final DriverAdapter adapter,
final ParsedOp op,
final LongFunction<String> tgtNameFunc,
final PulsarSpace pulsarSpace) {
super(adapter, op, tgtNameFunc, pulsarSpace);

this.topicPatternFunc =
lookupOptionalStrOpValueFunc(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.topicsPattern.label);
this.subscriptionNameFunc =
lookupMandtoryStrOpValueFunc(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.subscriptionName.label);
this.subscriptionTypeFunc =
lookupOptionalStrOpValueFunc(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.subscriptionType.label);
this.cycleConsumerNameFunc =
lookupOptionalStrOpValueFunc(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.consumerName.label);
this.rangesFunc =
lookupOptionalStrOpValueFunc(PulsarAdapterUtil.CONSUMER_CONF_CUSTOM_KEY.ranges.label);
this.e2eStartTimeSrcParamStrFunc = lookupOptionalStrOpValueFunc(
PulsarAdapterUtil.DOC_LEVEL_PARAMS.E2E_STARTING_TIME_SOURCE.label, "none");
this.consumerFunction = (l) -> getConsumer(
topicPatternFunc =
this.lookupOptionalStrOpValueFunc(CONSUMER_CONF_STD_KEY.topicsPattern.label);
subscriptionNameFunc =
this.lookupMandtoryStrOpValueFunc(CONSUMER_CONF_STD_KEY.subscriptionName.label);
subscriptionTypeFunc =
this.lookupOptionalStrOpValueFunc(CONSUMER_CONF_STD_KEY.subscriptionType.label);
cycleConsumerNameFunc =
this.lookupOptionalStrOpValueFunc(CONSUMER_CONF_STD_KEY.consumerName.label);
rangesFunc =
this.lookupOptionalStrOpValueFunc(CONSUMER_CONF_CUSTOM_KEY.ranges.label);
e2eStartTimeSrcParamStrFunc = this.lookupOptionalStrOpValueFunc(
DOC_LEVEL_PARAMS.E2E_STARTING_TIME_SOURCE.label, "none");
consumerFunction = l -> this.getConsumer(
tgtNameFunc.apply(l),
topicPatternFunc.apply(l),
subscriptionNameFunc.apply(l),
subscriptionTypeFunc.apply(l),
cycleConsumerNameFunc.apply(l),
rangesFunc.apply(l));
this.topicPatternFunc.apply(l),
this.subscriptionNameFunc.apply(l),
this.subscriptionTypeFunc.apply(l),
this.cycleConsumerNameFunc.apply(l),
this.rangesFunc.apply(l));
}

@Override
public MessageConsumerOp apply(long cycle) {
public MessageConsumerOp apply(final long cycle) {
return new MessageConsumerOp(
pulsarAdapterMetrics,
pulsarClient,
pulsarSchema,
asyncApiFunc.apply(cycle),
useTransactFunc.apply(cycle),
seqTrackingFunc.apply(cycle),
transactSupplierFunc.apply(cycle),
payloadRttFieldFunc.apply(cycle),
EndToEndStartingTimeSource.valueOf(e2eStartTimeSrcParamStrFunc.apply(cycle).toUpperCase()),
this.pulsarAdapterMetrics,
this.pulsarClient,
this.pulsarSchema,
this.asyncApiFunc.apply(cycle),
this.useTransactFunc.apply(cycle),
this.seqTrackingFunc.apply(cycle),
this.transactSupplierFunc.apply(cycle),
this.payloadRttFieldFunc.apply(cycle),
EndToEndStartingTimeSource.valueOf(this.e2eStartTimeSrcParamStrFunc.apply(cycle).toUpperCase()),
this::getReceivedMessageSequenceTracker,
consumerFunction.apply(cycle),
pulsarSpace.getPulsarNBClientConf().getConsumerTimeoutSeconds()
this.consumerFunction.apply(cycle),
this.pulsarSpace.getPulsarNBClientConf().getConsumerTimeoutSeconds()
);
}

private ReceivedMessageSequenceTracker getReceivedMessageSequenceTracker(String topicName) {
return receivedMessageSequenceTrackersForTopicThreadLocal.get()
.computeIfAbsent(topicName, k -> createReceivedMessageSequenceTracker());
private ReceivedMessageSequenceTracker getReceivedMessageSequenceTracker(final String topicName) {
return this.receivedMessageSequenceTrackersForTopicThreadLocal.get()
.computeIfAbsent(topicName, k -> this.createReceivedMessageSequenceTracker());
}

private ReceivedMessageSequenceTracker createReceivedMessageSequenceTracker() {
return new ReceivedMessageSequenceTracker(pulsarAdapterMetrics.getMsgErrOutOfSeqCounter(),
pulsarAdapterMetrics.getMsgErrDuplicateCounter(),
pulsarAdapterMetrics.getMsgErrLossCounter());
return new ReceivedMessageSequenceTracker(this.pulsarAdapterMetrics.getMsgErrOutOfSeqCounter(),
this.pulsarAdapterMetrics.getMsgErrDuplicateCounter(),
this.pulsarAdapterMetrics.getMsgErrLossCounter());
}
}

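The dispenser above resolves each op field to a LongFunction so the value can vary per cycle while missing fields fall back to a static default. A toy sketch of that lookup-with-default pattern follows; the names are illustrative and do not reflect the ParsedOp API.

import java.util.Map;
import java.util.Optional;
import java.util.function.LongFunction;

public class OptionalOpField {
    // Return a per-cycle function for a field, or a constant default if the field is absent.
    static LongFunction<String> optionalField(Map<String, LongFunction<String>> fields,
                                              String name, String defaultValue) {
        return Optional.ofNullable(fields.get(name)).orElse(l -> defaultValue);
    }

    public static void main(String[] args) {
        LongFunction<String> perCycleName = l -> "consumer-" + l;
        Map<String, LongFunction<String>> fields = Map.of("consumerName", perCycleName);
        LongFunction<String> nameFunc = optionalField(fields, "consumerName", "none");
        LongFunction<String> typeFunc = optionalField(fields, "subscriptionType", "none");
        System.out.println(nameFunc.apply(7) + " / " + typeFunc.apply(7)); // consumer-7 / none
    }
}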
@@ -1,7 +1,5 @@
|
||||
package io.nosqlbench.adapter.pulsar.dispensers;
|
||||
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,13 +14,27 @@ package io.nosqlbench.adapter.pulsar.dispensers;
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.nosqlbench.adapter.pulsar.dispensers;
|
||||
|
||||
import io.nosqlbench.adapter.pulsar.PulsarSpace;
|
||||
import io.nosqlbench.adapter.pulsar.PulsarSpace.ConsumerCacheKey;
|
||||
import io.nosqlbench.adapter.pulsar.PulsarSpace.ProducerCacheKey;
|
||||
import io.nosqlbench.adapter.pulsar.PulsarSpace.ReaderCacheKey;
|
||||
import io.nosqlbench.adapter.pulsar.exception.PulsarAdapterInvalidParamException;
|
||||
import io.nosqlbench.adapter.pulsar.exception.PulsarAdapterUnexpectedException;
|
||||
import io.nosqlbench.adapter.pulsar.ops.PulsarOp;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterMetrics;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil;
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.CONF_GATEGORY;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.CONSUMER_CONF_STD_KEY;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.DOC_LEVEL_PARAMS;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.PRODUCER_CONF_STD_KEY;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.PULSAR_API_TYPE;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.READER_CONF_CUSTOM_KEY;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.READER_CONF_STD_KEY;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.READER_MSG_POSITION_TYPE;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.engine.api.activityimpl.BaseOpDispenser;
|
||||
import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
|
||||
import io.nosqlbench.engine.api.templating.ParsedOp;
|
||||
@@ -39,9 +51,9 @@ import java.util.function.Predicate;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public abstract class PulsarBaseOpDispenser extends BaseOpDispenser<PulsarOp, PulsarSpace> implements NBNamedElement {
|
||||
public abstract class PulsarBaseOpDispenser extends BaseOpDispenser<PulsarOp, PulsarSpace> {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger("PulsarBaseOpDispenser");
|
||||
private static final Logger logger = LogManager.getLogger("PulsarBaseOpDispenser");
|
||||
|
||||
protected final ParsedOp parsedOp;
|
||||
protected final PulsarSpace pulsarSpace;
|
||||
@@ -53,98 +65,102 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser<PulsarOp, P
|
||||
|
||||
protected final long totalCycleNum;
|
||||
|
||||
public PulsarBaseOpDispenser(DriverAdapter adapter,
|
||||
ParsedOp op,
|
||||
LongFunction<String> tgtNameFunc,
|
||||
PulsarSpace pulsarSpace) {
|
||||
protected PulsarBaseOpDispenser(final DriverAdapter adapter,
|
||||
final ParsedOp op,
|
||||
final LongFunction<String> tgtNameFunc,
|
||||
final PulsarSpace pulsarSpace) {
|
||||
|
||||
super(adapter, op);
|
||||
|
||||
this.parsedOp = op;
|
||||
parsedOp = op;
|
||||
this.tgtNameFunc = tgtNameFunc;
|
||||
this.pulsarSpace = pulsarSpace;
|
||||
|
||||
// Doc-level parameter: async_api
|
||||
this.asyncApiFunc = lookupStaticBoolConfigValueFunc(
|
||||
PulsarAdapterUtil.DOC_LEVEL_PARAMS.ASYNC_API.label, true);
|
||||
asyncApiFunc = this.lookupStaticBoolConfigValueFunc(
|
||||
DOC_LEVEL_PARAMS.ASYNC_API.label, true);
|
||||
|
||||
String defaultMetricsPrefix = getDefaultMetricsPrefix(this.parsedOp);
|
||||
this.pulsarAdapterMetrics = new PulsarAdapterMetrics(this, defaultMetricsPrefix);
|
||||
pulsarAdapterMetrics.initPulsarAdapterInstrumentation();
|
||||
pulsarAdapterMetrics = new PulsarAdapterMetrics(this);
|
||||
this.pulsarAdapterMetrics.initPulsarAdapterInstrumentation();
|
||||
|
||||
totalThreadNum = NumberUtils.toInt(parsedOp.getStaticValue("threads"));
|
||||
totalCycleNum = NumberUtils.toLong(parsedOp.getStaticValue("cycles"));
|
||||
this.totalThreadNum = NumberUtils.toInt(this.parsedOp.getStaticValue("threads"));
|
||||
this.totalCycleNum = NumberUtils.toLong(this.parsedOp.getStaticValue("cycles"));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "PulsarBaseOpDispenser";
|
||||
}
|
||||
|
||||
public PulsarSpace getPulsarSpace() { return pulsarSpace; }
|
||||
|
||||
protected LongFunction<Boolean> lookupStaticBoolConfigValueFunc(String paramName, boolean defaultValue) {
|
||||
LongFunction<Boolean> booleanLongFunction;
|
||||
booleanLongFunction = (l) -> parsedOp.getOptionalStaticConfig(paramName, String.class)
|
||||
@Override
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV("name", this.getName());
|
||||
}
|
||||
|
||||
public PulsarSpace getPulsarSpace() { return this.pulsarSpace; }
|
||||
|
||||
protected LongFunction<Boolean> lookupStaticBoolConfigValueFunc(final String paramName, final boolean defaultValue) {
|
||||
final LongFunction<Boolean> booleanLongFunction;
|
||||
booleanLongFunction = l -> this.parsedOp.getOptionalStaticConfig(paramName, String.class)
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.map(value -> BooleanUtils.toBoolean(value))
|
||||
.orElse(defaultValue);
|
||||
logger.info("{}: {}", paramName, booleanLongFunction.apply(0));
|
||||
PulsarBaseOpDispenser.logger.info("{}: {}", paramName, booleanLongFunction.apply(0));
|
||||
return booleanLongFunction;
|
||||
}
|
||||
|
||||
protected LongFunction<Set<String>> lookupStaticStrSetOpValueFunc(String paramName) {
|
||||
LongFunction<Set<String>> setStringLongFunction;
|
||||
setStringLongFunction = (l) -> parsedOp.getOptionalStaticValue(paramName, String.class)
|
||||
protected LongFunction<Set<String>> lookupStaticStrSetOpValueFunc(final String paramName) {
|
||||
final LongFunction<Set<String>> setStringLongFunction;
|
||||
setStringLongFunction = l -> this.parsedOp.getOptionalStaticValue(paramName, String.class)
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.map(value -> {
|
||||
Set<String > set = new HashSet<>();
|
||||
|
||||
if (StringUtils.contains(value,',')) {
|
||||
set = Arrays.stream(value.split(","))
|
||||
.map(String::trim)
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.collect(Collectors.toCollection(LinkedHashSet::new));
|
||||
}
|
||||
if (StringUtils.contains(value,',')) set = Arrays.stream(value.split(","))
|
||||
.map(String::trim)
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.collect(Collectors.toCollection(LinkedHashSet::new));
|
||||
|
||||
return set;
|
||||
}).orElse(Collections.emptySet());
|
||||
logger.info("{}: {}", paramName, setStringLongFunction.apply(0));
|
||||
PulsarBaseOpDispenser.logger.info("{}: {}", paramName, setStringLongFunction.apply(0));
|
||||
return setStringLongFunction;
|
||||
}
|
||||
|
||||
// If the corresponding Op parameter is not provided, use the specified default value
|
||||
protected LongFunction<Integer> lookupStaticIntOpValueFunc(String paramName, int defaultValue) {
|
||||
LongFunction<Integer> integerLongFunction;
|
||||
integerLongFunction = (l) -> parsedOp.getOptionalStaticValue(paramName, String.class)
|
||||
protected LongFunction<Integer> lookupStaticIntOpValueFunc(final String paramName, final int defaultValue) {
|
||||
final LongFunction<Integer> integerLongFunction;
|
||||
integerLongFunction = l -> this.parsedOp.getOptionalStaticValue(paramName, String.class)
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.map(value -> NumberUtils.toInt(value))
|
||||
.map(value -> {
|
||||
if (value < 0) return 0;
|
||||
else return value;
|
||||
if (0 > value) {
|
||||
return 0;
|
||||
}
|
||||
return value;
|
||||
}).orElse(defaultValue);
|
||||
logger.info("{}: {}", paramName, integerLongFunction.apply(0));
|
||||
PulsarBaseOpDispenser.logger.info("{}: {}", paramName, integerLongFunction.apply(0));
|
||||
return integerLongFunction;
|
||||
}
|
||||
|
||||
// If the corresponding Op parameter is not provided, use the specified default value
|
||||
protected LongFunction<String> lookupOptionalStrOpValueFunc(String paramName, String defaultValue) {
|
||||
LongFunction<String> stringLongFunction;
|
||||
stringLongFunction = parsedOp.getAsOptionalFunction(paramName, String.class)
|
||||
.orElse((l) -> defaultValue);
|
||||
logger.info("{}: {}", paramName, stringLongFunction.apply(0));
|
||||
protected LongFunction<String> lookupOptionalStrOpValueFunc(final String paramName, final String defaultValue) {
|
||||
final LongFunction<String> stringLongFunction;
|
||||
stringLongFunction = this.parsedOp.getAsOptionalFunction(paramName, String.class)
|
||||
.orElse(l -> defaultValue);
|
||||
PulsarBaseOpDispenser.logger.info("{}: {}", paramName, stringLongFunction.apply(0));
|
||||
|
||||
return stringLongFunction;
|
||||
}
|
||||
protected LongFunction<String> lookupOptionalStrOpValueFunc(String paramName) {
|
||||
return lookupOptionalStrOpValueFunc(paramName, "");
|
||||
protected LongFunction<String> lookupOptionalStrOpValueFunc(final String paramName) {
|
||||
return this.lookupOptionalStrOpValueFunc(paramName, "");
|
||||
}
|
||||
|
||||
// Mandatory Op parameter. Throw an error if not specified or having empty value
|
||||
protected LongFunction<String> lookupMandtoryStrOpValueFunc(String paramName) {
|
||||
LongFunction<String> stringLongFunction;
|
||||
stringLongFunction = parsedOp.getAsRequiredFunction(paramName, String.class);
|
||||
logger.info("{}: {}", paramName, stringLongFunction.apply(0));
|
||||
protected LongFunction<String> lookupMandtoryStrOpValueFunc(final String paramName) {
|
||||
final LongFunction<String> stringLongFunction;
|
||||
stringLongFunction = this.parsedOp.getAsRequiredFunction(paramName, String.class);
|
||||
PulsarBaseOpDispenser.logger.info("{}: {}", paramName, stringLongFunction.apply(0));
|
||||
|
||||
return stringLongFunction;
|
||||
}
|
||||
@@ -157,28 +173,28 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser<PulsarOp, P
|
||||
* @param topicName - topic name
|
||||
* @return String
|
||||
*/
|
||||
private String getPulsarAPIMetricsPrefix(String apiType, String apiObjName, String topicName) {
|
||||
private String getPulsarAPIMetricsPrefix(final String apiType, final String apiObjName, final String topicName) {
|
||||
String apiMetricsPrefix = "";
|
||||
|
||||
if (PulsarAdapterUtil.isValidPulsarApiType(apiType)) {
|
||||
if (!StringUtils.isBlank(apiObjName)) {
|
||||
apiMetricsPrefix = apiObjName + "_";
|
||||
} else {
|
||||
if (!StringUtils.isBlank(apiObjName)) apiMetricsPrefix = apiObjName + '_';
|
||||
else {
|
||||
// we want a meaningful name for the API object (producer, consumer, reader, etc.)
|
||||
// we are not appending the topic name
|
||||
apiMetricsPrefix = apiType;
|
||||
|
||||
if (apiType.equalsIgnoreCase(PulsarAdapterUtil.PULSAR_API_TYPE.PRODUCER.label))
|
||||
apiMetricsPrefix += pulsarSpace.getProducerSetCnt();
|
||||
else if (apiType.equalsIgnoreCase(PulsarAdapterUtil.PULSAR_API_TYPE.CONSUMER.label))
|
||||
apiMetricsPrefix += pulsarSpace.getConsumerSetCnt();
|
||||
else if (apiType.equalsIgnoreCase(PulsarAdapterUtil.PULSAR_API_TYPE.READER.label))
|
||||
apiMetricsPrefix += pulsarSpace.getReaderSetCnt();
|
||||
if (apiType.equalsIgnoreCase(PULSAR_API_TYPE.PRODUCER.label)) {
|
||||
apiMetricsPrefix += this.pulsarSpace.getProducerSetCnt();
|
||||
} else if (apiType.equalsIgnoreCase(PULSAR_API_TYPE.CONSUMER.label)) {
|
||||
apiMetricsPrefix += this.pulsarSpace.getConsumerSetCnt();
|
||||
} else if (apiType.equalsIgnoreCase(PULSAR_API_TYPE.READER.label)) {
|
||||
apiMetricsPrefix += this.pulsarSpace.getReaderSetCnt();
|
||||
}
|
||||
|
||||
apiMetricsPrefix += "_";
|
||||
}
|
||||
|
||||
apiMetricsPrefix += topicName + "_";
|
||||
apiMetricsPrefix += topicName + '_';
|
||||
apiMetricsPrefix = apiMetricsPrefix
|
||||
// default name for tests/demos (in all Pulsar examples) is persistent://public/default/test -> use just the topic name test
|
||||
.replace("persistent://public/default/", "")
|
||||
@@ -198,327 +214,303 @@ public abstract class PulsarBaseOpDispenser extends BaseOpDispenser<PulsarOp, P
|
||||
// A configuration parameter can be set either at the global level (config.properties file),
|
||||
// or at the cycle level (<nb_scenario>.yaml file).
|
||||
// If set at both levels, cycle level setting takes precedence
|
||||
private String getEffectiveConValue(String confCategory, String confParamName, String cycleConfValue) {
|
||||
if (!StringUtils.isBlank(cycleConfValue)) {
|
||||
return cycleConfValue;
|
||||
}
|
||||
private String getEffectiveConValue(final String confCategory, final String confParamName, final String cycleConfValue) {
|
||||
if (!StringUtils.isBlank(cycleConfValue)) return cycleConfValue;
|
||||
|
||||
if (PulsarAdapterUtil.isValidConfCategory(confCategory)) {
|
||||
Map<String, String> catConfMap = new HashMap<>();
|
||||
|
||||
if (StringUtils.equalsIgnoreCase(confCategory, PulsarAdapterUtil.CONF_GATEGORY.Schema.label))
|
||||
catConfMap = pulsarSpace.getPulsarNBClientConf().getSchemaConfMapRaw();
|
||||
else if (StringUtils.equalsIgnoreCase(confCategory, PulsarAdapterUtil.CONF_GATEGORY.Client.label))
|
||||
catConfMap = pulsarSpace.getPulsarNBClientConf().getClientConfMapRaw();
|
||||
else if (StringUtils.equalsIgnoreCase(confCategory, PulsarAdapterUtil.CONF_GATEGORY.Producer.label))
|
||||
catConfMap = pulsarSpace.getPulsarNBClientConf().getProducerConfMapRaw();
|
||||
else if (StringUtils.equalsIgnoreCase(confCategory, PulsarAdapterUtil.CONF_GATEGORY.Consumer.label))
|
||||
catConfMap = pulsarSpace.getPulsarNBClientConf().getConsumerConfMapRaw();
|
||||
else if (StringUtils.equalsIgnoreCase(confCategory, PulsarAdapterUtil.CONF_GATEGORY.Reader.label))
|
||||
catConfMap = pulsarSpace.getPulsarNBClientConf().getReaderConfMapRaw();
|
||||
|
||||
String globalConfValue = catConfMap.get(confParamName);
|
||||
if (!StringUtils.isBlank(globalConfValue)) {
|
||||
return globalConfValue;
|
||||
if (StringUtils.equalsIgnoreCase(confCategory, CONF_GATEGORY.Schema.label)) {
|
||||
catConfMap = this.pulsarSpace.getPulsarNBClientConf().getSchemaConfMapRaw();
|
||||
} else if (StringUtils.equalsIgnoreCase(confCategory, CONF_GATEGORY.Client.label)) {
|
||||
catConfMap = this.pulsarSpace.getPulsarNBClientConf().getClientConfMapRaw();
|
||||
} else if (StringUtils.equalsIgnoreCase(confCategory, CONF_GATEGORY.Producer.label)) {
|
||||
catConfMap = this.pulsarSpace.getPulsarNBClientConf().getProducerConfMapRaw();
|
||||
} else if (StringUtils.equalsIgnoreCase(confCategory, CONF_GATEGORY.Consumer.label)) {
|
||||
catConfMap = this.pulsarSpace.getPulsarNBClientConf().getConsumerConfMapRaw();
|
||||
} else if (StringUtils.equalsIgnoreCase(confCategory, CONF_GATEGORY.Reader.label)) {
|
||||
catConfMap = this.pulsarSpace.getPulsarNBClientConf().getReaderConfMapRaw();
|
||||
}
|
||||
|
||||
final String globalConfValue = catConfMap.get(confParamName);
|
||||
if (!StringUtils.isBlank(globalConfValue)) return globalConfValue;
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
|
||||
|
||||
public Producer<?> getProducer(String cycleTopicName, String cycleProducerName) {
|
||||
String topicName = getEffectiveConValue(
|
||||
PulsarAdapterUtil.CONF_GATEGORY.Producer.label,
|
||||
PulsarAdapterUtil.PRODUCER_CONF_STD_KEY.topicName.label,
|
||||
public Producer<?> getProducer(final String cycleTopicName, final String cycleProducerName) {
|
||||
final String topicName = this.getEffectiveConValue(
|
||||
CONF_GATEGORY.Producer.label,
|
||||
PRODUCER_CONF_STD_KEY.topicName.label,
|
||||
cycleTopicName);
|
||||
|
||||
String producerName = getEffectiveConValue(
|
||||
PulsarAdapterUtil.CONF_GATEGORY.Producer.label,
|
||||
PulsarAdapterUtil.PRODUCER_CONF_STD_KEY.producerName.label,
|
||||
final String producerName = this.getEffectiveConValue(
|
||||
CONF_GATEGORY.Producer.label,
|
||||
PRODUCER_CONF_STD_KEY.producerName.label,
|
||||
cycleProducerName);
|
||||
|
||||
PulsarSpace.ProducerCacheKey producerCacheKey = new PulsarSpace.ProducerCacheKey(producerName, topicName);
|
||||
return pulsarSpace.getProducer(producerCacheKey, () -> {
|
||||
PulsarClient pulsarClient = pulsarSpace.getPulsarClient();
|
||||
final ProducerCacheKey producerCacheKey = new ProducerCacheKey(producerName, topicName);
|
||||
return this.pulsarSpace.getProducer(producerCacheKey, () -> {
|
||||
final PulsarClient pulsarClient = this.pulsarSpace.getPulsarClient();
|
||||
|
||||
// Get other possible producer settings that are set at global level
|
||||
Map<String, Object> producerConf = pulsarSpace.getPulsarNBClientConf().getProducerConfMapTgt();
|
||||
final Map<String, Object> producerConf = this.pulsarSpace.getPulsarNBClientConf().getProducerConfMapTgt();
|
||||
|
||||
// Remove global level settings
|
||||
producerConf.remove(PulsarAdapterUtil.PRODUCER_CONF_STD_KEY.topicName.label);
|
||||
producerConf.remove(PulsarAdapterUtil.PRODUCER_CONF_STD_KEY.producerName.label);
|
||||
producerConf.remove(PRODUCER_CONF_STD_KEY.topicName.label);
|
||||
producerConf.remove(PRODUCER_CONF_STD_KEY.producerName.label);
|
||||
|
||||
try {
|
||||
ProducerBuilder<?> producerBuilder = pulsarClient.
|
||||
newProducer(pulsarSpace.getPulsarSchema()).
|
||||
newProducer(this.pulsarSpace.getPulsarSchema()).
|
||||
loadConf(producerConf).
|
||||
topic(topicName);
|
||||
|
||||
if (!StringUtils.isAnyBlank(producerName)) {
|
||||
producerBuilder = producerBuilder.producerName(producerName);
|
||||
}
|
||||
if (!StringUtils.isAnyBlank(producerName)) producerBuilder = producerBuilder.producerName(producerName);
|
||||
|
||||
Producer<?> producer = producerBuilder.create();
|
||||
pulsarAdapterMetrics.registerProducerApiMetrics(producer,
|
||||
getPulsarAPIMetricsPrefix(
|
||||
PulsarAdapterUtil.PULSAR_API_TYPE.PRODUCER.label,
|
||||
producerName,
|
||||
topicName));
|
||||
final Producer<?> producer = producerBuilder.create();
|
||||
this.pulsarAdapterMetrics.registerProducerApiMetrics(producer);
|
||||
return producer;
|
||||
} catch (PulsarClientException ple) {
|
||||
} catch (final PulsarClientException ple) {
|
||||
throw new PulsarAdapterUnexpectedException("Failed to create a Pulsar producer.");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private List<String> getEffectiveConsumerTopicNameList(String cycleTopicNameListStr) {
|
||||
String effectiveTopicNamesStr = getEffectiveConValue(
|
||||
PulsarAdapterUtil.CONF_GATEGORY.Consumer.label,
|
||||
PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.topicNames.label,
|
||||
private List<String> getEffectiveConsumerTopicNameList(final String cycleTopicNameListStr) {
|
||||
final String effectiveTopicNamesStr = this.getEffectiveConValue(
|
||||
CONF_GATEGORY.Consumer.label,
|
||||
CONSUMER_CONF_STD_KEY.topicNames.label,
|
||||
cycleTopicNameListStr);
|
||||
|
||||
String[] names = effectiveTopicNamesStr.split("[;,]");
|
||||
ArrayList<String> effectiveTopicNameList = new ArrayList<>();
|
||||
final String[] names = effectiveTopicNamesStr.split("[;,]");
|
||||
final ArrayList<String> effectiveTopicNameList = new ArrayList<>();
|
||||
|
||||
for (String name : names) {
|
||||
if (!StringUtils.isBlank(name))
|
||||
for (final String name : names)
|
||||
if (!StringUtils.isBlank(name)) {
|
||||
effectiveTopicNameList.add(name.trim());
|
||||
}
|
||||
}
|
||||
|
||||
return effectiveTopicNameList;
|
||||
}
|
||||
|
||||
private SubscriptionType getEffectiveSubscriptionType(String cycleSubscriptionType) {
|
||||
String subscriptionTypeStr = getEffectiveConValue(
|
||||
PulsarAdapterUtil.CONF_GATEGORY.Consumer.label,
|
||||
PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.subscriptionType.label,
|
||||
private SubscriptionType getEffectiveSubscriptionType(final String cycleSubscriptionType) {
|
||||
final String subscriptionTypeStr = this.getEffectiveConValue(
|
||||
CONF_GATEGORY.Consumer.label,
|
||||
CONSUMER_CONF_STD_KEY.subscriptionType.label,
|
||||
cycleSubscriptionType);
|
||||
|
||||
SubscriptionType subscriptionType = SubscriptionType.Exclusive; // default subscription type
|
||||
if (!StringUtils.isBlank(subscriptionTypeStr)) {
|
||||
try {
|
||||
subscriptionType = SubscriptionType.valueOf(subscriptionTypeStr);
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new PulsarAdapterInvalidParamException(
|
||||
"Invalid effective subscription type for a consumer (\"" + subscriptionTypeStr + "\"). " +
|
||||
"It must be one of the following values: " + PulsarAdapterUtil.getValidSubscriptionTypeList());
|
||||
}
|
||||
if (!StringUtils.isBlank(subscriptionTypeStr)) try {
|
||||
subscriptionType = SubscriptionType.valueOf(subscriptionTypeStr);
|
||||
} catch (final Exception e) {
|
||||
throw new PulsarAdapterInvalidParamException(
|
||||
"Invalid effective subscription type for a consumer (\"" + subscriptionTypeStr + "\"). " +
|
||||
"It must be one of the following values: " + PulsarAdapterUtil.getValidSubscriptionTypeList());
|
||||
}
|
||||
|
||||
return subscriptionType;
|
||||
}
|
||||
|
||||
public Consumer<?> getConsumer(String cycleTopicNameListStr,
|
||||
String cycleTopicPatternStr,
|
||||
String cycleSubscriptionName,
|
||||
String cycleSubscriptionType,
|
||||
String cycleConsumerName,
|
||||
String cycleKeySharedSubscriptionRanges) {
|
||||
public Consumer<?> getConsumer(final String cycleTopicNameListStr,
|
||||
final String cycleTopicPatternStr,
|
||||
final String cycleSubscriptionName,
|
||||
final String cycleSubscriptionType,
|
||||
final String cycleConsumerName,
|
||||
final String cycleKeySharedSubscriptionRanges) {
|
||||
|
||||
List<String> topicNameList = getEffectiveConsumerTopicNameList(cycleTopicNameListStr);
|
||||
final List<String> topicNameList = this.getEffectiveConsumerTopicNameList(cycleTopicNameListStr);
|
||||
|
||||
String topicPatternStr = StringUtils.trimToNull(getEffectiveConValue(
|
||||
PulsarAdapterUtil.CONF_GATEGORY.Consumer.label,
|
||||
PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.topicsPattern.label,
|
||||
final String topicPatternStr = StringUtils.trimToNull(this.getEffectiveConValue(
|
||||
CONF_GATEGORY.Consumer.label,
|
||||
CONSUMER_CONF_STD_KEY.topicsPattern.label,
|
||||
cycleTopicPatternStr));
|
||||
|
||||
String subscriptionName = getEffectiveConValue(
|
||||
PulsarAdapterUtil.CONF_GATEGORY.Consumer.label,
|
||||
PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.subscriptionName.label,
|
||||
final String subscriptionName = this.getEffectiveConValue(
|
||||
CONF_GATEGORY.Consumer.label,
|
||||
CONSUMER_CONF_STD_KEY.subscriptionName.label,
|
||||
cycleSubscriptionName);
|
||||
|
||||
SubscriptionType subscriptionType = getEffectiveSubscriptionType(cycleSubscriptionType);
|
||||
final SubscriptionType subscriptionType = this.getEffectiveSubscriptionType(cycleSubscriptionType);
|
||||
|
||||
String consumerName = getEffectiveConValue(
|
||||
PulsarAdapterUtil.CONF_GATEGORY.Consumer.label,
|
||||
PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.consumerName.label,
|
||||
final String consumerName = this.getEffectiveConValue(
|
||||
CONF_GATEGORY.Consumer.label,
|
||||
CONSUMER_CONF_STD_KEY.consumerName.label,
|
||||
cycleConsumerName);
|
||||
|
||||
if ( subscriptionType.equals(SubscriptionType.Exclusive) && (totalThreadNum > 1) ) {
|
||||
if (SubscriptionType.Exclusive == subscriptionType && 1 < totalThreadNum)
|
||||
throw new PulsarAdapterInvalidParamException(
|
||||
PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.subscriptionType.label,
|
||||
CONSUMER_CONF_STD_KEY.subscriptionType.label,
|
||||
"creating multiple consumers of \"Exclusive\" subscription type under the same subscription name");
|
||||
}
|
||||
|
||||
if ( (topicNameList.isEmpty() && (topicPatternStr == null)) ||
|
||||
(!topicNameList.isEmpty() && (topicPatternStr != null)) ) {
|
||||
throw new PulsarAdapterInvalidParamException(
|
||||
"Invalid combination of topic name(s) and topic patterns; only specify one parameter!");
|
||||
}
|
||||
if (topicNameList.isEmpty() == (null == topicPatternStr)) throw new PulsarAdapterInvalidParamException(
|
||||
"Invalid combination of topic name(s) and topic patterns; only specify one parameter!");
|
||||
|
||||
return pulsarSpace.getConsumer(
new PulsarSpace.ConsumerCacheKey(consumerName, subscriptionName, topicNameList, topicPatternStr), () -> {
PulsarClient pulsarClient = pulsarSpace.getPulsarClient();
return this.pulsarSpace.getConsumer(
new ConsumerCacheKey(consumerName, subscriptionName, topicNameList, topicPatternStr), () -> {
final PulsarClient pulsarClient = this.pulsarSpace.getPulsarClient();

// Get other possible consumer settings that are set at global level
Map<String, Object> consumerConf =
new HashMap<>(pulsarSpace.getPulsarNBClientConf().getConsumerConfMapTgt());
Map<String, Object> consumerConfToLoad = new HashMap<>();
final Map<String, Object> consumerConf =
new HashMap<>(this.pulsarSpace.getPulsarNBClientConf().getConsumerConfMapTgt());
final Map<String, Object> consumerConfToLoad = new HashMap<>();
consumerConfToLoad.putAll(consumerConf);

try {
ConsumerBuilder<?> consumerBuilder;
final ConsumerBuilder<?> consumerBuilder;

// Remove settings that will be handled outside "loadConf()"
consumerConfToLoad.remove(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.topicNames.label);
consumerConfToLoad.remove(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.topicsPattern.label);
consumerConfToLoad.remove(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.subscriptionName.label);
consumerConfToLoad.remove(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.subscriptionType.label);
consumerConfToLoad.remove(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.consumerName.label);
consumerConfToLoad.remove(CONSUMER_CONF_STD_KEY.topicNames.label);
consumerConfToLoad.remove(CONSUMER_CONF_STD_KEY.topicsPattern.label);
consumerConfToLoad.remove(CONSUMER_CONF_STD_KEY.subscriptionName.label);
consumerConfToLoad.remove(CONSUMER_CONF_STD_KEY.subscriptionType.label);
consumerConfToLoad.remove(CONSUMER_CONF_STD_KEY.consumerName.label);
// TODO: It looks like loadConf() method can't handle the following settings properly.
// Do these settings manually for now
// - deadLetterPolicy
// - negativeAckRedeliveryBackoff
// - ackTimeoutRedeliveryBackoff
consumerConfToLoad.remove(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.deadLetterPolicy.label);
consumerConfToLoad.remove(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.negativeAckRedeliveryBackoff.label);
consumerConfToLoad.remove(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.ackTimeoutRedeliveryBackoff.label);
consumerConfToLoad.remove(CONSUMER_CONF_STD_KEY.deadLetterPolicy.label);
consumerConfToLoad.remove(CONSUMER_CONF_STD_KEY.negativeAckRedeliveryBackoff.label);
consumerConfToLoad.remove(CONSUMER_CONF_STD_KEY.ackTimeoutRedeliveryBackoff.label);

boolean multiTopicConsumer = (topicNameList.size() > 1 || (topicPatternStr != null));
final boolean multiTopicConsumer = 1 < topicNameList.size() || null != topicPatternStr;
if (!multiTopicConsumer) {
assert (topicNameList.size() == 1);
consumerBuilder = pulsarClient.newConsumer(pulsarSpace.getPulsarSchema());
assert 1 == topicNameList.size();
consumerBuilder = pulsarClient.newConsumer(this.pulsarSpace.getPulsarSchema());
consumerBuilder.topic(topicNameList.get(0));
}
else {
consumerBuilder = pulsarClient.newConsumer();
if (!topicNameList.isEmpty()) {
assert (topicNameList.size() > 1);
assert 1 < topicNameList.size();
consumerBuilder.topics(topicNameList);
}
else {
Pattern topicPattern = Pattern.compile(topicPatternStr);
final Pattern topicPattern = Pattern.compile(topicPatternStr);
consumerBuilder.topicsPattern(topicPattern);
}
}
consumerBuilder.loadConf(consumerConfToLoad);

if (consumerConf.containsKey(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.deadLetterPolicy.label)) {
if (consumerConf.containsKey(CONSUMER_CONF_STD_KEY.deadLetterPolicy.label))
consumerBuilder.deadLetterPolicy((DeadLetterPolicy)
consumerConf.get(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.deadLetterPolicy.label));
}
if (consumerConf.containsKey(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.negativeAckRedeliveryBackoff.label)) {
consumerConf.get(CONSUMER_CONF_STD_KEY.deadLetterPolicy.label));
if (consumerConf.containsKey(CONSUMER_CONF_STD_KEY.negativeAckRedeliveryBackoff.label))
consumerBuilder.negativeAckRedeliveryBackoff((RedeliveryBackoff)
consumerConf.get(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.negativeAckRedeliveryBackoff.label));
}
if (consumerConf.containsKey(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.ackTimeoutRedeliveryBackoff.label)) {
consumerConf.get(CONSUMER_CONF_STD_KEY.negativeAckRedeliveryBackoff.label));
if (consumerConf.containsKey(CONSUMER_CONF_STD_KEY.ackTimeoutRedeliveryBackoff.label))
consumerBuilder.ackTimeoutRedeliveryBackoff((RedeliveryBackoff)
consumerConf.get(PulsarAdapterUtil.CONSUMER_CONF_STD_KEY.ackTimeoutRedeliveryBackoff.label));
}
consumerConf.get(CONSUMER_CONF_STD_KEY.ackTimeoutRedeliveryBackoff.label));

consumerBuilder
.subscriptionName(subscriptionName)
.subscriptionType(subscriptionType);

if (!StringUtils.isBlank(consumerName))
if (!StringUtils.isBlank(consumerName)) {
consumerBuilder.consumerName(consumerName);
}

if (subscriptionType == SubscriptionType.Key_Shared) {
if (SubscriptionType.Key_Shared == subscriptionType) {
KeySharedPolicy keySharedPolicy = KeySharedPolicy.autoSplitHashRange();
if (cycleKeySharedSubscriptionRanges != null && !cycleKeySharedSubscriptionRanges.isEmpty()) {
Range[] ranges = parseRanges(cycleKeySharedSubscriptionRanges);
logger.info("Configuring KeySharedPolicy#stickyHashRange with ranges {}", ranges);
if ((null != cycleKeySharedSubscriptionRanges) && !cycleKeySharedSubscriptionRanges.isEmpty()) {
final Range[] ranges = PulsarBaseOpDispenser.parseRanges(cycleKeySharedSubscriptionRanges);
PulsarBaseOpDispenser.logger.info("Configuring KeySharedPolicy#stickyHashRange with ranges {}", ranges);
keySharedPolicy = KeySharedPolicy.stickyHashRange().ranges(ranges);
}
consumerBuilder.keySharedPolicy(keySharedPolicy);
}
Consumer<?> consumer = consumerBuilder.subscribe();
final Consumer<?> consumer = consumerBuilder.subscribe();

String consumerTopicListString = (!topicNameList.isEmpty()) ? String.join("|", topicNameList) : topicPatternStr;
pulsarAdapterMetrics.registerConsumerApiMetrics(
final String consumerTopicListString = !topicNameList.isEmpty() ? String.join("|", topicNameList) : topicPatternStr;
this.pulsarAdapterMetrics.registerConsumerApiMetrics(
consumer,
getPulsarAPIMetricsPrefix(
PulsarAdapterUtil.PULSAR_API_TYPE.CONSUMER.label,
this.getPulsarAPIMetricsPrefix(
PULSAR_API_TYPE.CONSUMER.label,
consumerName,
consumerTopicListString));

return consumer;
}
catch (PulsarClientException ple) {
catch (final PulsarClientException ple) {
throw new PulsarAdapterUnexpectedException("Failed to create a Pulsar consumer!");
}
});
}
private static Range[] parseRanges(String ranges) {
if (ranges == null || ranges.isEmpty()) {
return new Range[0];
}
String[] split = ranges.split(",");
Range[] result = new Range[split.length];
private static Range[] parseRanges(final String ranges) {
if ((null == ranges) || ranges.isEmpty()) return new Range[0];
final String[] split = ranges.split(",");
final Range[] result = new Range[split.length];
for (int i = 0; i < split.length; i++) {
String range = split[i];
int pos = range.indexOf("..");
if (pos <= 0) {
throw new IllegalArgumentException("Invalid range '" + range + "'");
}
final String range = split[i];
final int pos = range.indexOf("..");
if (0 >= pos) throw new IllegalArgumentException("Invalid range '" + range + '\'');
try {
int start = Integer.parseInt(range.substring(0, pos));
int end = Integer.parseInt(range.substring(pos + 2));
final int start = Integer.parseInt(range.substring(0, pos));
final int end = Integer.parseInt(range.substring(pos + 2));
result[i] = Range.of(start, end);
} catch (NumberFormatException err) {
throw new IllegalArgumentException("Invalid range '" + range + "'");
} catch (final NumberFormatException err) {
throw new IllegalArgumentException("Invalid range '" + range + '\'');
}
}
return result;
}
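For context on the range syntax handled by parseRanges() above: the value is a comma-separated list of start..end key-hash bounds, and the parsed ranges become sticky hash ranges on the Key_Shared policy shown earlier in this diff. A minimal standalone sketch, assuming the stock Pulsar client API; the class name and the literal range string are illustrative only and not taken from this commit:

import java.util.Arrays;
import org.apache.pulsar.client.api.KeySharedPolicy;
import org.apache.pulsar.client.api.Range;

public class KeySharedRangeSketch {
    public static void main(String[] args) {
        // Hypothetical input; in the adapter this arrives per-cycle from the op template.
        String rangesSpec = "0..16383,49152..65535";

        // Split on "," into range tokens, then on ".." into start/end bounds.
        String[] parts = rangesSpec.split(",");
        Range[] ranges = new Range[parts.length];
        for (int i = 0; i < parts.length; i++) {
            int sep = parts[i].indexOf("..");
            ranges[i] = Range.of(
                Integer.parseInt(parts[i].substring(0, sep)),
                Integer.parseInt(parts[i].substring(sep + 2)));
        }

        // Restrict a Key_Shared consumer to the listed key-hash slots (the full space is 0..65535).
        KeySharedPolicy policy = KeySharedPolicy.stickyHashRange().ranges(ranges);
        System.out.println(Arrays.toString(ranges) + " -> " + policy.getClass().getSimpleName());
    }
}

A consumer attached with such a policy only receives messages whose key hash falls inside the listed slots, which is what makes the earlier Exclusive/thread-count check and this range parsing complementary concerns.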
public Reader<?> getReader(String cycleTopicName,
String cycleReaderName,
String cycleStartMsgPos) {
public Reader<?> getReader(final String cycleTopicName,
final String cycleReaderName,
final String cycleStartMsgPos) {

String topicName = getEffectiveConValue(
PulsarAdapterUtil.CONF_GATEGORY.Reader.label,
PulsarAdapterUtil.READER_CONF_STD_KEY.topicName.label,
final String topicName = this.getEffectiveConValue(
CONF_GATEGORY.Reader.label,
READER_CONF_STD_KEY.topicName.label,
cycleTopicName);

String readerName = getEffectiveConValue(
PulsarAdapterUtil.CONF_GATEGORY.Reader.label,
PulsarAdapterUtil.READER_CONF_STD_KEY.readerName.label,
final String readerName = this.getEffectiveConValue(
CONF_GATEGORY.Reader.label,
READER_CONF_STD_KEY.readerName.label,
cycleReaderName);

String startMsgPosStr = getEffectiveConValue(
PulsarAdapterUtil.CONF_GATEGORY.Reader.label,
PulsarAdapterUtil.READER_CONF_CUSTOM_KEY.startMessagePos.label,
final String startMsgPosStr = this.getEffectiveConValue(
CONF_GATEGORY.Reader.label,
READER_CONF_CUSTOM_KEY.startMessagePos.label,
cycleStartMsgPos);
if (!PulsarAdapterUtil.isValideReaderStartPosition(startMsgPosStr)) {
if (!PulsarAdapterUtil.isValideReaderStartPosition(startMsgPosStr))
throw new RuntimeException("Reader:: Invalid value for reader start message position!");
}
return pulsarSpace.getReader(new PulsarSpace.ReaderCacheKey(readerName, topicName, startMsgPosStr), () -> {
PulsarClient pulsarClient = pulsarSpace.getPulsarClient();;
return this.pulsarSpace.getReader(new ReaderCacheKey(readerName, topicName, startMsgPosStr), () -> {
final PulsarClient pulsarClient = this.pulsarSpace.getPulsarClient();

Map<String, Object> readerConf = pulsarSpace.getPulsarNBClientConf().getReaderConfMapTgt();
final Map<String, Object> readerConf = this.pulsarSpace.getPulsarNBClientConf().getReaderConfMapTgt();

// Remove global level settings: "topicName" and "readerName"
readerConf.remove(PulsarAdapterUtil.READER_CONF_STD_KEY.topicName.label);
readerConf.remove(PulsarAdapterUtil.READER_CONF_STD_KEY.readerName.label);
readerConf.remove(READER_CONF_STD_KEY.topicName.label);
readerConf.remove(READER_CONF_STD_KEY.readerName.label);
// Remove non-standard reader configuration properties
readerConf.remove(PulsarAdapterUtil.READER_CONF_CUSTOM_KEY.startMessagePos.label);
readerConf.remove(READER_CONF_CUSTOM_KEY.startMessagePos.label);

try {
ReaderBuilder<?> readerBuilder = pulsarClient.
newReader(pulsarSpace.getPulsarSchema()).
final ReaderBuilder<?> readerBuilder = pulsarClient.
newReader(this.pulsarSpace.getPulsarSchema()).
loadConf(readerConf).
topic(topicName).
readerName(readerName);

MessageId startMsgId = MessageId.latest;
if (startMsgPosStr.equalsIgnoreCase(PulsarAdapterUtil.READER_MSG_POSITION_TYPE.earliest.label)) {
if (startMsgPosStr.equalsIgnoreCase(READER_MSG_POSITION_TYPE.earliest.label))
startMsgId = MessageId.earliest;
}
//TODO: custom start message position is NOT supported yet
//else if (startMsgPosStr.startsWith(PulsarAdapterUtil.READER_MSG_POSITION_TYPE.custom.label)) {
// startMsgId = MessageId.latest;
//}

return readerBuilder.startMessageId(startMsgId).create();
} catch (PulsarClientException ple) {
} catch (final PulsarClientException ple) {
ple.printStackTrace();
throw new RuntimeException("Unable to create a Pulsar reader!");
}
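The reader path above reduces to the standard Pulsar reader API: loadConf() for pass-through settings, then an explicit start position, where "earliest" maps to MessageId.earliest and the default stays MessageId.latest. A minimal, hypothetical usage sketch; the service URL, topic, and reader name below are placeholders, not values from this commit:

import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.Reader;
import org.apache.pulsar.client.api.Schema;

public class ReaderStartPositionSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder broker address.
        try (PulsarClient client = PulsarClient.builder()
                .serviceUrl("pulsar://localhost:6650")
                .build()) {
            Reader<String> reader = client.newReader(Schema.STRING)
                .topic("persistent://public/default/example-topic")   // placeholder topic
                .readerName("example-reader")                         // placeholder name
                .startMessageId(MessageId.earliest)                    // start from the first retained message
                .create();
            Message<String> msg = reader.readNext();                   // blocking read of one message
            System.out.println(msg.getValue());
            reader.close();
        }
    }
}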
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,10 +17,13 @@
|
||||
package io.nosqlbench.adapter.pulsar.dispensers;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.codahale.metrics.Timer.Context;
|
||||
import io.nosqlbench.adapter.pulsar.PulsarSpace;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil.DOC_LEVEL_PARAMS;
|
||||
import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
|
||||
import io.nosqlbench.engine.api.metrics.EndToEndMetricsAdapterUtil;
|
||||
import io.nosqlbench.engine.api.metrics.EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE;
|
||||
import io.nosqlbench.engine.api.templating.ParsedOp;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
@@ -39,7 +42,7 @@ import java.util.stream.Collectors;
|
||||
|
||||
public abstract class PulsarClientOpDispenser extends PulsarBaseOpDispenser {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger("PulsarClientOpDispenser");
|
||||
private static final Logger logger = LogManager.getLogger("PulsarClientOpDispenser");
|
||||
|
||||
protected final PulsarClient pulsarClient;
|
||||
protected final Schema<?> pulsarSchema;
|
||||
@@ -50,20 +53,20 @@ public abstract class PulsarClientOpDispenser extends PulsarBaseOpDispenser {
|
||||
protected final LongFunction<Boolean> seqTrackingFunc;
|
||||
protected final LongFunction<String> payloadRttFieldFunc;
|
||||
protected final LongFunction<Supplier<Transaction>> transactSupplierFunc;
|
||||
protected final LongFunction<Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE>> msgSeqErrSimuTypeSetFunc;
|
||||
protected final LongFunction<Set<MSG_SEQ_ERROR_SIMU_TYPE>> msgSeqErrSimuTypeSetFunc;
|
||||
|
||||
public PulsarClientOpDispenser(DriverAdapter adapter,
|
||||
ParsedOp op,
|
||||
LongFunction<String> tgtNameFunc,
|
||||
PulsarSpace pulsarSpace) {
|
||||
protected PulsarClientOpDispenser(final DriverAdapter adapter,
|
||||
final ParsedOp op,
|
||||
final LongFunction<String> tgtNameFunc,
|
||||
final PulsarSpace pulsarSpace) {
|
||||
super(adapter, op, tgtNameFunc, pulsarSpace);
|
||||
|
||||
this.pulsarClient = pulsarSpace.getPulsarClient();
|
||||
this.pulsarSchema = pulsarSpace.getPulsarSchema();
|
||||
pulsarClient = pulsarSpace.getPulsarClient();
|
||||
pulsarSchema = pulsarSpace.getPulsarSchema();
|
||||
|
||||
// Doc-level parameter: use_transaction
|
||||
this.useTransactFunc = lookupStaticBoolConfigValueFunc(
|
||||
PulsarAdapterUtil.DOC_LEVEL_PARAMS.USE_TRANSACTION.label, false);
|
||||
useTransactFunc = this.lookupStaticBoolConfigValueFunc(
|
||||
DOC_LEVEL_PARAMS.USE_TRANSACTION.label, false);
|
||||
|
||||
// TODO: add support for "operation number per transaction"
|
||||
// Doc-level parameter: transact_batch_num
|
||||
@@ -71,58 +74,53 @@ public abstract class PulsarClientOpDispenser extends PulsarBaseOpDispenser {
|
||||
// PulsarAdapterUtil.DOC_LEVEL_PARAMS.TRANSACT_BATCH_NUM.label, 1);
|
||||
|
||||
// Doc-level parameter: seq_tracking
|
||||
this.seqTrackingFunc = lookupStaticBoolConfigValueFunc(
|
||||
PulsarAdapterUtil.DOC_LEVEL_PARAMS.SEQ_TRACKING.label, false);
|
||||
seqTrackingFunc = this.lookupStaticBoolConfigValueFunc(
|
||||
DOC_LEVEL_PARAMS.SEQ_TRACKING.label, false);
|
||||
|
||||
// Doc-level parameter: payload-tracking-field
|
||||
this.payloadRttFieldFunc = (l) -> parsedOp.getStaticConfigOr(
|
||||
PulsarAdapterUtil.DOC_LEVEL_PARAMS.RTT_TRACKING_FIELD.label, "");
|
||||
payloadRttFieldFunc = l -> this.parsedOp.getStaticConfigOr(
|
||||
DOC_LEVEL_PARAMS.RTT_TRACKING_FIELD.label, "");
|
||||
|
||||
this.transactSupplierFunc = (l) -> getTransactionSupplier();
|
||||
transactSupplierFunc = l -> this.getTransactionSupplier();
|
||||
|
||||
this.msgSeqErrSimuTypeSetFunc = getStaticErrSimuTypeSetOpValueFunc();
|
||||
msgSeqErrSimuTypeSetFunc = this.getStaticErrSimuTypeSetOpValueFunc();
|
||||
}
|
||||
|
||||
protected Supplier<Transaction> getTransactionSupplier() {
|
||||
return () -> {
|
||||
try (Timer.Context time = pulsarAdapterMetrics.getCommitTransactionTimer().time() ){
|
||||
return pulsarClient
|
||||
try (final Context time = this.pulsarAdapterMetrics.getCommitTransactionTimer().time() ){
|
||||
return this.pulsarClient
|
||||
.newTransaction()
|
||||
.build()
|
||||
.get();
|
||||
} catch (ExecutionException | InterruptedException err) {
|
||||
if (logger.isWarnEnabled()) {
|
||||
logger.warn("Error while starting a new transaction", err);
|
||||
}
|
||||
} catch (final ExecutionException | InterruptedException err) {
|
||||
if (PulsarClientOpDispenser.logger.isWarnEnabled())
|
||||
PulsarClientOpDispenser.logger.warn("Error while starting a new transaction", err);
|
||||
throw new RuntimeException(err);
|
||||
} catch (PulsarClientException err) {
|
||||
} catch (final PulsarClientException err) {
|
||||
throw new RuntimeException("Transactions are not enabled on Pulsar Client, " +
|
||||
"please set client.enableTransaction=true in your Pulsar Client configuration");
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
protected LongFunction<Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE>> getStaticErrSimuTypeSetOpValueFunc() {
|
||||
LongFunction<Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE>> setStringLongFunction;
|
||||
setStringLongFunction = (l) ->
|
||||
parsedOp.getOptionalStaticValue(PulsarAdapterUtil.DOC_LEVEL_PARAMS.SEQERR_SIMU.label, String.class)
|
||||
protected LongFunction<Set<MSG_SEQ_ERROR_SIMU_TYPE>> getStaticErrSimuTypeSetOpValueFunc() {
|
||||
final LongFunction<Set<MSG_SEQ_ERROR_SIMU_TYPE>> setStringLongFunction;
|
||||
setStringLongFunction = l ->
|
||||
this.parsedOp.getOptionalStaticValue(DOC_LEVEL_PARAMS.SEQERR_SIMU.label, String.class)
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.map(value -> {
|
||||
Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE> set = new HashSet<>();
|
||||
Set<MSG_SEQ_ERROR_SIMU_TYPE> set = new HashSet<>();
|
||||
|
||||
if (StringUtils.contains(value,',')) {
|
||||
set = Arrays.stream(value.split(","))
|
||||
.map(EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE::parseSimuType)
|
||||
.filter(Optional::isPresent)
|
||||
.map(Optional::get)
|
||||
.collect(Collectors.toCollection(LinkedHashSet::new));
|
||||
}
|
||||
if (StringUtils.contains(value,',')) set = Arrays.stream(value.split(","))
|
||||
.map(MSG_SEQ_ERROR_SIMU_TYPE::parseSimuType)
|
||||
.filter(Optional::isPresent)
|
||||
.map(Optional::get)
|
||||
.collect(Collectors.toCollection(LinkedHashSet::new));
|
||||
|
||||
return set;
|
||||
}).orElse(Collections.emptySet());
|
||||
logger.info(
|
||||
PulsarAdapterUtil.DOC_LEVEL_PARAMS.SEQERR_SIMU.label + ": {}",
|
||||
setStringLongFunction.apply(0));
|
||||
PulsarClientOpDispenser.logger.info("{}: {}", DOC_LEVEL_PARAMS.SEQERR_SIMU.label, setStringLongFunction.apply(0));
|
||||
return setStringLongFunction;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,6 +17,7 @@
|
||||
package io.nosqlbench.adapter.pulsar.ops;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.codahale.metrics.Timer.Context;
|
||||
import io.nosqlbench.adapter.pulsar.exception.PulsarAdapterAsyncOperationFailedException;
|
||||
import io.nosqlbench.adapter.pulsar.exception.PulsarAdapterUnexpectedException;
|
||||
import io.nosqlbench.adapter.pulsar.util.*;
|
||||
@@ -31,6 +32,7 @@ import org.apache.pulsar.client.api.transaction.Transaction;
|
||||
import org.apache.pulsar.common.schema.KeyValue;
|
||||
import org.apache.pulsar.shade.org.apache.avro.AvroRuntimeException;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
@@ -40,7 +42,7 @@ import java.util.function.Supplier;
|
||||
|
||||
public class MessageConsumerOp extends PulsarClientOp {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger(MessageConsumerOp.class);
|
||||
private static final Logger logger = LogManager.getLogger(MessageConsumerOp.class);
|
||||
|
||||
private final boolean useTransact;
|
||||
private final boolean seqTracking;
|
||||
@@ -51,18 +53,18 @@ public class MessageConsumerOp extends PulsarClientOp {
|
||||
private final Consumer<?> consumer;
|
||||
private final int consumerTimeoutInSec;
|
||||
|
||||
public MessageConsumerOp(PulsarAdapterMetrics pulsarAdapterMetrics,
|
||||
PulsarClient pulsarClient,
|
||||
Schema<?> pulsarSchema,
|
||||
boolean asyncApi,
|
||||
boolean useTransact,
|
||||
boolean seqTracking,
|
||||
Supplier<Transaction> transactSupplier,
|
||||
String payloadRttField,
|
||||
EndToEndStartingTimeSource e2eStartingTimeSrc,
|
||||
Function<String, ReceivedMessageSequenceTracker> receivedMessageSequenceTrackerForTopic,
|
||||
Consumer<?> consumer,
|
||||
int consumerTimeoutInSec) {
|
||||
public MessageConsumerOp(final PulsarAdapterMetrics pulsarAdapterMetrics,
|
||||
final PulsarClient pulsarClient,
|
||||
final Schema<?> pulsarSchema,
|
||||
final boolean asyncApi,
|
||||
final boolean useTransact,
|
||||
final boolean seqTracking,
|
||||
final Supplier<Transaction> transactSupplier,
|
||||
final String payloadRttField,
|
||||
final EndToEndStartingTimeSource e2eStartingTimeSrc,
|
||||
final Function<String, ReceivedMessageSequenceTracker> receivedMessageSequenceTrackerForTopic,
|
||||
final Consumer<?> consumer,
|
||||
final int consumerTimeoutInSec) {
|
||||
super(pulsarAdapterMetrics, pulsarClient, pulsarSchema, asyncApi);
|
||||
|
||||
this.useTransact = useTransact;
|
||||
@@ -76,193 +78,158 @@ public class MessageConsumerOp extends PulsarClientOp {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object apply(long value) {
|
||||
final Transaction transaction;
|
||||
if (useTransact) {
|
||||
// if you are in a transaction you cannot set the schema per-message
|
||||
transaction = transactSupplier.get();
|
||||
public Object apply(final long value) {
|
||||
Transaction transaction;
|
||||
// if you are in a transaction you cannot set the schema per-message
|
||||
if (this.useTransact) transaction = this.transactSupplier.get();
|
||||
else transaction = null;
|
||||
|
||||
if (!this.asyncApi) try {
|
||||
final Message<?> message;
|
||||
|
||||
// wait forever
|
||||
if (0 >= consumerTimeoutInSec) message = this.consumer.receive();
|
||||
else {
|
||||
message = this.consumer.receive(this.consumerTimeoutInSec, TimeUnit.SECONDS);
|
||||
if (null == message) if (MessageConsumerOp.logger.isDebugEnabled())
|
||||
MessageConsumerOp.logger.debug("Failed to sync-receive a message before time out ({} seconds)", this.consumerTimeoutInSec);
|
||||
}
|
||||
|
||||
this.handleMessage(transaction, message);
|
||||
} catch (final Exception e) {
|
||||
throw new PulsarAdapterUnexpectedException("Sync message receiving failed - timeout value: " + this.consumerTimeoutInSec + " seconds ");
|
||||
}
|
||||
else {
|
||||
transaction = null;
|
||||
}
|
||||
|
||||
if (!asyncApi) {
|
||||
try {
|
||||
Message<?> message;
|
||||
|
||||
if (consumerTimeoutInSec <= 0) {
|
||||
// wait forever
|
||||
message = consumer.receive();
|
||||
}
|
||||
else {
|
||||
message = consumer.receive(consumerTimeoutInSec, TimeUnit.SECONDS);
|
||||
if (message == null) {
|
||||
if ( logger.isDebugEnabled() ) {
|
||||
logger.debug("Failed to sync-receive a message before time out ({} seconds)", consumerTimeoutInSec);
|
||||
}
|
||||
}
|
||||
else try {
|
||||
CompletableFuture<? extends Message<?>> msgRecvFuture = this.consumer.receiveAsync();
|
||||
// add commit step
|
||||
if (this.useTransact) msgRecvFuture = msgRecvFuture.thenCompose(msg -> {
|
||||
final Context ctx = this.transactionCommitTimer.time();
|
||||
return transaction
|
||||
.commit()
|
||||
.whenComplete((m, e) -> ctx.close())
|
||||
.thenApply(v -> msg);
|
||||
}
|
||||
);
|
||||
|
||||
handleMessage(transaction, message);
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new PulsarAdapterUnexpectedException("" +
|
||||
"Sync message receiving failed - timeout value: " + consumerTimeoutInSec + " seconds ");
|
||||
}
|
||||
}
|
||||
else {
|
||||
try {
|
||||
CompletableFuture<? extends Message<?>> msgRecvFuture = consumer.receiveAsync();
|
||||
if (useTransact) {
|
||||
// add commit step
|
||||
msgRecvFuture = msgRecvFuture.thenCompose(msg -> {
|
||||
Timer.Context ctx = transactionCommitTimer.time();
|
||||
return transaction
|
||||
.commit()
|
||||
.whenComplete((m,e) -> ctx.close())
|
||||
.thenApply(v-> msg);
|
||||
}
|
||||
);
|
||||
msgRecvFuture.thenAccept(message -> {
|
||||
try {
|
||||
this.handleMessage(transaction, message);
|
||||
} catch (final PulsarClientException | TimeoutException e) {
|
||||
throw new PulsarAdapterAsyncOperationFailedException(e);
|
||||
} catch (final InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
} catch (final ExecutionException e) {
|
||||
throw new PulsarAdapterAsyncOperationFailedException(e.getCause());
|
||||
}
|
||||
|
||||
msgRecvFuture.thenAccept(message -> {
|
||||
try {
|
||||
handleMessage(transaction, message);
|
||||
} catch (PulsarClientException | TimeoutException e) {
|
||||
throw new PulsarAdapterAsyncOperationFailedException(e);
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
} catch (ExecutionException e) {
|
||||
throw new PulsarAdapterAsyncOperationFailedException(e.getCause());
|
||||
}
|
||||
}).exceptionally(ex -> {
|
||||
throw new PulsarAdapterAsyncOperationFailedException(ex);
|
||||
});
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new PulsarAdapterUnexpectedException(e);
|
||||
}
|
||||
}).exceptionally(ex -> {
|
||||
throw new PulsarAdapterAsyncOperationFailedException(ex);
|
||||
});
|
||||
} catch (final Exception e) {
|
||||
throw new PulsarAdapterUnexpectedException(e);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private void handleMessage(Transaction transaction, Message<?> message)
|
||||
private void handleMessage(final Transaction transaction, final Message<?> message)
|
||||
throws PulsarClientException, InterruptedException, ExecutionException, TimeoutException {
|
||||
|
||||
// acknowledge the message as soon as possible
|
||||
if (!useTransact) {
|
||||
consumer.acknowledgeAsync(message.getMessageId())
|
||||
.get(consumerTimeoutInSec, TimeUnit.SECONDS);
|
||||
} else {
|
||||
consumer.acknowledgeAsync(message.getMessageId(), transaction)
|
||||
.get(consumerTimeoutInSec, TimeUnit.SECONDS);
|
||||
if (!this.useTransact) this.consumer.acknowledgeAsync(message.getMessageId())
|
||||
.get(this.consumerTimeoutInSec, TimeUnit.SECONDS);
|
||||
else {
|
||||
this.consumer.acknowledgeAsync(message.getMessageId(), transaction)
|
||||
.get(this.consumerTimeoutInSec, TimeUnit.SECONDS);
|
||||
|
||||
// little problem: here we are counting the "commit" time
|
||||
// inside the overall time spent for the execution of the consume operation
|
||||
// we should refactor this operation as for PulsarProducerOp, and use the passed callback
|
||||
// to track with precision the time spent for the operation and for the commit
|
||||
try (Timer.Context ctx = transactionCommitTimer.time()) {
|
||||
try (final Context ctx = this.transactionCommitTimer.time()) {
|
||||
transaction.commit().get();
|
||||
}
|
||||
}
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
Object decodedPayload = message.getValue();
|
||||
if (decodedPayload instanceof GenericObject) {
|
||||
if (MessageConsumerOp.logger.isDebugEnabled()) {
|
||||
final Object decodedPayload = message.getValue();
|
||||
if (decodedPayload instanceof GenericObject object) {
|
||||
// GenericObject is a wrapper for Primitives, for AVRO/JSON structs and for KeyValu
|
||||
// we fall here with a configured AVRO schema or with AUTO_CONSUME
|
||||
GenericObject object = (GenericObject) decodedPayload;
|
||||
logger.debug("({}) message received: msg-key={}; msg-properties={}; msg-payload={}",
|
||||
consumer.getConsumerName(),
|
||||
MessageConsumerOp.logger.debug("({}) message received: msg-key={}; msg-properties={}; msg-payload={}",
|
||||
this.consumer.getConsumerName(),
|
||||
message.getKey(),
|
||||
message.getProperties(),
|
||||
object.getNativeObject() + "");
|
||||
}
|
||||
else {
|
||||
logger.debug("({}) message received: msg-key={}; msg-properties={}; msg-payload={}",
|
||||
consumer.getConsumerName(),
|
||||
message.getKey(),
|
||||
message.getProperties(),
|
||||
new String(message.getData()));
|
||||
String.valueOf(object.getNativeObject()));
|
||||
}
|
||||
else MessageConsumerOp.logger.debug("({}) message received: msg-key={}; msg-properties={}; msg-payload={}",
|
||||
this.consumer.getConsumerName(),
|
||||
message.getKey(),
|
||||
message.getProperties(),
|
||||
new String(message.getData(), StandardCharsets.UTF_8));
|
||||
}
|
||||
|
||||
if (!payloadRttField.isEmpty()) {
|
||||
if (!this.payloadRttField.isEmpty()) {
|
||||
boolean done = false;
|
||||
Object decodedPayload = message.getValue();
|
||||
final Object decodedPayload = message.getValue();
|
||||
Long extractedSendTime = null;
|
||||
// if Pulsar is able to decode this it is better to let it do the work
|
||||
// because Pulsar caches the Schema, handles Schema evolution
|
||||
// as much efficiently as possible
|
||||
if (decodedPayload instanceof GenericRecord) { // AVRO and AUTO_CONSUME
|
||||
final GenericRecord pulsarGenericRecord = (GenericRecord) decodedPayload;
|
||||
if (decodedPayload instanceof final GenericRecord pulsarGenericRecord) { // AVRO and AUTO_CONSUME
|
||||
|
||||
Object field = null;
|
||||
// KeyValue is a special wrapper in Pulsar to represent a pair of values
|
||||
// a Key and a Value
|
||||
Object nativeObject = pulsarGenericRecord.getNativeObject();
|
||||
if (nativeObject instanceof KeyValue) {
|
||||
KeyValue keyValue = (KeyValue) nativeObject;
|
||||
final Object nativeObject = pulsarGenericRecord.getNativeObject();
|
||||
if (nativeObject instanceof KeyValue keyValue) {
|
||||
// look into the Key
|
||||
if (keyValue.getKey() instanceof GenericRecord) {
|
||||
GenericRecord keyPart = (GenericRecord) keyValue.getKey();
|
||||
if (keyValue.getKey() instanceof GenericRecord keyPart) {
|
||||
try {
|
||||
field = keyPart.getField(payloadRttField);
|
||||
} catch (AvroRuntimeException err) {
|
||||
field = keyPart.getField(this.payloadRttField);
|
||||
} catch (final AvroRuntimeException err) {
|
||||
// field is not in the key
|
||||
logger.error("Cannot find {} in key {}: {}", payloadRttField, keyPart, err + "");
|
||||
MessageConsumerOp.logger.error("Cannot find {} in key {}: {}", this.payloadRttField, keyPart, String.valueOf(err));
|
||||
}
|
||||
}
|
||||
// look into the Value
|
||||
if (keyValue.getValue() instanceof GenericRecord && field == null) {
|
||||
GenericRecord valuePart = (GenericRecord) keyValue.getValue();
|
||||
if ((keyValue.getValue() instanceof GenericRecord valuePart) && (null == field)) {
|
||||
try {
|
||||
field = valuePart.getField(payloadRttField);
|
||||
} catch (AvroRuntimeException err) {
|
||||
field = valuePart.getField(this.payloadRttField);
|
||||
} catch (final AvroRuntimeException err) {
|
||||
// field is not in the value
|
||||
logger.error("Cannot find {} in value {}: {}", payloadRttField, valuePart, err + "");
|
||||
MessageConsumerOp.logger.error("Cannot find {} in value {}: {}", this.payloadRttField, valuePart, String.valueOf(err));
|
||||
}
|
||||
}
|
||||
if (field == null) {
|
||||
throw new RuntimeException("Cannot find field {}" + payloadRttField + " in " + keyValue.getKey() + " and " + keyValue.getValue());
|
||||
}
|
||||
} else {
|
||||
field = pulsarGenericRecord.getField(payloadRttField);
|
||||
}
|
||||
if (null == field)
|
||||
throw new RuntimeException("Cannot find field {}" + this.payloadRttField + " in " + keyValue.getKey() + " and " + keyValue.getValue());
|
||||
} else field = pulsarGenericRecord.getField(this.payloadRttField);
|
||||
|
||||
if (field != null) {
|
||||
if (field instanceof Number) {
|
||||
extractedSendTime = ((Number) field).longValue();
|
||||
} else {
|
||||
extractedSendTime = Long.valueOf(field.toString());
|
||||
}
|
||||
} else {
|
||||
logger.error("Cannot find {} in value {}", payloadRttField, pulsarGenericRecord);
|
||||
}
|
||||
if (null != field) if (field instanceof Number) extractedSendTime = ((Number) field).longValue();
|
||||
else extractedSendTime = Long.valueOf(field.toString());
|
||||
else
|
||||
MessageConsumerOp.logger.error("Cannot find {} in value {}", this.payloadRttField, pulsarGenericRecord);
|
||||
done = true;
|
||||
}
|
||||
if (!done) {
|
||||
org.apache.avro.Schema avroSchema = getAvroSchemaFromConfiguration();
|
||||
org.apache.avro.generic.GenericRecord avroGenericRecord =
|
||||
final org.apache.avro.Schema avroSchema = this.getAvroSchemaFromConfiguration();
|
||||
final org.apache.avro.generic.GenericRecord avroGenericRecord =
|
||||
PulsarAvroSchemaUtil.GetGenericRecord_ApacheAvro(avroSchema, message.getData());
|
||||
if (avroGenericRecord.hasField(payloadRttField)) {
|
||||
extractedSendTime = (Long) avroGenericRecord.get(payloadRttField);
|
||||
}
|
||||
if (avroGenericRecord.hasField(this.payloadRttField))
|
||||
extractedSendTime = (Long) avroGenericRecord.get(this.payloadRttField);
|
||||
}
|
||||
if (extractedSendTime != null) {
|
||||
if (null != extractedSendTime) {
|
||||
// fallout expects latencies in "ns" and not in "ms"
|
||||
long delta = TimeUnit.MILLISECONDS
|
||||
final long delta = TimeUnit.MILLISECONDS
|
||||
.toNanos(System.currentTimeMillis() - extractedSendTime);
|
||||
payloadRttHistogram.update(delta);
|
||||
this.payloadRttHistogram.update(delta);
|
||||
}
|
||||
}
|
||||
|
||||
// keep track end-to-end message processing latency
|
||||
if (e2eStartingTimeSrc != EndToEndStartingTimeSource.NONE) {
|
||||
if (EndToEndStartingTimeSource.NONE != e2eStartingTimeSrc) {
|
||||
long startTimeStamp = 0L;
|
||||
|
||||
switch (e2eStartingTimeSrc) {
|
||||
switch (this.e2eStartingTimeSrc) {
|
||||
case MESSAGE_PUBLISH_TIME:
|
||||
startTimeStamp = message.getPublishTime();
|
||||
break;
|
||||
@@ -270,31 +237,33 @@ public class MessageConsumerOp extends PulsarClientOp {
|
||||
startTimeStamp = message.getEventTime();
|
||||
break;
|
||||
case MESSAGE_PROPERTY_E2E_STARTING_TIME:
|
||||
String startingTimeProperty = message.getProperty("e2e_starting_time");
|
||||
startTimeStamp = startingTimeProperty != null ? Long.parseLong(startingTimeProperty) : 0L;
|
||||
final String startingTimeProperty = message.getProperty("e2e_starting_time");
|
||||
startTimeStamp = (null != startingTimeProperty) ? Long.parseLong(startingTimeProperty) : 0L;
|
||||
break;
|
||||
}
|
||||
|
||||
if (startTimeStamp != 0L) {
|
||||
long e2eMsgLatency = System.currentTimeMillis() - startTimeStamp;
|
||||
e2eMsgProcLatencyHistogram.update(e2eMsgLatency);
|
||||
if (0L != startTimeStamp) {
|
||||
final long e2eMsgLatency = System.currentTimeMillis() - startTimeStamp;
|
||||
this.e2eMsgProcLatencyHistogram.update(e2eMsgLatency);
|
||||
}
|
||||
}
|
||||
|
||||
// keep track of message errors and update error counters
|
||||
if (seqTracking) checkAndUpdateMessageErrorCounter(message);
|
||||
if (this.seqTracking) {
|
||||
this.checkAndUpdateMessageErrorCounter(message);
|
||||
}
|
||||
|
||||
int messageSize = message.getData().length;
|
||||
messageSizeHistogram.update(messageSize);
|
||||
final int messageSize = message.getData().length;
|
||||
this.messageSizeHistogram.update(messageSize);
|
||||
}
|
||||
|
||||
private void checkAndUpdateMessageErrorCounter(Message<?> message) {
|
||||
String msgSeqIdStr = message.getProperty(PulsarAdapterUtil.MSG_SEQUENCE_NUMBER);
|
||||
private void checkAndUpdateMessageErrorCounter(final Message<?> message) {
|
||||
final String msgSeqIdStr = message.getProperty(PulsarAdapterUtil.MSG_SEQUENCE_NUMBER);
|
||||
|
||||
if ( !StringUtils.isBlank(msgSeqIdStr) ) {
|
||||
long sequenceNumber = Long.parseLong(msgSeqIdStr);
|
||||
ReceivedMessageSequenceTracker receivedMessageSequenceTracker =
|
||||
receivedMessageSequenceTrackerForTopic.apply(message.getTopicName());
|
||||
final long sequenceNumber = Long.parseLong(msgSeqIdStr);
|
||||
final ReceivedMessageSequenceTracker receivedMessageSequenceTracker =
|
||||
this.receivedMessageSequenceTrackerForTopic.apply(message.getTopicName());
|
||||
receivedMessageSequenceTracker.sequenceNumberReceived(sequenceNumber);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,8 +17,10 @@
|
||||
package io.nosqlbench.adapter.pulsar.ops;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.codahale.metrics.Timer.Context;
|
||||
import io.nosqlbench.adapter.pulsar.exception.PulsarAdapterAsyncOperationFailedException;
|
||||
import io.nosqlbench.adapter.pulsar.exception.PulsarAdapterUnexpectedException;
|
||||
import io.nosqlbench.engine.api.metrics.EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE;
|
||||
import io.nosqlbench.engine.api.metrics.MessageSequenceNumberSendingHandler;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterMetrics;
|
||||
import io.nosqlbench.adapter.pulsar.util.PulsarAdapterUtil;
|
||||
@@ -45,12 +47,12 @@ import java.util.function.Supplier;
|
||||
|
||||
public class MessageProducerOp extends PulsarClientOp {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger("MessageProducerOp");
|
||||
private static final Logger logger = LogManager.getLogger("MessageProducerOp");
|
||||
|
||||
private final boolean useTransact;
|
||||
private final boolean seqTracking;
|
||||
private final Supplier<Transaction> transactSupplier;
|
||||
private final Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE> errSimuTypeSet;
|
||||
private final Set<MSG_SEQ_ERROR_SIMU_TYPE> errSimuTypeSet;
|
||||
private final Producer<?> producer;
|
||||
private final String msgKey;
|
||||
private final String msgPropRawJsonStr;
|
||||
@@ -60,18 +62,18 @@ public class MessageProducerOp extends PulsarClientOp {
|
||||
private final ThreadLocal<Map<String, MessageSequenceNumberSendingHandler>> MessageSequenceNumberSendingHandlersThreadLocal =
|
||||
ThreadLocal.withInitial(HashMap::new);
|
||||
|
||||
public MessageProducerOp(PulsarAdapterMetrics pulsarAdapterMetrics,
|
||||
PulsarClient pulsarClient,
|
||||
Schema<?> pulsarSchema,
|
||||
boolean asyncApi,
|
||||
boolean useTransact,
|
||||
boolean seqTracking,
|
||||
Supplier<Transaction> transactSupplier,
|
||||
Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE> errSimuTypeSet,
|
||||
Producer<?> producer,
|
||||
String msgKey,
|
||||
String msgProp,
|
||||
String msgValue) {
|
||||
public MessageProducerOp(final PulsarAdapterMetrics pulsarAdapterMetrics,
|
||||
final PulsarClient pulsarClient,
|
||||
final Schema<?> pulsarSchema,
|
||||
final boolean asyncApi,
|
||||
final boolean useTransact,
|
||||
final boolean seqTracking,
|
||||
final Supplier<Transaction> transactSupplier,
|
||||
final Set<MSG_SEQ_ERROR_SIMU_TYPE> errSimuTypeSet,
|
||||
final Producer<?> producer,
|
||||
final String msgKey,
|
||||
final String msgProp,
|
||||
final String msgValue) {
|
||||
super(pulsarAdapterMetrics, pulsarClient, pulsarSchema, asyncApi);
|
||||
|
||||
this.useTransact = useTransact;
|
||||
@@ -80,14 +82,14 @@ public class MessageProducerOp extends PulsarClientOp {
|
||||
this.errSimuTypeSet = errSimuTypeSet;
|
||||
this.producer = producer;
|
||||
this.msgKey = msgKey;
|
||||
this.msgPropRawJsonStr = msgProp;
|
||||
msgPropRawJsonStr = msgProp;
|
||||
this.msgValue = msgValue;
|
||||
|
||||
getMsgPropMapFromRawJsonStr();
|
||||
this.getMsgPropMapFromRawJsonStr();
|
||||
}
|
||||
|
||||
private MessageSequenceNumberSendingHandler getMessageSequenceNumberSendingHandler(String topicName) {
|
||||
return MessageSequenceNumberSendingHandlersThreadLocal.get()
|
||||
private MessageSequenceNumberSendingHandler getMessageSequenceNumberSendingHandler(final String topicName) {
|
||||
return this.MessageSequenceNumberSendingHandlersThreadLocal.get()
|
||||
.computeIfAbsent(topicName, k -> new MessageSequenceNumberSendingHandler());
|
||||
}
|
||||
|
||||
@@ -95,185 +97,160 @@ public class MessageProducerOp extends PulsarClientOp {
|
||||
// - if Yes, convert it to a map
|
||||
// - otherwise, log an error message and ignore message properties without throwing a runtime exception
|
||||
private void getMsgPropMapFromRawJsonStr() {
|
||||
if (!StringUtils.isBlank(msgPropRawJsonStr)) {
|
||||
try {
|
||||
msgProperties.putAll(PulsarAdapterUtil.convertJsonToMap(msgPropRawJsonStr));
|
||||
}
|
||||
catch (Exception e) {
|
||||
logger.error(
|
||||
"Error parsing message property JSON string {}, ignore message properties!",
|
||||
msgPropRawJsonStr);
|
||||
}
|
||||
if (!StringUtils.isBlank(this.msgPropRawJsonStr)) try {
|
||||
this.msgProperties.putAll(PulsarAdapterUtil.convertJsonToMap(this.msgPropRawJsonStr));
|
||||
} catch (final Exception e) {
|
||||
MessageProducerOp.logger.error(
|
||||
"Error parsing message property JSON string {}, ignore message properties!",
|
||||
this.msgPropRawJsonStr);
|
||||
}
|
||||
|
||||
if (seqTracking) {
|
||||
long nextSequenceNumber = getMessageSequenceNumberSendingHandler(producer.getTopic())
|
||||
.getNextSequenceNumber(errSimuTypeSet);
|
||||
msgProperties.put(PulsarAdapterUtil.MSG_SEQUENCE_NUMBER, String.valueOf(nextSequenceNumber));
|
||||
if (this.seqTracking) {
|
||||
final long nextSequenceNumber = this.getMessageSequenceNumberSendingHandler(this.producer.getTopic())
|
||||
.getNextSequenceNumber(this.errSimuTypeSet);
|
||||
this.msgProperties.put(PulsarAdapterUtil.MSG_SEQUENCE_NUMBER, String.valueOf(nextSequenceNumber));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object apply(long value) {
|
||||
public Object apply(final long value) {
|
||||
|
||||
TypedMessageBuilder typedMessageBuilder;
|
||||
|
||||
final Transaction transaction;
|
||||
if (useTransact) {
|
||||
Transaction transaction;
|
||||
if (this.useTransact) {
|
||||
// if you are in a transaction you cannot set the schema per-message
|
||||
transaction = transactSupplier.get();
|
||||
typedMessageBuilder = producer.newMessage(transaction);
|
||||
transaction = this.transactSupplier.get();
|
||||
typedMessageBuilder = this.producer.newMessage(transaction);
|
||||
}
|
||||
else {
|
||||
transaction = null;
|
||||
typedMessageBuilder = producer.newMessage(pulsarSchema);
|
||||
typedMessageBuilder = this.producer.newMessage(this.pulsarSchema);
|
||||
}
|
||||
|
||||
// set message key
|
||||
if ( !StringUtils.isBlank(msgKey) && !(pulsarSchema instanceof KeyValueSchema) ) {
|
||||
typedMessageBuilder = typedMessageBuilder.key(msgKey);
|
||||
}
|
||||
if ( !StringUtils.isBlank(this.msgKey) && !(this.pulsarSchema instanceof KeyValueSchema) )
|
||||
typedMessageBuilder = typedMessageBuilder.key(this.msgKey);
|
||||
|
||||
// set message properties
|
||||
if (!StringUtils.isBlank(msgPropRawJsonStr) || seqTracking) {
|
||||
typedMessageBuilder = typedMessageBuilder.properties(msgProperties);
|
||||
}
|
||||
if (!StringUtils.isBlank(this.msgPropRawJsonStr) || this.seqTracking)
|
||||
typedMessageBuilder = typedMessageBuilder.properties(this.msgProperties);
|
||||
|
||||
// set message payload
|
||||
int messageSize;
|
||||
SchemaType schemaType = pulsarSchema.getSchemaInfo().getType();
|
||||
if (pulsarSchema instanceof KeyValueSchema) {
|
||||
KeyValueSchema keyValueSchema = (KeyValueSchema) pulsarSchema;
|
||||
org.apache.avro.Schema avroSchema = getAvroSchemaFromConfiguration();
|
||||
GenericRecord payload = PulsarAvroSchemaUtil.GetGenericRecord_PulsarAvro(
|
||||
final int messageSize;
|
||||
final SchemaType schemaType = this.pulsarSchema.getSchemaInfo().getType();
|
||||
if (this.pulsarSchema instanceof KeyValueSchema keyValueSchema) {
|
||||
final org.apache.avro.Schema avroSchema = this.getAvroSchemaFromConfiguration();
|
||||
final GenericRecord payload = PulsarAvroSchemaUtil.GetGenericRecord_PulsarAvro(
|
||||
(GenericAvroSchema) keyValueSchema.getValueSchema(),
|
||||
avroSchema,
|
||||
msgValue
|
||||
this.msgValue
|
||||
);
|
||||
|
||||
org.apache.avro.Schema avroSchemaForKey = getKeyAvroSchemaFromConfiguration();
|
||||
GenericRecord key = PulsarAvroSchemaUtil.GetGenericRecord_PulsarAvro(
|
||||
final org.apache.avro.Schema avroSchemaForKey = this.getKeyAvroSchemaFromConfiguration();
|
||||
final GenericRecord key = PulsarAvroSchemaUtil.GetGenericRecord_PulsarAvro(
|
||||
(GenericAvroSchema) keyValueSchema.getKeySchema(),
|
||||
avroSchemaForKey,
|
||||
msgKey
|
||||
this.msgKey
|
||||
);
|
||||
|
||||
typedMessageBuilder = typedMessageBuilder.value(new KeyValue(key, payload));
|
||||
// TODO: add a way to calculate the message size for KEY_VALUE messages
|
||||
messageSize = msgKey.length() + msgValue.length();
|
||||
messageSize = this.msgKey.length() + this.msgValue.length();
|
||||
}
|
||||
else if (PulsarAdapterUtil.isAvroSchemaTypeStr(schemaType.name())) {
|
||||
GenericRecord payload = PulsarAvroSchemaUtil.GetGenericRecord_PulsarAvro(
|
||||
(GenericAvroSchema) pulsarSchema,
|
||||
pulsarSchema.getSchemaInfo().getSchemaDefinition(),
|
||||
msgValue
|
||||
final GenericRecord payload = PulsarAvroSchemaUtil.GetGenericRecord_PulsarAvro(
|
||||
(GenericAvroSchema) this.pulsarSchema,
|
||||
this.pulsarSchema.getSchemaInfo().getSchemaDefinition(),
|
||||
this.msgValue
|
||||
);
|
||||
typedMessageBuilder = typedMessageBuilder.value(payload);
|
||||
// TODO: add a way to calculate the message size for AVRO messages
|
||||
messageSize = msgValue.length();
|
||||
messageSize = this.msgValue.length();
|
||||
} else {
|
||||
byte[] array = msgValue.getBytes(StandardCharsets.UTF_8);
|
||||
final byte[] array = this.msgValue.getBytes(StandardCharsets.UTF_8);
|
||||
typedMessageBuilder = typedMessageBuilder.value(array);
|
||||
messageSize = array.length;
|
||||
}
|
||||
|
||||
messageSizeHistogram.update(messageSize);
|
||||
this.messageSizeHistogram.update(messageSize);
|
||||
|
||||
//TODO: add error handling with failed message production
|
||||
if (!asyncApi) {
|
||||
try {
|
||||
logger.trace("Sending message");
|
||||
typedMessageBuilder.send();
|
||||
if (!this.asyncApi) try {
|
||||
MessageProducerOp.logger.trace("Sending message");
|
||||
typedMessageBuilder.send();
|
||||
|
||||
if (useTransact) {
|
||||
try (Timer.Context ctx = transactionCommitTimer.time()) {
|
||||
transaction.commit().get();
|
||||
}
|
||||
}
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
if (PulsarAdapterUtil.isAvroSchemaTypeStr(schemaType.name())) {
|
||||
org.apache.avro.Schema avroSchema = getAvroSchemaFromConfiguration();
|
||||
org.apache.avro.generic.GenericRecord avroGenericRecord =
|
||||
PulsarAvroSchemaUtil.GetGenericRecord_ApacheAvro(avroSchema, msgValue);
|
||||
|
||||
logger.debug("({}) Sync message sent: msg-key={}; msg-properties={}; msg-payload={})",
|
||||
producer.getProducerName(),
|
||||
msgKey,
|
||||
msgProperties,
|
||||
avroGenericRecord.toString());
|
||||
}
|
||||
else {
|
||||
logger.debug("({}) Sync message sent; msg-key={}; msg-properties={}; msg-payload={}",
|
||||
producer.getProducerName(),
|
||||
msgKey,
|
||||
msgProperties,
|
||||
msgValue);
|
||||
}
|
||||
}
|
||||
if (this.useTransact) try (final Context ctx = this.transactionCommitTimer.time()) {
|
||||
transaction.commit().get();
|
||||
}
|
||||
catch (PulsarClientException | ExecutionException | InterruptedException pce) {
|
||||
String errMsg =
|
||||
"Sync message sending failed: " +
|
||||
"key - " + msgKey + "; " +
|
||||
"properties - " + msgProperties + "; " +
|
||||
"payload - " + msgValue;
|
||||
|
||||
logger.trace(errMsg);
|
||||
if (MessageProducerOp.logger.isDebugEnabled())
|
||||
if (PulsarAdapterUtil.isAvroSchemaTypeStr(schemaType.name())) {
|
||||
final org.apache.avro.Schema avroSchema = this.getAvroSchemaFromConfiguration();
|
||||
final org.apache.avro.generic.GenericRecord avroGenericRecord =
|
||||
PulsarAvroSchemaUtil.GetGenericRecord_ApacheAvro(avroSchema, this.msgValue);
|
||||
|
||||
throw new PulsarAdapterUnexpectedException(errMsg);
|
||||
}
|
||||
MessageProducerOp.logger.debug("({}) Sync message sent: msg-key={}; msg-properties={}; msg-payload={})",
|
||||
this.producer.getProducerName(),
|
||||
this.msgKey,
|
||||
this.msgProperties,
|
||||
avroGenericRecord.toString());
|
||||
} else
|
||||
MessageProducerOp.logger.debug("({}) Sync message sent; msg-key={}; msg-properties={}; msg-payload={}",
|
||||
this.producer.getProducerName(),
|
||||
this.msgKey,
|
||||
this.msgProperties,
|
||||
this.msgValue);
|
||||
} catch (final PulsarClientException | ExecutionException | InterruptedException pce) {
|
||||
final String errMsg =
|
||||
"Sync message sending failed: " +
|
||||
"key - " + this.msgKey + "; " +
|
||||
"properties - " + this.msgProperties + "; " +
|
||||
"payload - " + this.msgValue;
|
||||
|
||||
MessageProducerOp.logger.trace(errMsg);
|
||||
|
||||
throw new PulsarAdapterUnexpectedException(errMsg);
|
||||
}
|
||||
else {
|
||||
try {
|
||||
// we rely on blockIfQueueIsFull in order to throttle the request in this case
|
||||
CompletableFuture<?> future = typedMessageBuilder.sendAsync();
|
||||
else try {
|
||||
// we rely on blockIfQueueIsFull in order to throttle the request in this case
|
||||
CompletableFuture<?> future = typedMessageBuilder.sendAsync();
|
||||
|
||||
if (useTransact) {
|
||||
// add commit step
|
||||
future = future.thenCompose(msg -> {
|
||||
Timer.Context ctx = transactionCommitTimer.time();
|
||||
return transaction
|
||||
.commit()
|
||||
.whenComplete((m,e) -> ctx.close())
|
||||
.thenApply(v-> msg);
|
||||
}
|
||||
);
|
||||
// add commit step
|
||||
if (this.useTransact) future = future.thenCompose(msg -> {
|
||||
final Context ctx = this.transactionCommitTimer.time();
|
||||
return transaction
|
||||
.commit()
|
||||
.whenComplete((m, e) -> ctx.close())
|
||||
.thenApply(v -> msg);
|
||||
}
|
||||
);
|
||||
|
||||
future.whenComplete((messageId, error) -> {
|
||||
if (logger.isDebugEnabled()) {
|
||||
if (PulsarAdapterUtil.isAvroSchemaTypeStr(schemaType.name())) {
|
||||
org.apache.avro.Schema avroSchema = getAvroSchemaFromConfiguration();
|
||||
org.apache.avro.generic.GenericRecord avroGenericRecord =
|
||||
PulsarAvroSchemaUtil.GetGenericRecord_ApacheAvro(avroSchema, msgValue);
|
||||
future.whenComplete((messageId, error) -> {
|
||||
if (MessageProducerOp.logger.isDebugEnabled())
|
||||
if (PulsarAdapterUtil.isAvroSchemaTypeStr(schemaType.name())) {
|
||||
final org.apache.avro.Schema avroSchema = this.getAvroSchemaFromConfiguration();
|
||||
final org.apache.avro.generic.GenericRecord avroGenericRecord =
|
||||
PulsarAvroSchemaUtil.GetGenericRecord_ApacheAvro(avroSchema, this.msgValue);
|
||||
|
||||
logger.debug("({}) Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={})",
|
||||
producer.getProducerName(),
|
||||
msgKey,
|
||||
msgProperties,
|
||||
avroGenericRecord.toString());
|
||||
}
|
||||
else {
|
||||
logger.debug("({}) Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={}",
|
||||
producer.getProducerName(),
|
||||
msgKey,
|
||||
msgProperties,
|
||||
msgValue);
|
||||
}
|
||||
}
|
||||
}).exceptionally(ex -> {
|
||||
logger.error("Async message sending failed: " +
|
||||
"key - " + msgKey + "; " +
|
||||
"properties - " + msgProperties + "; " +
|
||||
"payload - " + msgValue);
|
||||
MessageProducerOp.logger.debug("({}) Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={})",
|
||||
this.producer.getProducerName(),
|
||||
this.msgKey,
|
||||
this.msgProperties,
|
||||
avroGenericRecord.toString());
|
||||
} else
|
||||
MessageProducerOp.logger.debug("({}) Aysnc message sent: msg-key={}; msg-properties={}; msg-payload={}",
|
||||
this.producer.getProducerName(),
|
||||
this.msgKey,
|
||||
this.msgProperties,
|
||||
this.msgValue);
|
||||
}).exceptionally(ex -> {
|
||||
MessageProducerOp.logger.error("Async message sending failed: key - {}; properties - {}; payload - {}", this.msgKey, this.msgProperties, this.msgValue);
|
||||
|
||||
throw new PulsarAdapterAsyncOperationFailedException(ex);
|
||||
});
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new PulsarAdapterUnexpectedException(e);
|
||||
}
|
||||
throw new PulsarAdapterAsyncOperationFailedException(ex);
|
||||
});
|
||||
} catch (final Exception e) {
|
||||
throw new PulsarAdapterUnexpectedException(e);
|
||||
}
|
||||
|
||||
return null;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,7 +22,6 @@ import com.codahale.metrics.Histogram;
|
||||
import com.codahale.metrics.Timer;
|
||||
import io.nosqlbench.adapter.pulsar.dispensers.PulsarBaseOpDispenser;
|
||||
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.pulsar.client.api.Consumer;
|
||||
@@ -34,11 +33,9 @@ import java.util.function.Function;
|
||||
|
||||
public class PulsarAdapterMetrics {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger("PulsarAdapterMetrics");
|
||||
private static final Logger logger = LogManager.getLogger("PulsarAdapterMetrics");
|
||||
|
||||
private final PulsarBaseOpDispenser pulsarBaseOpDispenser;
|
||||
private final String defaultAdapterMetricsPrefix;
|
||||
|
||||
/**
|
||||
* Pulsar adapter specific metrics
|
||||
*/
|
||||
@@ -63,76 +60,53 @@ public class PulsarAdapterMetrics {
|
||||
private Timer createTransactionTimer;
|
||||
private Timer commitTransactionTimer;
|
||||
|
||||
public PulsarAdapterMetrics(PulsarBaseOpDispenser pulsarBaseOpDispenser, String defaultMetricsPrefix) {
|
||||
public PulsarAdapterMetrics(final PulsarBaseOpDispenser pulsarBaseOpDispenser) {
|
||||
this.pulsarBaseOpDispenser = pulsarBaseOpDispenser;
|
||||
this.defaultAdapterMetricsPrefix = defaultMetricsPrefix;
|
||||
}
|
||||
|
||||
public void initPulsarAdapterInstrumentation() {
|
||||
// Counter metrics
|
||||
this.msgErrOutOfSeqCounter =
|
||||
ActivityMetrics.counter(
|
||||
pulsarBaseOpDispenser,
|
||||
defaultAdapterMetricsPrefix + "err_msg_oos");
|
||||
this.msgErrLossCounter =
|
||||
ActivityMetrics.counter(
|
||||
pulsarBaseOpDispenser,
|
||||
defaultAdapterMetricsPrefix + "err_msg_loss");
|
||||
this.msgErrDuplicateCounter =
|
||||
ActivityMetrics.counter(
|
||||
pulsarBaseOpDispenser,
|
||||
defaultAdapterMetricsPrefix + "err_msg_dup");
|
||||
msgErrOutOfSeqCounter =
|
||||
ActivityMetrics.counter(this.pulsarBaseOpDispenser,"err_msg_oos");
|
||||
msgErrLossCounter =
|
||||
ActivityMetrics.counter(this.pulsarBaseOpDispenser, "err_msg_loss");
|
||||
msgErrDuplicateCounter =
|
||||
ActivityMetrics.counter(this.pulsarBaseOpDispenser, "err_msg_dup");
|
||||
|
||||
// Histogram metrics
|
||||
this.messageSizeHistogram =
|
||||
ActivityMetrics.histogram(
|
||||
pulsarBaseOpDispenser,
|
||||
defaultAdapterMetricsPrefix + "message_size",
|
||||
ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
this.e2eMsgProcLatencyHistogram =
|
||||
ActivityMetrics.histogram(
|
||||
pulsarBaseOpDispenser,
|
||||
defaultAdapterMetricsPrefix + "e2e_msg_latency",
|
||||
ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
this.payloadRttHistogram =
|
||||
ActivityMetrics.histogram(
|
||||
pulsarBaseOpDispenser,
|
||||
defaultAdapterMetricsPrefix + "payload_rtt",
|
||||
ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
messageSizeHistogram =
|
||||
ActivityMetrics.histogram(this.pulsarBaseOpDispenser,
|
||||
"message_size", ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
e2eMsgProcLatencyHistogram = ActivityMetrics.histogram(this.pulsarBaseOpDispenser,
|
||||
"e2e_msg_latency", ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
payloadRttHistogram = ActivityMetrics.histogram(this.pulsarBaseOpDispenser,
|
||||
"payload_rtt", ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
|
||||
// Timer metrics
|
||||
this.bindTimer =
|
||||
ActivityMetrics.timer(
|
||||
pulsarBaseOpDispenser,
|
||||
defaultAdapterMetricsPrefix + "bind",
|
||||
ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
this.executeTimer =
|
||||
ActivityMetrics.timer(
|
||||
pulsarBaseOpDispenser,
|
||||
defaultAdapterMetricsPrefix + "execute",
|
||||
ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
this.createTransactionTimer =
|
||||
ActivityMetrics.timer(
|
||||
pulsarBaseOpDispenser,
|
||||
defaultAdapterMetricsPrefix + "create_transaction",
|
||||
ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
this.commitTransactionTimer =
|
||||
ActivityMetrics.timer(
|
||||
pulsarBaseOpDispenser,
|
||||
defaultAdapterMetricsPrefix + "commit_transaction",
|
||||
ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
bindTimer =
|
||||
ActivityMetrics.timer(this.pulsarBaseOpDispenser,
|
||||
"bind", ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
executeTimer =
|
||||
ActivityMetrics.timer(this.pulsarBaseOpDispenser,
|
||||
"execute", ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
createTransactionTimer =
|
||||
ActivityMetrics.timer(this.pulsarBaseOpDispenser,
|
||||
"create_transaction", ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
commitTransactionTimer =
|
||||
ActivityMetrics.timer(this.pulsarBaseOpDispenser,
|
||||
"commit_transaction", ActivityMetrics.DEFAULT_HDRDIGITS);
|
||||
}
|
||||
|
||||
public Counter getMsgErrOutOfSeqCounter() { return this.msgErrOutOfSeqCounter; }
|
||||
public Counter getMsgErrLossCounter() { return this.msgErrLossCounter; }
|
||||
public Counter getMsgErrDuplicateCounter() { return this.msgErrDuplicateCounter; }
|
||||
public Histogram getMessageSizeHistogram() { return this.messageSizeHistogram; }
|
||||
public Histogram getE2eMsgProcLatencyHistogram() { return this.e2eMsgProcLatencyHistogram; }
|
||||
public Histogram getPayloadRttHistogram() { return payloadRttHistogram; }
|
||||
public Timer getBindTimer() { return bindTimer; }
|
||||
public Timer getExecuteTimer() { return executeTimer; }
|
||||
public Timer getCreateTransactionTimer() { return createTransactionTimer; }
|
||||
public Timer getCommitTransactionTimer() { return commitTransactionTimer; }
|
||||
public Counter getMsgErrOutOfSeqCounter() { return msgErrOutOfSeqCounter; }
|
||||
public Counter getMsgErrLossCounter() { return msgErrLossCounter; }
|
||||
public Counter getMsgErrDuplicateCounter() { return msgErrDuplicateCounter; }
|
||||
public Histogram getMessageSizeHistogram() { return messageSizeHistogram; }
|
||||
public Histogram getE2eMsgProcLatencyHistogram() { return e2eMsgProcLatencyHistogram; }
|
||||
public Histogram getPayloadRttHistogram() { return this.payloadRttHistogram; }
|
||||
public Timer getBindTimer() { return this.bindTimer; }
|
||||
public Timer getExecuteTimer() { return this.executeTimer; }
|
||||
public Timer getCreateTransactionTimer() { return this.createTransactionTimer; }
|
||||
public Timer getCommitTransactionTimer() { return this.commitTransactionTimer; }
|
||||
|
||||
|
||||
//////////////////////////////////////
|
||||
@@ -143,7 +117,7 @@ public class PulsarAdapterMetrics {
|
||||
private final Producer<?> producer;
|
||||
private final Function<ProducerStats, Object> valueExtractor;
|
||||
|
||||
ProducerGaugeImpl(Producer<?> producer, Function<ProducerStats, Object> valueExtractor) {
|
||||
ProducerGaugeImpl(final Producer<?> producer, final Function<ProducerStats, Object> valueExtractor) {
|
||||
this.producer = producer;
|
||||
this.valueExtractor = valueExtractor;
|
||||
}
|
||||
@@ -152,33 +126,29 @@ public class PulsarAdapterMetrics {
|
||||
public Object getValue() {
|
||||
// see Pulsar bug https://github.com/apache/pulsar/issues/10100
|
||||
// we need to synchronize on producer otherwise we could receive corrupted data
|
||||
synchronized(producer) {
|
||||
return valueExtractor.apply(producer.getStats());
|
||||
synchronized(this.producer) {
|
||||
return this.valueExtractor.apply(this.producer.getStats());
|
||||
}
|
||||
}
|
||||
}
|
||||
private static Gauge<Object> producerSafeExtractMetric(Producer<?> producer, Function<ProducerStats, Object> valueExtractor) {
|
||||
private static Gauge<Object> producerSafeExtractMetric(final Producer<?> producer, final Function<ProducerStats, Object> valueExtractor) {
|
||||
return new ProducerGaugeImpl(producer, valueExtractor);
|
||||
}
|
||||
|
||||
public void registerProducerApiMetrics(Producer<?> producer, String pulsarApiMetricsPrefix) {
|
||||
String metricsPrefix = defaultAdapterMetricsPrefix;
|
||||
if (!StringUtils.isBlank(pulsarApiMetricsPrefix)) {
|
||||
metricsPrefix = pulsarApiMetricsPrefix;
|
||||
}
|
||||
public void registerProducerApiMetrics(final Producer<?> producer) {
|
||||
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "total_bytes_sent",
|
||||
producerSafeExtractMetric(producer, (s -> s.getTotalBytesSent() + s.getNumBytesSent())));
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "total_msg_sent",
|
||||
producerSafeExtractMetric(producer, (s -> s.getTotalMsgsSent() + s.getNumMsgsSent())));
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "total_send_failed",
|
||||
producerSafeExtractMetric(producer, (s -> s.getTotalSendFailed() + s.getNumSendFailed())));
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "total_ack_received",
|
||||
producerSafeExtractMetric(producer,(s -> s.getTotalAcksReceived() + s.getNumAcksReceived())));
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "send_bytes_rate",
|
||||
producerSafeExtractMetric(producer, ProducerStats::getSendBytesRate));
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "send_msg_rate",
|
||||
producerSafeExtractMetric(producer, ProducerStats::getSendMsgsRate));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "total_bytes_sent",
|
||||
PulsarAdapterMetrics.producerSafeExtractMetric(producer, s -> s.getTotalBytesSent() + s.getNumBytesSent()));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "total_msg_sent",
|
||||
PulsarAdapterMetrics.producerSafeExtractMetric(producer, s -> s.getTotalMsgsSent() + s.getNumMsgsSent()));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "total_send_failed",
|
||||
PulsarAdapterMetrics.producerSafeExtractMetric(producer, s -> s.getTotalSendFailed() + s.getNumSendFailed()));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "total_ack_received",
|
||||
PulsarAdapterMetrics.producerSafeExtractMetric(producer, s -> s.getTotalAcksReceived() + s.getNumAcksReceived()));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "send_bytes_rate",
|
||||
PulsarAdapterMetrics.producerSafeExtractMetric(producer, ProducerStats::getSendBytesRate));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "send_msg_rate",
|
||||
PulsarAdapterMetrics.producerSafeExtractMetric(producer, ProducerStats::getSendMsgsRate));
|
||||
}
|
||||
|
||||
|
||||
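
The producer gauges above all go through ProducerGaugeImpl so the Pulsar stats object is only read while holding the producer's monitor (see the linked Pulsar issue). A minimal standalone sketch of that pattern, with illustrative names and no dependency on the adapter classes:

    import com.codahale.metrics.Gauge;
    import java.util.function.Function;

    // Sketch only: lock the stats source, then extract a single value from its current stats.
    public class SynchronizedSourceGauge<P, S> implements Gauge<Object> {
        private final P source;                      // e.g. a Pulsar Producer
        private final Function<P, S> statsReader;    // e.g. Producer::getStats
        private final Function<S, Object> extractor; // e.g. ProducerStats::getSendBytesRate

        public SynchronizedSourceGauge(P source, Function<P, S> statsReader, Function<S, Object> extractor) {
            this.source = source;
            this.statsReader = statsReader;
            this.extractor = extractor;
        }

        @Override
        public Object getValue() {
            synchronized (source) { // guard against concurrent mutation while the stats are read
                return extractor.apply(statsReader.apply(source));
            }
        }
    }
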
@@ -190,7 +160,7 @@ public class PulsarAdapterMetrics {
|
||||
private final Consumer<?> consumer;
|
||||
private final Function<ConsumerStats, Object> valueExtractor;
|
||||
|
||||
ConsumerGaugeImpl(Consumer<?> consumer, Function<ConsumerStats, Object> valueExtractor) {
|
||||
ConsumerGaugeImpl(final Consumer<?> consumer, final Function<ConsumerStats, Object> valueExtractor) {
|
||||
this.consumer = consumer;
|
||||
this.valueExtractor = valueExtractor;
|
||||
}
|
||||
@@ -200,32 +170,28 @@ public class PulsarAdapterMetrics {
|
||||
// see Pulsar bug https://github.com/apache/pulsar/issues/10100
|
||||
// - this is a bug report for producer stats.
|
||||
// - assume this also applies to consumer stats.
|
||||
synchronized(consumer) {
|
||||
return valueExtractor.apply(consumer.getStats());
|
||||
synchronized(this.consumer) {
|
||||
return this.valueExtractor.apply(this.consumer.getStats());
|
||||
}
|
||||
}
|
||||
}
|
||||
static Gauge<Object> consumerSafeExtractMetric(Consumer<?> consumer, Function<ConsumerStats, Object> valueExtractor) {
|
||||
static Gauge<Object> consumerSafeExtractMetric(final Consumer<?> consumer, final Function<ConsumerStats, Object> valueExtractor) {
|
||||
return new ConsumerGaugeImpl(consumer, valueExtractor);
|
||||
}
|
||||
|
||||
public void registerConsumerApiMetrics(Consumer<?> consumer, String pulsarApiMetricsPrefix) {
|
||||
String metricsPrefix = defaultAdapterMetricsPrefix;
|
||||
if (!StringUtils.isBlank(pulsarApiMetricsPrefix)) {
|
||||
metricsPrefix = pulsarApiMetricsPrefix;
|
||||
}
|
||||
public void registerConsumerApiMetrics(final Consumer<?> consumer, final String pulsarApiMetricsPrefix) {
|
||||
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "total_bytes_recv",
|
||||
consumerSafeExtractMetric(consumer, (s -> s.getTotalBytesReceived() + s.getNumBytesReceived())));
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "total_msg_recv",
|
||||
consumerSafeExtractMetric(consumer, (s -> s.getTotalMsgsReceived() + s.getNumMsgsReceived())));
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "total_recv_failed",
|
||||
consumerSafeExtractMetric(consumer, (s -> s.getTotalReceivedFailed() + s.getNumReceiveFailed())));
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "total_acks_sent",
|
||||
consumerSafeExtractMetric(consumer,(s -> s.getTotalAcksSent() + s.getNumAcksSent())));
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "recv_bytes_rate",
|
||||
consumerSafeExtractMetric(consumer, ConsumerStats::getRateBytesReceived));
|
||||
ActivityMetrics.gauge(pulsarBaseOpDispenser, metricsPrefix + "recv_msg_rate",
|
||||
consumerSafeExtractMetric(consumer, ConsumerStats::getRateMsgsReceived));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "total_bytes_recv",
|
||||
PulsarAdapterMetrics.consumerSafeExtractMetric(consumer, s -> s.getTotalBytesReceived() + s.getNumBytesReceived()));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "total_msg_recv",
|
||||
PulsarAdapterMetrics.consumerSafeExtractMetric(consumer, s -> s.getTotalMsgsReceived() + s.getNumMsgsReceived()));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "total_recv_failed",
|
||||
PulsarAdapterMetrics.consumerSafeExtractMetric(consumer, s -> s.getTotalReceivedFailed() + s.getNumReceiveFailed()));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "total_acks_sent",
|
||||
PulsarAdapterMetrics.consumerSafeExtractMetric(consumer, s -> s.getTotalAcksSent() + s.getNumAcksSent()));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "recv_bytes_rate",
|
||||
PulsarAdapterMetrics.consumerSafeExtractMetric(consumer, ConsumerStats::getRateBytesReceived));
|
||||
ActivityMetrics.gauge(this.pulsarBaseOpDispenser, "recv_msg_rate",
|
||||
PulsarAdapterMetrics.consumerSafeExtractMetric(consumer, ConsumerStats::getRateMsgsReceived));
|
||||
}
|
||||
}
|
||||
|
||||
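
Callers of this class now pass only the dispenser; metric names are derived from its labels rather than from a string prefix. A hedged wiring sketch, assuming a PulsarBaseOpDispenser instance and an already-built producer (the surrounding class is assumed; this is not code from the commit):

    import org.apache.pulsar.client.api.Producer;

    // Sketch: constructor and method names match the diff above.
    void wireAdapterMetrics(PulsarBaseOpDispenser dispenser, Producer<?> producer) {
        PulsarAdapterMetrics adapterMetrics = new PulsarAdapterMetrics(dispenser); // prefix argument removed
        adapterMetrics.initPulsarAdapterInstrumentation();   // counters, histograms, timers
        adapterMetrics.registerProducerApiMetrics(producer); // per-producer gauges with fixed names
    }
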
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -39,9 +39,10 @@ import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
public class PulsarAdapterUtil {
|
||||
public enum PulsarAdapterUtil {
|
||||
;
|
||||
|
||||
private final static Logger logger = LogManager.getLogger(PulsarAdapterUtil.class);
|
||||
private static final Logger logger = LogManager.getLogger(PulsarAdapterUtil.class);
|
||||
|
||||
public static final String MSG_SEQUENCE_NUMBER = "sequence_number";
|
||||
|
||||
@@ -61,7 +62,7 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
DOC_LEVEL_PARAMS(String label) {
|
||||
DOC_LEVEL_PARAMS(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
}
|
||||
@@ -75,17 +76,17 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
PULSAR_API_TYPE(String label) {
|
||||
PULSAR_API_TYPE(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
private static final Set<String> LABELS = Stream.of(values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet());
|
||||
private static final Set<String> LABELS = Stream.of(PULSAR_API_TYPE.values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet());
|
||||
|
||||
public static boolean isValidLabel(String label) {
|
||||
return LABELS.contains(label);
|
||||
public static boolean isValidLabel(final String label) {
|
||||
return PULSAR_API_TYPE.LABELS.contains(label);
|
||||
}
|
||||
}
|
||||
public static boolean isValidPulsarApiType(String param) {
|
||||
public static boolean isValidPulsarApiType(final String param) {
|
||||
return PULSAR_API_TYPE.isValidLabel(param);
|
||||
}
|
||||
|
||||
@@ -101,17 +102,17 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
CONF_GATEGORY(String label) {
|
||||
CONF_GATEGORY(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
private static final Set<String> LABELS = Stream.of(values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet());
|
||||
private static final Set<String> LABELS = Stream.of(CONF_GATEGORY.values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet());
|
||||
|
||||
public static boolean isValidLabel(String label) {
|
||||
return LABELS.contains(label);
|
||||
public static boolean isValidLabel(final String label) {
|
||||
return CONF_GATEGORY.LABELS.contains(label);
|
||||
}
|
||||
}
|
||||
public static boolean isValidConfCategory(String item) {
|
||||
public static boolean isValidConfCategory(final String item) {
|
||||
return CONF_GATEGORY.isValidLabel(item);
|
||||
}
|
||||
///////
|
||||
@@ -122,7 +123,7 @@ public class PulsarAdapterUtil {
|
||||
;
|
||||
|
||||
public final String label;
|
||||
PERSISTENT_TYPES(String label) {
|
||||
PERSISTENT_TYPES(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
}
|
||||
@@ -157,7 +158,7 @@ public class PulsarAdapterUtil {
|
||||
;
|
||||
|
||||
public final String label;
|
||||
CLNT_CONF_KEY(String label) {
|
||||
CLNT_CONF_KEY(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
}
|
||||
@@ -182,7 +183,7 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
PRODUCER_CONF_STD_KEY(String label) {
|
||||
PRODUCER_CONF_STD_KEY(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
}
|
||||
@@ -197,11 +198,11 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
COMPRESSION_TYPE(String label) {
|
||||
COMPRESSION_TYPE(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
private final static String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
|
||||
private static final String TYPE_LIST = Stream.of(values()).map(t -> t.label).collect(Collectors.joining(", "));
|
||||
}
|
||||
|
||||
public static String getValidCompressionTypeList() {
|
||||
@@ -241,7 +242,7 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
CONSUMER_CONF_STD_KEY(String label) {
|
||||
CONSUMER_CONF_STD_KEY(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
}
|
||||
@@ -256,18 +257,18 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
CONSUMER_CONF_CUSTOM_KEY(String label) {
|
||||
CONSUMER_CONF_CUSTOM_KEY(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
private static final Set<String> LABELS = Stream.of(values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet());
|
||||
private static final Set<String> LABELS = Stream.of(CONSUMER_CONF_CUSTOM_KEY.values()).map(v -> v.label).collect(Collectors.toUnmodifiableSet());
|
||||
|
||||
public static boolean isValidLabel(String label) {
|
||||
return LABELS.contains(label);
|
||||
public static boolean isValidLabel(final String label) {
|
||||
return CONSUMER_CONF_CUSTOM_KEY.LABELS.contains(label);
|
||||
}
|
||||
|
||||
}
|
||||
public static boolean isCustomConsumerConfItem(String item) {
|
||||
public static boolean isCustomConsumerConfItem(final String item) {
|
||||
return CONSUMER_CONF_CUSTOM_KEY.isValidLabel(item);
|
||||
}
|
||||
|
||||
@@ -280,20 +281,20 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
SUBSCRIPTION_TYPE(String label) {
|
||||
SUBSCRIPTION_TYPE(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
private static final Set<String> LABELS = Stream.of(values()).map(v -> v.label)
|
||||
private static final Set<String> LABELS = Stream.of(SUBSCRIPTION_TYPE.values()).map(v -> v.label)
|
||||
.collect(Collectors.toUnmodifiableSet());
|
||||
|
||||
public static boolean isValidLabel(String label) {
|
||||
return LABELS.contains(label);
|
||||
public static boolean isValidLabel(final String label) {
|
||||
return SUBSCRIPTION_TYPE.LABELS.contains(label);
|
||||
}
|
||||
|
||||
private final static String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
|
||||
private static final String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
|
||||
}
|
||||
public static boolean isValidSubscriptionType(String item) {
|
||||
public static boolean isValidSubscriptionType(final String item) {
|
||||
return SUBSCRIPTION_TYPE.isValidLabel(item);
|
||||
}
|
||||
public static String getValidSubscriptionTypeList() {
|
||||
@@ -307,11 +308,11 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
SUBSCRIPTION_INITIAL_POSITION(String label) {
|
||||
SUBSCRIPTION_INITIAL_POSITION(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
private final static String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
|
||||
private static final String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
|
||||
|
||||
}
|
||||
public static String getValidSubscriptionInitialPositionList() {
|
||||
@@ -326,11 +327,11 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
REGEX_SUBSCRIPTION_MODE(String label) {
|
||||
REGEX_SUBSCRIPTION_MODE(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
private final static String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
|
||||
private static final String TYPE_LIST = Stream.of(COMPRESSION_TYPE.values()).map(t -> t.label).collect(Collectors.joining(", "));
|
||||
}
|
||||
|
||||
public static String getValidRegexSubscriptionModeList() {
|
||||
@@ -353,7 +354,7 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
READER_CONF_STD_KEY(String label) {
|
||||
READER_CONF_STD_KEY(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
}
|
||||
@@ -367,7 +368,7 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
READER_CONF_CUSTOM_KEY(String label) {
|
||||
READER_CONF_CUSTOM_KEY(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
}
|
||||
@@ -380,18 +381,18 @@ public class PulsarAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
READER_MSG_POSITION_TYPE(String label) {
|
||||
READER_MSG_POSITION_TYPE(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
private static final Set<String> LABELS = Stream.of(values()).map(v -> v.label)
|
||||
private static final Set<String> LABELS = Stream.of(READER_MSG_POSITION_TYPE.values()).map(v -> v.label)
|
||||
.collect(Collectors.toUnmodifiableSet());
|
||||
|
||||
public static boolean isValidLabel(String label) {
|
||||
return LABELS.contains(label);
|
||||
public static boolean isValidLabel(final String label) {
|
||||
return READER_MSG_POSITION_TYPE.LABELS.contains(label);
|
||||
}
|
||||
}
|
||||
public static boolean isValideReaderStartPosition(String item) {
|
||||
public static boolean isValideReaderStartPosition(final String item) {
|
||||
return READER_MSG_POSITION_TYPE.isValidLabel(item);
|
||||
}
|
||||
|
||||
@@ -402,53 +403,48 @@ public class PulsarAdapterUtil {
|
||||
|
||||
///////
|
||||
// Primitive Schema type
|
||||
public static boolean isPrimitiveSchemaTypeStr(String typeStr) {
|
||||
return StringUtils.isBlank(typeStr) || PRIMITIVE_SCHEMA_TYPE_MAPPING.containsKey(typeStr.toUpperCase());
|
||||
public static boolean isPrimitiveSchemaTypeStr(final String typeStr) {
|
||||
return StringUtils.isBlank(typeStr) || PulsarAdapterUtil.PRIMITIVE_SCHEMA_TYPE_MAPPING.containsKey(typeStr.toUpperCase());
|
||||
}
|
||||
|
||||
public static Schema<?> getPrimitiveTypeSchema(String typeStr) {
|
||||
String lookupKey = StringUtils.isBlank(typeStr) ? "BYTES" : typeStr.toUpperCase();
|
||||
Schema<?> schema = PRIMITIVE_SCHEMA_TYPE_MAPPING.get(lookupKey);
|
||||
if (schema == null) {
|
||||
public static Schema<?> getPrimitiveTypeSchema(final String typeStr) {
|
||||
final String lookupKey = StringUtils.isBlank(typeStr) ? "BYTES" : typeStr.toUpperCase();
|
||||
final Schema<?> schema = PulsarAdapterUtil.PRIMITIVE_SCHEMA_TYPE_MAPPING.get(lookupKey);
|
||||
if (null == schema)
|
||||
throw new PulsarAdapterInvalidParamException("Invalid Pulsar primitive schema type string : " + typeStr);
|
||||
}
|
||||
return schema;
|
||||
}
|
||||
|
||||
///////
|
||||
// Complex strut type: Avro or Json
|
||||
public static boolean isAvroSchemaTypeStr(String typeStr) {
|
||||
public static boolean isAvroSchemaTypeStr(final String typeStr) {
|
||||
return "AVRO".equalsIgnoreCase(typeStr);
|
||||
}
|
||||
|
||||
// automatic decode the type from the Registry
|
||||
public static boolean isAutoConsumeSchemaTypeStr(String typeStr) {
|
||||
public static boolean isAutoConsumeSchemaTypeStr(final String typeStr) {
|
||||
return "AUTO_CONSUME".equalsIgnoreCase(typeStr);
|
||||
}
|
||||
|
||||
private static final Map<String, Schema<?>> AVRO_SCHEMA_CACHE = new ConcurrentHashMap<>();
|
||||
|
||||
public static Schema<?> getAvroSchema(String typeStr, final String definitionStr) {
|
||||
public static Schema<?> getAvroSchema(final String typeStr, String definitionStr) {
|
||||
// Check if payloadStr points to a file (e.g. "file:///path/to/a/file")
|
||||
if (isAvroSchemaTypeStr(typeStr)) {
|
||||
if (StringUtils.isBlank(definitionStr)) {
|
||||
if (PulsarAdapterUtil.isAvroSchemaTypeStr(typeStr)) {
|
||||
if (StringUtils.isBlank(definitionStr))
|
||||
throw new PulsarAdapterInvalidParamException("Schema definition must be provided for \"Avro\" schema type!");
|
||||
}
|
||||
return AVRO_SCHEMA_CACHE.computeIfAbsent(definitionStr, __ -> {
|
||||
return PulsarAdapterUtil.AVRO_SCHEMA_CACHE.computeIfAbsent(definitionStr, __ -> {
|
||||
String schemaDefinitionStr = definitionStr;
|
||||
if (schemaDefinitionStr.startsWith("file://")) {
|
||||
try {
|
||||
Path filePath = Paths.get(URI.create(schemaDefinitionStr));
|
||||
schemaDefinitionStr = Files.readString(filePath, StandardCharsets.UTF_8);
|
||||
} catch (IOException ioe) {
|
||||
throw new PulsarAdapterUnexpectedException("Error reading the specified \"Avro\" schema definition file: " + definitionStr + ": " + ioe.getMessage());
|
||||
}
|
||||
if (schemaDefinitionStr.startsWith("file://")) try {
|
||||
final Path filePath = Paths.get(URI.create(schemaDefinitionStr));
|
||||
schemaDefinitionStr = Files.readString(filePath, StandardCharsets.UTF_8);
|
||||
} catch (final IOException ioe) {
|
||||
throw new PulsarAdapterUnexpectedException("Error reading the specified \"Avro\" schema definition file: " + definitionStr + ": " + ioe.getMessage());
|
||||
}
|
||||
return PulsarAvroSchemaUtil.GetSchema_PulsarAvro("NBAvro", schemaDefinitionStr);
|
||||
});
|
||||
} else {
|
||||
throw new PulsarAdapterInvalidParamException("Trying to create a \"Avro\" schema for a non-Avro schema type string: " + typeStr);
|
||||
}
|
||||
throw new PulsarAdapterInvalidParamException("Trying to create a \"Avro\" schema for a non-Avro schema type string: " + typeStr);
|
||||
}
|
||||
|
||||
///////
|
||||
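
A short usage sketch of the schema helpers above. The inline Avro definition is an illustrative value; a "file://..." definition would be read from disk and cached the same way. PulsarAdapterUtil is assumed to be importable from the pulsar adapter module.

    import org.apache.pulsar.client.api.Schema;

    static Schema<?> exampleSchemas() {
        // Blank primitive type string falls back to BYTES.
        Schema<?> bytesSchema = PulsarAdapterUtil.getPrimitiveTypeSchema("");

        // Avro schemas are cached per definition string.
        return PulsarAdapterUtil.getAvroSchema("AVRO",
            "{\"type\":\"record\",\"name\":\"Example\",\"fields\":[{\"name\":\"id\",\"type\":\"long\"}]}");
    }
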
@@ -456,20 +452,20 @@ public class PulsarAdapterUtil {
|
||||
private static final ObjectMapper JACKSON_OBJECT_MAPPER = new ObjectMapper();
|
||||
private static final TypeReference<Map<String, String>> MAP_TYPE_REF = new TypeReference<>() {};
|
||||
|
||||
public static Map<String, String> convertJsonToMap(String jsonStr) throws IOException {
|
||||
return JACKSON_OBJECT_MAPPER.readValue(jsonStr, MAP_TYPE_REF);
|
||||
public static Map<String, String> convertJsonToMap(final String jsonStr) throws IOException {
|
||||
return PulsarAdapterUtil.JACKSON_OBJECT_MAPPER.readValue(jsonStr, PulsarAdapterUtil.MAP_TYPE_REF);
|
||||
}
|
||||
|
||||
|
||||
///////
|
||||
// Get full namespace name (<tenant>/<namespace>) from a Pulsar topic URI
|
||||
public static String getFullNamespaceName(String topicUri) {
|
||||
public static String getFullNamespaceName(final String topicUri) {
|
||||
// Get tenant/namespace string
|
||||
// - topicUri : persistent://<tenant>/<namespace>/<topic>
|
||||
// - tmpStr : <tenant>/<namespace>/<topic>
|
||||
// - fullNsName : <tenant>/<namespace>
|
||||
|
||||
String tmpStr = StringUtils.substringAfter(topicUri,"://");
|
||||
final String tmpStr = StringUtils.substringAfter(topicUri,"://");
|
||||
return StringUtils.substringBeforeLast(tmpStr, "/");
|
||||
}
|
||||
}
|
||||
|
||||
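
Usage sketch for the two helpers at the end of the class; the topic URI and JSON are illustrative values, and PulsarAdapterUtil is assumed to be importable from the pulsar adapter module.

    import java.io.IOException;
    import java.util.Map;

    public class PulsarAdapterUtilUsage {
        public static void main(String[] args) throws IOException {
            // persistent://<tenant>/<namespace>/<topic>  ->  <tenant>/<namespace>
            String ns = PulsarAdapterUtil.getFullNamespaceName("persistent://tenantA/ns1/topicX");
            System.out.println(ns); // tenantA/ns1

            Map<String, String> props = PulsarAdapterUtil.convertJsonToMap("{\"key\":\"value\"}");
            System.out.println(props.get("key")); // value
        }
    }
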
@@ -1,4 +1,20 @@
|
||||
#!/usr/local/bin/bash
|
||||
#
|
||||
# Copyright (c) 2023 nosqlbench
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
: "${SKIP_TESTS:=1}"
|
||||
(
|
||||
cd "$(git rev-parse --show-toplevel)" && \
|
||||
|
||||
@@ -1,4 +1,20 @@
|
||||
#!/usr/local/bin/bash
|
||||
#
|
||||
# Copyright (c) 2023 nosqlbench
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
: "${REBUILD:=1}"
|
||||
: "${CYCLES:=1000000000}"
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
|
||||
|
||||
@@ -1,4 +1,20 @@
|
||||
#!/usr/local/bin/bash
|
||||
#
|
||||
# Copyright (c) 2023 nosqlbench
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
: "${REBUILD:=1}"
|
||||
: "${CYCLES:=1000000000}"
|
||||
: "${CYCLERATE:=100}"
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
package io.nosqlbench.adapter.s4j.dispensers;
|
||||
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,6 +14,7 @@ package io.nosqlbench.adapter.s4j.dispensers;
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.nosqlbench.adapter.s4j.dispensers;
|
||||
|
||||
import io.nosqlbench.adapter.s4j.S4JSpace;
|
||||
import io.nosqlbench.adapter.s4j.ops.S4JOp;
|
||||
@@ -38,7 +37,7 @@ import java.util.stream.Collectors;
|
||||
|
||||
public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpace> {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger("PulsarBaseOpDispenser");
|
||||
private static final Logger logger = LogManager.getLogger("PulsarBaseOpDispenser");
|
||||
|
||||
protected final ParsedOp parsedOp;
|
||||
protected final S4JSpace s4jSpace;
|
||||
@@ -65,10 +64,10 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
protected final int totalThreadNum;
|
||||
protected final long totalCycleNum;
|
||||
|
||||
public S4JBaseOpDispenser(DriverAdapter adapter,
|
||||
ParsedOp op,
|
||||
LongFunction<String> destNameStrFunc,
|
||||
S4JSpace s4jSpace) {
|
||||
protected S4JBaseOpDispenser(DriverAdapter adapter,
|
||||
ParsedOp op,
|
||||
LongFunction<String> destNameStrFunc,
|
||||
S4JSpace s4jSpace) {
|
||||
|
||||
super(adapter, op);
|
||||
|
||||
@@ -77,7 +76,7 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
this.connLvlJmsContexts.putAll(s4jSpace.getConnLvlJmsContexts());
|
||||
this.sessionLvlJmsContexts.putAll(s4jSpace.getSessionLvlJmsContexts());
|
||||
|
||||
String defaultMetricsPrefix = getDefaultMetricsPrefix(this.parsedOp);
|
||||
String defaultMetricsPrefix = parsedOp.getLabels().linearize("activity");
|
||||
this.s4jAdapterMetrics = new S4JAdapterMetrics(defaultMetricsPrefix);
|
||||
s4jAdapterMetrics.initS4JAdapterInstrumentation();
|
||||
|
||||
@@ -101,7 +100,7 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
|
||||
protected LongFunction<Boolean> lookupStaticBoolConfigValueFunc(String paramName, boolean defaultValue) {
|
||||
LongFunction<Boolean> booleanLongFunction;
|
||||
booleanLongFunction = (l) -> parsedOp.getOptionalStaticConfig(paramName, String.class)
|
||||
booleanLongFunction = l -> parsedOp.getOptionalStaticConfig(paramName, String.class)
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.map(value -> BooleanUtils.toBoolean(value))
|
||||
.orElse(defaultValue);
|
||||
@@ -111,7 +110,7 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
|
||||
protected LongFunction<Set<String>> lookupStaticStrSetOpValueFunc(String paramName) {
|
||||
LongFunction<Set<String>> setStringLongFunction;
|
||||
setStringLongFunction = (l) -> parsedOp.getOptionalStaticValue(paramName, String.class)
|
||||
setStringLongFunction = l -> parsedOp.getOptionalStaticValue(paramName, String.class)
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.map(value -> {
|
||||
Set<String > set = new HashSet<>();
|
||||
@@ -132,12 +131,12 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
// If the corresponding Op parameter is not provided, use the specified default value
|
||||
protected LongFunction<Integer> lookupStaticIntOpValueFunc(String paramName, int defaultValue) {
|
||||
LongFunction<Integer> integerLongFunction;
|
||||
integerLongFunction = (l) -> parsedOp.getOptionalStaticValue(paramName, String.class)
|
||||
integerLongFunction = l -> parsedOp.getOptionalStaticValue(paramName, String.class)
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.map(value -> NumberUtils.toInt(value))
|
||||
.map(value -> {
|
||||
if (value < 0) return 0;
|
||||
else return value;
|
||||
if (0 > value) return 0;
|
||||
return value;
|
||||
}).orElse(defaultValue);
|
||||
logger.info("{}: {}", paramName, integerLongFunction.apply(0));
|
||||
return integerLongFunction;
|
||||
@@ -147,7 +146,7 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
protected LongFunction<String> lookupOptionalStrOpValueFunc(String paramName, String defaultValue) {
|
||||
LongFunction<String> stringLongFunction;
|
||||
stringLongFunction = parsedOp.getAsOptionalFunction(paramName, String.class)
|
||||
.orElse((l) -> defaultValue);
|
||||
.orElse(l -> defaultValue);
|
||||
logger.info("{}: {}", paramName, stringLongFunction.apply(0));
|
||||
|
||||
return stringLongFunction;
|
||||
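
The lookup helpers above all follow the same shape: resolve an optional op-level string once, apply a default, and expose the result as a per-cycle LongFunction. A standalone re-illustration using only JDK types (names and defaults here are illustrative, not project code):

    import java.util.Optional;
    import java.util.function.LongFunction;
    import java.util.function.Predicate;

    public class ConfigLookupExample {
        // Resolve the optional value once, clamp negatives to zero (as the dispenser does),
        // and return a function that yields the same value for every cycle.
        static LongFunction<Integer> intOrDefault(Optional<String> raw, int defaultValue) {
            return cycle -> raw
                .filter(Predicate.not(String::isEmpty))
                .map(Integer::parseInt)
                .map(v -> Math.max(v, 0))
                .orElse(defaultValue);
        }

        public static void main(String[] args) {
            System.out.println(intOrDefault(Optional.of("5"), 1).apply(0)); // 5
            System.out.println(intOrDefault(Optional.empty(), 1).apply(0)); // 1
        }
    }
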
@@ -182,10 +181,10 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
String destinationCacheKey = S4JAdapterUtil.buildCacheKey(jmsContextIdStr, destType, destName);
|
||||
Destination destination = jmsDestinations.get(destinationCacheKey);
|
||||
|
||||
if (destination == null) {
|
||||
if (null == destination) {
|
||||
if (StringUtils.equalsIgnoreCase(destType, S4JAdapterUtil.JMS_DEST_TYPES.QUEUE.label)) {
|
||||
destination = jmsContext.createQueue(destName);
|
||||
} else {
|
||||
} else {
|
||||
destination = jmsContext.createTopic(destName);
|
||||
}
|
||||
|
||||
@@ -195,23 +194,19 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
return destination;
|
||||
}
|
||||
// Temporary destination
|
||||
else {
|
||||
if (StringUtils.equalsIgnoreCase(destType, S4JAdapterUtil.JMS_DEST_TYPES.QUEUE.label)) {
|
||||
return jmsContext.createTemporaryQueue();
|
||||
} else {
|
||||
return jmsContext.createTemporaryTopic();
|
||||
}
|
||||
|
||||
if (StringUtils.equalsIgnoreCase(destType, S4JAdapterUtil.JMS_DEST_TYPES.QUEUE.label)) {
|
||||
return jmsContext.createTemporaryQueue();
|
||||
}
|
||||
return jmsContext.createTemporaryTopic();
|
||||
}
|
||||
|
||||
// Get simplified NB thread name
|
||||
private String getSimplifiedNBThreadName(String fullThreadName) {
|
||||
assert (StringUtils.isNotBlank(fullThreadName));
|
||||
assert StringUtils.isNotBlank(fullThreadName);
|
||||
|
||||
if (StringUtils.contains(fullThreadName, '/'))
|
||||
return StringUtils.substringAfterLast(fullThreadName, "/");
|
||||
else
|
||||
return fullThreadName;
|
||||
if (StringUtils.contains(fullThreadName, '/')) return StringUtils.substringAfterLast(fullThreadName, "/");
|
||||
return fullThreadName;
|
||||
}
|
||||
|
||||
|
||||
@@ -227,7 +222,7 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
getSimplifiedNBThreadName(Thread.currentThread().getName()), "producer");
|
||||
JMSProducer jmsProducer = jmsProducers.get(producerCacheKey);
|
||||
|
||||
if (jmsProducer == null) {
|
||||
if (null == jmsProducer) {
|
||||
jmsProducer = jmsContext.createProducer();
|
||||
|
||||
if (asyncApi) {
|
||||
@@ -267,7 +262,7 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
getSimplifiedNBThreadName(Thread.currentThread().getName()), "consumer");
|
||||
|
||||
JMSConsumer jmsConsumer = jmsConsumers.get(consumerCacheKey);
|
||||
if (jmsConsumer == null) {
|
||||
if (null == jmsConsumer) {
|
||||
if (isTopic) {
|
||||
if (!durable && !shared)
|
||||
jmsConsumer = jmsContext.createConsumer(destination, msgSelector, nonLocal);
|
||||
@@ -276,16 +271,13 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
throw new RuntimeException("Subscription name is required for receiving messages from a durable or shared topic!");
|
||||
}
|
||||
|
||||
if (durable && !shared)
|
||||
jmsConsumer = jmsContext.createDurableConsumer(
|
||||
(Topic) destination, subName, msgSelector, nonLocal);
|
||||
if (durable && !shared) jmsConsumer = jmsContext.createDurableConsumer(
|
||||
(Topic) destination, subName, msgSelector, nonLocal);
|
||||
else if (!durable)
|
||||
jmsConsumer = jmsContext.createSharedConsumer((Topic) destination, subName, msgSelector);
|
||||
else
|
||||
jmsConsumer = jmsContext.createSharedDurableConsumer((Topic) destination, subName, msgSelector);
|
||||
else jmsConsumer = jmsContext.createSharedDurableConsumer((Topic) destination, subName, msgSelector);
|
||||
}
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
jmsConsumer = jmsContext.createConsumer(destination, msgSelector, nonLocal);
|
||||
}
|
||||
|
||||
@@ -309,12 +301,12 @@ public abstract class S4JBaseOpDispenser extends BaseOpDispenser<S4JOp, S4JSpac
|
||||
// Whether to commit the transaction which happens when:
|
||||
// - session mode is equal to "SESSION_TRANSACTED"
|
||||
// - "txn_batch_num" has been reached since last reset
|
||||
boolean commitTransaction = ( (Session.SESSION_TRANSACTED == jmsSessionMode) && (txnBatchNum > 0) );
|
||||
boolean commitTransaction = (Session.SESSION_TRANSACTED == jmsSessionMode) && (0 < txnBatchNum);
|
||||
if (commitTransaction) {
|
||||
int txnBatchTackingCnt = s4jSpace.getTxnBatchTrackingCnt();
|
||||
|
||||
if ( ( (txnBatchTackingCnt > 0) && ((txnBatchTackingCnt % txnBatchNum) == 0) ) ||
|
||||
( curCycleNum >= (totalCycleNum - 1) ) ) {
|
||||
if (((0 < txnBatchTackingCnt) && (0 == (txnBatchTackingCnt % txnBatchNum))) ||
|
||||
(curCycleNum >= (totalCycleNum - 1))) {
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Commit transaction ({}, {}, {})",
|
||||
txnBatchTackingCnt,
|
||||
|
||||
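
The commit branch at the end of the hunk above reduces to a single predicate: commit only in SESSION_TRANSACTED mode, and only when the batch boundary is hit or the final cycle is reached. A standalone worked example of that condition (illustrative, not project code):

    public class TxnCommitDecisionExample {
        static boolean shouldCommit(boolean sessionTransacted, int txnBatchNum,
                                    int txnBatchTrackingCnt, long curCycle, long totalCycles) {
            if (!sessionTransacted || txnBatchNum <= 0) return false;
            boolean batchBoundary = txnBatchTrackingCnt > 0 && txnBatchTrackingCnt % txnBatchNum == 0;
            boolean lastCycle = curCycle >= totalCycles - 1;
            return batchBoundary || lastCycle;
        }

        public static void main(String[] args) {
            System.out.println(shouldCommit(true, 10, 10, 5, 1000));  // true: batch boundary reached
            System.out.println(shouldCommit(true, 10, 3, 999, 1000)); // true: final cycle
            System.out.println(shouldCommit(true, 10, 3, 5, 1000));   // false: keep batching
        }
    }
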
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,16 +15,18 @@
|
||||
*/
|
||||
|
||||
package io.nosqlbench.adapter.s4j.util;
|
||||
|
||||
import com.codahale.metrics.Histogram;
|
||||
import com.codahale.metrics.Timer;
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
public class S4JAdapterMetrics implements NBNamedElement {
|
||||
public class S4JAdapterMetrics implements NBLabeledElement {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger("S4JAdapterMetrics");
|
||||
private static final Logger logger = LogManager.getLogger("S4JAdapterMetrics");
|
||||
|
||||
private final String defaultAdapterMetricsPrefix;
|
||||
|
||||
@@ -36,7 +38,6 @@ public class S4JAdapterMetrics implements NBNamedElement {
|
||||
this.defaultAdapterMetricsPrefix = defaultMetricsPrefix;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "S4JAdapterMetrics";
|
||||
}
|
||||
@@ -65,4 +66,9 @@ public class S4JAdapterMetrics implements NBNamedElement {
|
||||
public Timer getBindTimer() { return bindTimer; }
|
||||
public Timer getExecuteTimer() { return executeTimer; }
|
||||
public Histogram getMessagesizeHistogram() { return messageSizeHistogram; }
|
||||
|
||||
@Override
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV("name", getName());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,23 +23,24 @@ import io.nosqlbench.api.config.standard.Param;
|
||||
import io.nosqlbench.api.engine.util.SSLKsFactory;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import java.net.ServerSocket;
|
||||
|
||||
import javax.net.ServerSocketFactory;
|
||||
import javax.net.ssl.SSLServerSocketFactory;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.net.InetAddress;
|
||||
import java.net.Socket;
|
||||
import java.net.SocketTimeoutException;
|
||||
import java.io.OutputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.Writer;
|
||||
import java.net.InetAddress;
|
||||
import java.net.ServerSocket;
|
||||
import java.net.Socket;
|
||||
import java.net.SocketTimeoutException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.BlockingQueue;
|
||||
import java.util.concurrent.LinkedBlockingQueue;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import javax.net.ServerSocketFactory;
|
||||
import javax.net.ssl.SSLServerSocketFactory;
|
||||
|
||||
public class TcpServerAdapterSpace implements AutoCloseable{
|
||||
public class TcpServerAdapterSpace implements AutoCloseable {
|
||||
|
||||
|
||||
private final static Logger logger = LogManager.getLogger(TcpServerAdapterSpace.class);
|
||||
@@ -187,7 +188,7 @@ public class TcpServerAdapterSpace implements AutoCloseable{
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
try (Writer runWriter = new OutputStreamWriter(outputStream);) {
|
||||
try (Writer runWriter = new OutputStreamWriter(outputStream)) {
|
||||
while (running ) {
|
||||
if(!sourceQueue.isEmpty()) {
|
||||
try {
|
||||
|
||||
@@ -57,7 +57,7 @@
|
||||
<dependency>
|
||||
<groupId>com.databricks</groupId>
|
||||
<artifactId>sjsonnet_2.13</artifactId>
|
||||
<version>0.4.3</version>
|
||||
<version>0.4.4</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
||||
@@ -18,6 +18,8 @@ package io.nosqlbench.engine.api.activityimpl;
|
||||
|
||||
import com.codahale.metrics.Histogram;
|
||||
import com.codahale.metrics.Timer;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
|
||||
import io.nosqlbench.api.errors.MVELCompilationError;
|
||||
import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
|
||||
@@ -30,7 +32,7 @@ import java.io.Serializable;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* See {@link OpDispenser} for details on how to use this type.
|
||||
* <p>
|
||||
* Some details are tracked per op template, which aligns to the life-cycle of the op dispenser.
|
||||
@@ -38,11 +40,12 @@ import java.util.concurrent.TimeUnit;
|
||||
*
|
||||
* @param <T> The type of operation
|
||||
*/
|
||||
public abstract class BaseOpDispenser<T extends Op, S> implements OpDispenser<T> {
|
||||
public abstract class BaseOpDispenser<T extends Op, S> implements OpDispenser<T>, NBLabeledElement {
|
||||
|
||||
private final String opName;
|
||||
private Serializable expectedResultExpression;
|
||||
protected final DriverAdapter<T, S> adapter;
|
||||
private final NBLabels labels;
|
||||
private boolean instrument;
|
||||
private Histogram resultSizeHistogram;
|
||||
private Timer successTimer;
|
||||
@@ -50,24 +53,23 @@ public abstract class BaseOpDispenser<T extends Op, S> implements OpDispenser<T>
|
||||
private final String[] timerStarts;
|
||||
private final String[] timerStops;
|
||||
|
||||
protected BaseOpDispenser(DriverAdapter<T, S> adapter, ParsedOp op) {
|
||||
this.opName = op.getName();
|
||||
protected BaseOpDispenser(final DriverAdapter<T, S> adapter, final ParsedOp op) {
|
||||
opName = op.getName();
|
||||
this.adapter = adapter;
|
||||
timerStarts = op.takeOptionalStaticValue("start-timers", String.class)
|
||||
labels = op.getLabels();
|
||||
|
||||
this.timerStarts = op.takeOptionalStaticValue("start-timers", String.class)
|
||||
.map(s -> s.split(", *"))
|
||||
.orElse(null);
|
||||
|
||||
timerStops = op.takeOptionalStaticValue("stop-timers", String.class)
|
||||
this.timerStops = op.takeOptionalStaticValue("stop-timers", String.class)
|
||||
.map(s -> s.split(", *"))
|
||||
.orElse(null);
|
||||
|
||||
if (timerStarts != null) {
|
||||
for (String timerStart : timerStarts) {
|
||||
ThreadLocalNamedTimers.addTimer(op, timerStart);
|
||||
}
|
||||
}
|
||||
configureInstrumentation(op);
|
||||
configureResultExpectations(op);
|
||||
if (null != timerStarts)
|
||||
for (final String timerStart : this.timerStarts) ThreadLocalNamedTimers.addTimer(op, timerStart);
|
||||
this.configureInstrumentation(op);
|
||||
this.configureResultExpectations(op);
|
||||
}
|
||||
|
||||
public Serializable getExpectedResultExpression() {
|
||||
@@ -91,55 +93,47 @@ public abstract class BaseOpDispenser<T extends Op, S> implements OpDispenser<T>
|
||||
}
|
||||
|
||||
String getOpName() {
|
||||
return opName;
|
||||
return this.opName;
|
||||
}
|
||||
|
||||
public DriverAdapter<T, S> getAdapter() {
|
||||
return adapter;
|
||||
return this.adapter;
|
||||
}
|
||||
|
||||
protected String getDefaultMetricsPrefix(ParsedOp pop) {
|
||||
return pop.getStaticConfigOr("alias", "UNKNOWN") + "-" + pop.getName() + "--";
|
||||
}
|
||||
|
||||
private void configureInstrumentation(ParsedOp pop) {
|
||||
this.instrument = pop.takeStaticConfigOr("instrument", false);
|
||||
if (instrument) {
|
||||
this.successTimer = ActivityMetrics.timer(getDefaultMetricsPrefix(pop) + "success");
|
||||
this.errorTimer = ActivityMetrics.timer(getDefaultMetricsPrefix(pop) + "error");
|
||||
this.resultSizeHistogram = ActivityMetrics.histogram(getDefaultMetricsPrefix(pop) + "resultset-size");
|
||||
private void configureInstrumentation(final ParsedOp pop) {
|
||||
instrument = pop.takeStaticConfigOr("instrument", false);
|
||||
if (this.instrument) {
|
||||
final int hdrDigits = pop.getStaticConfigOr("hdr_digits", 4).intValue();
|
||||
successTimer = ActivityMetrics.timer(pop, "success",hdrDigits);
|
||||
errorTimer = ActivityMetrics.timer(pop, "error", hdrDigits);
|
||||
resultSizeHistogram = ActivityMetrics.histogram(pop, "resultset-size", hdrDigits);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStart(long cycleValue) {
|
||||
if (timerStarts != null) {
|
||||
ThreadLocalNamedTimers.TL_INSTANCE.get().start(timerStarts);
|
||||
}
|
||||
public void onStart(final long cycleValue) {
|
||||
if (null != timerStarts) ThreadLocalNamedTimers.TL_INSTANCE.get().start(this.timerStarts);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSuccess(long cycleValue, long nanoTime, long resultSize) {
|
||||
if (instrument) {
|
||||
successTimer.update(nanoTime, TimeUnit.NANOSECONDS);
|
||||
if (resultSize > -1) {
|
||||
resultSizeHistogram.update(resultSize);
|
||||
}
|
||||
}
|
||||
if (timerStops != null) {
|
||||
ThreadLocalNamedTimers.TL_INSTANCE.get().stop(timerStops);
|
||||
public void onSuccess(final long cycleValue, final long nanoTime, final long resultSize) {
|
||||
if (this.instrument) {
|
||||
this.successTimer.update(nanoTime, TimeUnit.NANOSECONDS);
|
||||
if (-1 < resultSize) this.resultSizeHistogram.update(resultSize);
|
||||
}
|
||||
if (null != timerStops) ThreadLocalNamedTimers.TL_INSTANCE.get().stop(this.timerStops);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(long cycleValue, long resultNanos, Throwable t) {
|
||||
public void onError(final long cycleValue, final long resultNanos, final Throwable t) {
|
||||
|
||||
if (instrument) {
|
||||
errorTimer.update(resultNanos, TimeUnit.NANOSECONDS);
|
||||
}
|
||||
if (timerStops != null) {
|
||||
ThreadLocalNamedTimers.TL_INSTANCE.get().stop(timerStops);
|
||||
}
|
||||
if (this.instrument) this.errorTimer.update(resultNanos, TimeUnit.NANOSECONDS);
|
||||
if (null != timerStops) ThreadLocalNamedTimers.TL_INSTANCE.get().stop(this.timerStops);
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBLabels getLabels() {
|
||||
return this.labels;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
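
With instrument enabled, the dispenser updates a success timer, an error timer, and a result-size histogram per cycle, and the metrics are now registered against the op's labels with a configurable hdr_digits precision. A standalone sketch of the same accounting using a plain Dropwizard MetricRegistry instead of ActivityMetrics (registry and metric names here are illustrative):

    import com.codahale.metrics.Histogram;
    import com.codahale.metrics.MetricRegistry;
    import com.codahale.metrics.Timer;
    import java.util.concurrent.TimeUnit;

    public class OpInstrumentationExample {
        private final Timer successTimer;
        private final Timer errorTimer;
        private final Histogram resultSizeHistogram;

        OpInstrumentationExample(MetricRegistry registry) {
            successTimer = registry.timer("op.success");
            errorTimer = registry.timer("op.error");
            resultSizeHistogram = registry.histogram("op.resultset-size");
        }

        void onSuccess(long nanos, long resultSize) {
            successTimer.update(nanos, TimeUnit.NANOSECONDS);
            if (resultSize > -1) resultSizeHistogram.update(resultSize); // negative size means "unknown"
        }

        void onError(long nanos) {
            errorTimer.update(nanos, TimeUnit.NANOSECONDS);
        }

        public static void main(String[] args) {
            OpInstrumentationExample metrics = new OpInstrumentationExample(new MetricRegistry());
            metrics.onSuccess(1_500_000L, 10);
            metrics.onError(2_000_000L);
        }
    }
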
@@ -28,6 +28,7 @@ package io.nosqlbench.engine.api.activityimpl.uniform.flowtypes;
|
||||
* hand down the chain is more costly, so implementing this interface allows the runtime
|
||||
* to be more optimized.</li>
|
||||
* <li>{@link ChainingOp}</li>
|
||||
* <li>{@link RunnableOp}</li>
|
||||
* </ul>
|
||||
* </p>
|
||||
*/
|
||||
|
||||
@@ -1,24 +1,23 @@
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
@@ -33,11 +32,11 @@ public class EndToEndMetricsAdapterUtil {
|
||||
|
||||
public final String label;
|
||||
|
||||
MSG_SEQ_ERROR_SIMU_TYPE(String label) {
|
||||
MSG_SEQ_ERROR_SIMU_TYPE(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
private static final Map<String, MSG_SEQ_ERROR_SIMU_TYPE> MAPPING = Stream.of(values())
|
||||
private static final Map<String, MSG_SEQ_ERROR_SIMU_TYPE> MAPPING = Stream.of(MSG_SEQ_ERROR_SIMU_TYPE.values())
|
||||
.flatMap(simuType ->
|
||||
Stream.of(simuType.label,
|
||||
simuType.label.toLowerCase(),
|
||||
@@ -46,10 +45,10 @@ public class EndToEndMetricsAdapterUtil {
|
||||
simuType.name().toLowerCase(),
|
||||
simuType.name().toUpperCase())
|
||||
.distinct().map(key -> Map.entry(key, simuType)))
|
||||
.collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue));
|
||||
.collect(Collectors.toUnmodifiableMap(Entry::getKey, Entry::getValue));
|
||||
|
||||
public static Optional<MSG_SEQ_ERROR_SIMU_TYPE> parseSimuType(String simuTypeString) {
|
||||
return Optional.ofNullable(MAPPING.get(simuTypeString.trim()));
|
||||
public static Optional<MSG_SEQ_ERROR_SIMU_TYPE> parseSimuType(final String simuTypeString) {
|
||||
return Optional.ofNullable(MSG_SEQ_ERROR_SIMU_TYPE.MAPPING.get(simuTypeString.trim()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
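
parseSimuType accepts several spellings of each simulation type by precomputing every accepted key into one immutable map. A standalone re-illustration of that lookup (the enum constants and labels here are illustrative, not the project's):

    import java.util.Locale;
    import java.util.Map;
    import java.util.Optional;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class SimuTypeParseExample {
        enum SimuType {
            OUT_OF_ORDER("out_of_order"), MSG_LOSS("msg_loss"), MSG_DUP("msg_dup");

            final String label;
            SimuType(final String label) { this.label = label; }

            // Every accepted spelling maps to its constant; built once at class load.
            private static final Map<String, SimuType> MAPPING = Stream.of(values())
                .flatMap(t -> Stream.of(t.label, t.label.toUpperCase(Locale.ROOT),
                                        t.name(), t.name().toLowerCase(Locale.ROOT))
                                    .distinct()
                                    .map(key -> Map.entry(key, t)))
                .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue));

            static Optional<SimuType> parse(String s) {
                return Optional.ofNullable(MAPPING.get(s.trim()));
            }
        }

        public static void main(String[] args) {
            System.out.println(SimuType.parse(" msg_loss ")); // Optional[MSG_LOSS]
            System.out.println(SimuType.parse("unknown"));    // Optional.empty
        }
    }
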
@@ -1,23 +1,22 @@
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
import io.nosqlbench.engine.api.metrics.EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE;
|
||||
import org.apache.commons.lang3.RandomUtils;
|
||||
|
||||
import java.util.ArrayDeque;
|
||||
@@ -33,76 +32,70 @@ public class MessageSequenceNumberSendingHandler {
|
||||
long number = 1;
|
||||
Queue<Long> outOfOrderNumbers;
|
||||
|
||||
public long getNextSequenceNumber(Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE> simulatedErrorTypes) {
|
||||
return getNextSequenceNumber(simulatedErrorTypes, SIMULATED_ERROR_PROBABILITY_PERCENTAGE);
|
||||
public long getNextSequenceNumber(final Set<MSG_SEQ_ERROR_SIMU_TYPE> simulatedErrorTypes) {
|
||||
return this.getNextSequenceNumber(simulatedErrorTypes, MessageSequenceNumberSendingHandler.SIMULATED_ERROR_PROBABILITY_PERCENTAGE);
|
||||
}
|
||||
|
||||
long getNextSequenceNumber(Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE> simulatedErrorTypes, int errorProbabilityPercentage) {
|
||||
simulateError(simulatedErrorTypes, errorProbabilityPercentage);
|
||||
return nextNumber();
|
||||
long getNextSequenceNumber(final Set<MSG_SEQ_ERROR_SIMU_TYPE> simulatedErrorTypes, final int errorProbabilityPercentage) {
|
||||
this.simulateError(simulatedErrorTypes, errorProbabilityPercentage);
|
||||
return this.nextNumber();
|
||||
}
|
||||
|
||||
private void simulateError(Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE> simulatedErrorTypes, int errorProbabilityPercentage) {
|
||||
if (!simulatedErrorTypes.isEmpty() && shouldSimulateError(errorProbabilityPercentage)) {
|
||||
private void simulateError(final Set<MSG_SEQ_ERROR_SIMU_TYPE> simulatedErrorTypes, final int errorProbabilityPercentage) {
|
||||
if (!simulatedErrorTypes.isEmpty() && this.shouldSimulateError(errorProbabilityPercentage)) {
|
||||
int selectIndex = 0;
|
||||
int numberOfErrorTypes = simulatedErrorTypes.size();
|
||||
if (numberOfErrorTypes > 1) {
|
||||
// pick one of the simulated error type randomly
|
||||
selectIndex = RandomUtils.nextInt(0, numberOfErrorTypes);
|
||||
}
|
||||
EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE errorType = simulatedErrorTypes.stream()
|
||||
final int numberOfErrorTypes = simulatedErrorTypes.size();
|
||||
// pick one of the simulated error type randomly
|
||||
if (1 < numberOfErrorTypes) selectIndex = RandomUtils.nextInt(0, numberOfErrorTypes);
|
||||
final MSG_SEQ_ERROR_SIMU_TYPE errorType = simulatedErrorTypes.stream()
|
||||
.skip(selectIndex)
|
||||
.findFirst()
|
||||
.get();
|
||||
switch (errorType) {
|
||||
case OutOfOrder:
|
||||
// simulate message out of order
|
||||
injectMessagesOutOfOrder();
|
||||
this.injectMessagesOutOfOrder();
|
||||
break;
|
||||
case MsgDup:
|
||||
// simulate message duplication
|
||||
injectMessageDuplication();
|
||||
this.injectMessageDuplication();
|
||||
break;
|
||||
case MsgLoss:
|
||||
// simulate message loss
|
||||
injectMessageLoss();
|
||||
this.injectMessageLoss();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean shouldSimulateError(int errorProbabilityPercentage) {
|
||||
private boolean shouldSimulateError(final int errorProbabilityPercentage) {
|
||||
// Simulate error with the specified probability
|
||||
return RandomUtils.nextInt(0, 100) < errorProbabilityPercentage;
|
||||
}
|
||||
|
||||
long nextNumber() {
|
||||
if (outOfOrderNumbers != null) {
|
||||
long nextNumber = outOfOrderNumbers.poll();
|
||||
if (outOfOrderNumbers.isEmpty()) {
|
||||
outOfOrderNumbers = null;
|
||||
}
|
||||
if (null != outOfOrderNumbers) {
|
||||
final long nextNumber = this.outOfOrderNumbers.poll();
|
||||
if (this.outOfOrderNumbers.isEmpty()) this.outOfOrderNumbers = null;
|
||||
return nextNumber;
|
||||
}
|
||||
return number++;
|
||||
long l = this.number;
|
||||
this.number++;
|
||||
return l;
|
||||
}
|
||||
|
||||
void injectMessagesOutOfOrder() {
|
||||
if (outOfOrderNumbers == null) {
|
||||
outOfOrderNumbers = new ArrayDeque<>(Arrays.asList(number + 2, number, number + 1));
|
||||
number += 3;
|
||||
if (null == outOfOrderNumbers) {
|
||||
this.outOfOrderNumbers = new ArrayDeque<>(Arrays.asList(this.number + 2, this.number, this.number + 1));
|
||||
this.number += 3;
|
||||
}
|
||||
}
|
||||
|
||||
void injectMessageDuplication() {
|
||||
if (outOfOrderNumbers == null) {
|
||||
number--;
|
||||
}
|
||||
if (null == outOfOrderNumbers) this.number--;
|
||||
}
|
||||
|
||||
void injectMessageLoss() {
|
||||
if (outOfOrderNumbers == null) {
|
||||
number++;
|
||||
}
|
||||
if (null == outOfOrderNumbers) this.number++;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,22 +1,20 @@
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
import com.codahale.metrics.Counter;
|
||||
|
||||
@@ -46,20 +44,20 @@ public class ReceivedMessageSequenceTracker implements AutoCloseable {
|
||||
private final int maxTrackSkippedSequenceNumbers;
|
||||
private long expectedNumber = -1;
|
||||
|
||||
public ReceivedMessageSequenceTracker(Counter msgErrOutOfSeqCounter, Counter msgErrDuplicateCounter, Counter msgErrLossCounter) {
|
||||
public ReceivedMessageSequenceTracker(final Counter msgErrOutOfSeqCounter, final Counter msgErrDuplicateCounter, final Counter msgErrLossCounter) {
|
||||
this(msgErrOutOfSeqCounter, msgErrDuplicateCounter, msgErrLossCounter,
|
||||
DEFAULT_MAX_TRACK_OUT_OF_ORDER_SEQUENCE_NUMBERS, DEFAULT_MAX_TRACK_SKIPPED_SEQUENCE_NUMBERS);
|
||||
ReceivedMessageSequenceTracker.DEFAULT_MAX_TRACK_OUT_OF_ORDER_SEQUENCE_NUMBERS, ReceivedMessageSequenceTracker.DEFAULT_MAX_TRACK_SKIPPED_SEQUENCE_NUMBERS);
|
||||
}
|
||||
|
||||
public ReceivedMessageSequenceTracker(Counter msgErrOutOfSeqCounter, Counter msgErrDuplicateCounter, Counter msgErrLossCounter,
|
||||
int maxTrackOutOfOrderSequenceNumbers, int maxTrackSkippedSequenceNumbers) {
|
||||
public ReceivedMessageSequenceTracker(final Counter msgErrOutOfSeqCounter, final Counter msgErrDuplicateCounter, final Counter msgErrLossCounter,
|
||||
final int maxTrackOutOfOrderSequenceNumbers, final int maxTrackSkippedSequenceNumbers) {
|
||||
this.msgErrOutOfSeqCounter = msgErrOutOfSeqCounter;
|
||||
this.msgErrDuplicateCounter = msgErrDuplicateCounter;
|
||||
this.msgErrLossCounter = msgErrLossCounter;
|
||||
this.maxTrackOutOfOrderSequenceNumbers = maxTrackOutOfOrderSequenceNumbers;
|
||||
this.maxTrackSkippedSequenceNumbers = maxTrackSkippedSequenceNumbers;
|
||||
this.pendingOutOfSeqNumbers = new TreeSet<>();
|
||||
this.skippedSeqNumbers = new TreeSet<>();
|
||||
pendingOutOfSeqNumbers = new TreeSet<>();
|
||||
skippedSeqNumbers = new TreeSet<>();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -67,84 +65,71 @@ public class ReceivedMessageSequenceTracker implements AutoCloseable {
|
||||
*
|
||||
* @param sequenceNumber the sequence number of the received message
|
||||
*/
|
||||
public void sequenceNumberReceived(long sequenceNumber) {
|
||||
if (expectedNumber == -1) {
|
||||
expectedNumber = sequenceNumber + 1;
|
||||
public void sequenceNumberReceived(final long sequenceNumber) {
|
||||
if (-1 == expectedNumber) {
|
||||
this.expectedNumber = sequenceNumber + 1;
|
||||
return;
|
||||
}
|
||||
|
||||
if (sequenceNumber < expectedNumber) {
|
||||
if (skippedSeqNumbers.remove(sequenceNumber)) {
|
||||
if (sequenceNumber < this.expectedNumber) {
|
||||
if (this.skippedSeqNumbers.remove(sequenceNumber)) {
|
||||
// late out-of-order delivery was detected
|
||||
// decrease the loss counter
|
||||
msgErrLossCounter.dec();
|
||||
this.msgErrLossCounter.dec();
|
||||
// increment the out-of-order counter
|
||||
msgErrOutOfSeqCounter.inc();
|
||||
} else {
|
||||
msgErrDuplicateCounter.inc();
|
||||
}
|
||||
this.msgErrOutOfSeqCounter.inc();
|
||||
} else this.msgErrDuplicateCounter.inc();
|
||||
return;
|
||||
}
|
||||
|
||||
boolean messagesSkipped = false;
|
||||
if (sequenceNumber > expectedNumber) {
|
||||
if (pendingOutOfSeqNumbers.size() == maxTrackOutOfOrderSequenceNumbers) {
|
||||
messagesSkipped = processLowestPendingOutOfSequenceNumber();
|
||||
}
|
||||
if (!pendingOutOfSeqNumbers.add(sequenceNumber)) {
|
||||
msgErrDuplicateCounter.inc();
|
||||
}
|
||||
} else {
|
||||
// sequenceNumber == expectedNumber
|
||||
expectedNumber++;
|
||||
}
|
||||
processPendingOutOfSequenceNumbers(messagesSkipped);
|
||||
cleanUpTooFarBehindOutOfSequenceNumbers();
|
||||
// sequenceNumber == expectedNumber
|
||||
if (sequenceNumber > this.expectedNumber) {
|
||||
if (this.pendingOutOfSeqNumbers.size() == this.maxTrackOutOfOrderSequenceNumbers)
|
||||
messagesSkipped = this.processLowestPendingOutOfSequenceNumber();
|
||||
if (!this.pendingOutOfSeqNumbers.add(sequenceNumber)) this.msgErrDuplicateCounter.inc();
|
||||
} else this.expectedNumber++;
|
||||
this.processPendingOutOfSequenceNumbers(messagesSkipped);
|
||||
this.cleanUpTooFarBehindOutOfSequenceNumbers();
|
||||
}
|
||||
|
||||
private boolean processLowestPendingOutOfSequenceNumber() {
|
||||
// remove the lowest pending out of sequence number
|
||||
Long lowestOutOfSeqNumber = pendingOutOfSeqNumbers.first();
|
||||
pendingOutOfSeqNumbers.remove(lowestOutOfSeqNumber);
|
||||
if (lowestOutOfSeqNumber > expectedNumber) {
|
||||
final Long lowestOutOfSeqNumber = this.pendingOutOfSeqNumbers.first();
|
||||
this.pendingOutOfSeqNumbers.remove(lowestOutOfSeqNumber);
|
||||
if (lowestOutOfSeqNumber > this.expectedNumber) {
|
||||
// skip the expected number ahead to the number after the lowest sequence number
|
||||
// increment the counter with the amount of sequence numbers that got skipped
|
||||
// keep track of the skipped sequence numbers to detect late out-of-order message delivery
|
||||
for (long l = expectedNumber; l < lowestOutOfSeqNumber; l++) {
|
||||
msgErrLossCounter.inc();
|
||||
skippedSeqNumbers.add(l);
|
||||
if (skippedSeqNumbers.size() > maxTrackSkippedSequenceNumbers) {
|
||||
skippedSeqNumbers.remove(skippedSeqNumbers.first());
|
||||
}
|
||||
for (long l = this.expectedNumber; l < lowestOutOfSeqNumber; l++) {
|
||||
this.msgErrLossCounter.inc();
|
||||
this.skippedSeqNumbers.add(l);
|
||||
if (this.skippedSeqNumbers.size() > this.maxTrackSkippedSequenceNumbers)
|
||||
this.skippedSeqNumbers.remove(this.skippedSeqNumbers.first());
|
||||
}
|
||||
expectedNumber = lowestOutOfSeqNumber + 1;
|
||||
this.expectedNumber = lowestOutOfSeqNumber + 1;
|
||||
return true;
|
||||
} else {
|
||||
msgErrLossCounter.inc();
|
||||
}
|
||||
this.msgErrLossCounter.inc();
|
||||
return false;
|
||||
}
|
||||
|
||||
private void processPendingOutOfSequenceNumbers(boolean messagesSkipped) {
|
||||
private void processPendingOutOfSequenceNumbers(final boolean messagesSkipped) {
|
||||
// check if there are previously received out-of-order sequence number that have been received
|
||||
while (pendingOutOfSeqNumbers.remove(expectedNumber)) {
|
||||
expectedNumber++;
|
||||
if (!messagesSkipped) {
|
||||
msgErrOutOfSeqCounter.inc();
|
||||
}
|
||||
while (this.pendingOutOfSeqNumbers.remove(this.expectedNumber)) {
|
||||
this.expectedNumber++;
|
||||
if (!messagesSkipped) this.msgErrOutOfSeqCounter.inc();
|
||||
}
|
||||
}
|
||||
|
||||
private void cleanUpTooFarBehindOutOfSequenceNumbers() {
|
||||
// remove sequence numbers that are too far behind
|
||||
for (Iterator<Long> iterator = pendingOutOfSeqNumbers.iterator(); iterator.hasNext(); ) {
|
||||
Long number = iterator.next();
|
||||
if (number < expectedNumber - maxTrackOutOfOrderSequenceNumbers) {
|
||||
msgErrLossCounter.inc();
|
||||
for (final Iterator<Long> iterator = this.pendingOutOfSeqNumbers.iterator(); iterator.hasNext(); ) {
|
||||
final Long number = iterator.next();
|
||||
if (number < (this.expectedNumber - this.maxTrackOutOfOrderSequenceNumbers)) {
|
||||
this.msgErrLossCounter.inc();
|
||||
iterator.remove();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
} else break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -154,16 +139,15 @@ public class ReceivedMessageSequenceTracker implements AutoCloseable {
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
while (!pendingOutOfSeqNumbers.isEmpty()) {
|
||||
processPendingOutOfSequenceNumbers(processLowestPendingOutOfSequenceNumber());
|
||||
}
|
||||
while (!this.pendingOutOfSeqNumbers.isEmpty())
|
||||
this.processPendingOutOfSequenceNumbers(this.processLowestPendingOutOfSequenceNumber());
|
||||
}
|
||||
|
||||
public int getMaxTrackOutOfOrderSequenceNumbers() {
|
||||
return maxTrackOutOfOrderSequenceNumbers;
|
||||
return this.maxTrackOutOfOrderSequenceNumbers;
|
||||
}
|
||||
|
||||
public int getMaxTrackSkippedSequenceNumbers() {
|
||||
return maxTrackSkippedSequenceNumbers;
|
||||
return this.maxTrackSkippedSequenceNumbers;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,7 +17,7 @@
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
|
||||
import com.codahale.metrics.Timer.Context;
|
||||
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
|
||||
import io.nosqlbench.engine.api.templating.ParsedOp;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
@@ -32,59 +32,41 @@ import java.util.Map;
|
||||
*/
|
||||
public class ThreadLocalNamedTimers {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger(ThreadLocalNamedTimers.class);
|
||||
private static final Logger logger = LogManager.getLogger(ThreadLocalNamedTimers.class);
|
||||
|
||||
public transient final static ThreadLocal<ThreadLocalNamedTimers> TL_INSTANCE = ThreadLocal.withInitial(ThreadLocalNamedTimers::new);
|
||||
private final static Map<String, Timer> timers = new HashMap<>();
|
||||
private final Map<String, Timer.Context> contexts = new HashMap<>();
|
||||
public final static ThreadLocal<ThreadLocalNamedTimers> TL_INSTANCE = ThreadLocal.withInitial(ThreadLocalNamedTimers::new);
|
||||
private static final Map<String, Timer> timers = new HashMap<>();
|
||||
private final Map<String, Context> contexts = new HashMap<>();
|
||||
|
||||
public static void addTimer(ActivityDef def, String name, int hdrdigits) {
|
||||
if (timers.containsKey("name")) {
|
||||
logger.warn("A timer named '" + name + "' was already defined and initialized.");
|
||||
}
|
||||
Timer timer = ActivityMetrics.timer(def, name, hdrdigits);
|
||||
timers.put(name, timer);
|
||||
public static void addTimer(final ParsedOp pop, final String name) {
|
||||
if (ThreadLocalNamedTimers.timers.containsKey("name"))
|
||||
ThreadLocalNamedTimers.logger.warn("A timer named '{}' was already defined and initialized.", name);
|
||||
ThreadLocalNamedTimers.timers.put(name, ActivityMetrics.timer(pop,name,ActivityMetrics.DEFAULT_HDRDIGITS));
|
||||
}
|
||||
|
||||
public static void addTimer(ParsedOp pop, String name) {
|
||||
if (timers.containsKey("name")) {
|
||||
logger.warn("A timer named '" + name + "' was already defined and initialized.");
|
||||
}
|
||||
Timer timer = ActivityMetrics.timer(pop.getStaticConfig("alias",String.class)+"."+name);
|
||||
timers.put(name, timer);
|
||||
public void start(final String name) {
|
||||
final Context context = ThreadLocalNamedTimers.timers.get(name).time();
|
||||
this.contexts.put(name, context);
|
||||
}
|
||||
|
||||
public void start(String name) {
|
||||
Timer.Context context = timers.get(name).time();
|
||||
contexts.put(name, context);
|
||||
}
|
||||
|
||||
public void stop(String name) {
|
||||
Timer.Context context = contexts.get(name);
|
||||
public void stop(final String name) {
|
||||
final Context context = this.contexts.get(name);
|
||||
context.stop();
|
||||
}
|
||||
|
||||
public void start(List<String> timerNames) {
|
||||
for (String timerName : timerNames) {
|
||||
start(timerName);
|
||||
}
|
||||
public void start(final List<String> timerNames) {
|
||||
for (final String timerName : timerNames) this.start(timerName);
|
||||
}
|
||||
|
||||
public void start(String[] timerNames) {
|
||||
for (String timerName : timerNames) {
|
||||
start(timerName);
|
||||
}
|
||||
public void start(final String[] timerNames) {
|
||||
for (final String timerName : timerNames) this.start(timerName);
|
||||
}
|
||||
|
||||
public void stop(List<String> timerName) {
|
||||
for (String stopTimer : timerName) {
|
||||
stop(stopTimer);
|
||||
}
|
||||
public void stop(final List<String> timerName) {
|
||||
for (final String stopTimer : timerName) this.stop(stopTimer);
|
||||
}
|
||||
|
||||
public void stop(String[] timerStops) {
|
||||
for (String timerStop : timerStops) {
|
||||
stop(timerStop);
|
||||
}
|
||||
public void stop(final String[] timerStops) {
|
||||
for (final String timerStop : timerStops) this.stop(timerStop);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,6 +16,8 @@
|
||||
|
||||
package io.nosqlbench.engine.api.templating;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.config.fieldreaders.DynamicFieldReader;
|
||||
import io.nosqlbench.api.config.fieldreaders.StaticFieldReader;
|
||||
import io.nosqlbench.api.config.standard.NBConfigError;
|
||||
@@ -292,9 +294,9 @@ import java.util.function.LongFunction;
|
||||
* in the activity parameters if needed to find a missing configuration parameter, but this will only work if
|
||||
* the specific named parameter is allowed at the activity level.</P>
|
||||
*/
|
||||
public class ParsedOp implements LongFunction<Map<String, ?>>, StaticFieldReader, DynamicFieldReader {
|
||||
public class ParsedOp implements LongFunction<Map<String, ?>>, NBLabeledElement, StaticFieldReader, DynamicFieldReader {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger(ParsedOp.class);
|
||||
private static final Logger logger = LogManager.getLogger(ParsedOp.class);
|
||||
|
||||
/**
|
||||
* The names of payload values in the result of the operation which should be saved.
|
||||
@@ -307,36 +309,32 @@ public class ParsedOp implements LongFunction<Map<String, ?>>, StaticFieldReader
|
||||
private final OpTemplate _opTemplate;
|
||||
private final NBConfiguration activityCfg;
|
||||
private final ParsedTemplateMap tmap;
|
||||
|
||||
/**
|
||||
* Create a parsed command from an Op template.
|
||||
*
|
||||
* @param ot An OpTemplate representing an operation to be performed in a native driver.
|
||||
* @param activityCfg The activity configuration, used for reading config parameters
|
||||
*/
|
||||
public ParsedOp(OpTemplate ot, NBConfiguration activityCfg) {
|
||||
this(ot, activityCfg, List.of());
|
||||
}
|
||||
private final NBLabels labels;
|
||||
|
||||
/**
|
||||
* Create a parsed command from an Op template. This version is exactly like
|
||||
* {@link ParsedOp (OpTemplate,NBConfiguration)} except that it allows
|
||||
* except that it allows
|
||||
* preprocessors. Preprocessors are all applied to the op template before
|
||||
* it is applied to the parsed command fields, allowing you to combine or destructure
|
||||
* fields from more than one representation into a single canonical representation
|
||||
* for processing.
|
||||
*
|
||||
* @param opTemplate The OpTemplate as provided by a user via YAML, JSON, or API (data structure)
|
||||
* @param activityCfg The activity configuration, used to resolve nested config parameters
|
||||
* @param preprocessors Map->Map transformers.
|
||||
* @param opTemplate
|
||||
* The OpTemplate as provided by a user via YAML, JSON, or API (data structure)
|
||||
* @param activityCfg
|
||||
* The activity configuration, used to resolve nested config parameters
|
||||
* @param preprocessors
|
||||
* Map->Map transformers.
|
||||
* @param labels
|
||||
*/
|
||||
public ParsedOp(
|
||||
OpTemplate opTemplate,
|
||||
NBConfiguration activityCfg,
|
||||
List<Function<Map<String, Object>, Map<String, Object>>> preprocessors
|
||||
) {
|
||||
List<Function<Map<String, Object>, Map<String, Object>>> preprocessors,
|
||||
NBLabeledElement parent) {
|
||||
this._opTemplate = opTemplate;
|
||||
this.activityCfg = activityCfg;
|
||||
labels=parent.getLabels().and("op", this.getName());
|
||||
|
||||
Map<String, Object> map = opTemplate.getOp().orElseThrow(() ->
|
||||
new OpConfigError("ParsedOp constructor requires a non-null value for the op field, but it was missing."));
|
||||
@@ -542,7 +540,7 @@ public class ParsedOp implements LongFunction<Map<String, ?>>, StaticFieldReader
|
||||
* @param name The field name which must be defined as static or dynamic
|
||||
* @return A function which can provide the named field value
|
||||
*/
|
||||
public LongFunction<? extends String> getAsRequiredFunction(String name) {
|
||||
public LongFunction<String> getAsRequiredFunction(String name) {
|
||||
return tmap.getAsRequiredFunction(name, String.class);
|
||||
}
|
||||
|
||||
@@ -601,6 +599,7 @@ public class ParsedOp implements LongFunction<Map<String, ?>>, StaticFieldReader
|
||||
* @param field The requested field name
|
||||
* @return true if the named field is defined as static or dynamic
|
||||
*/
|
||||
@Override
|
||||
public boolean isDefined(String field) {
|
||||
return tmap.isDefined(field);
|
||||
}
|
||||
@@ -920,4 +919,9 @@ public class ParsedOp implements LongFunction<Map<String, ?>>, StaticFieldReader
|
||||
public List<CapturePoint> getCaptures() {
|
||||
return tmap.getCaptures();
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBLabels getLabels() {
|
||||
return labels;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
import io.nosqlbench.engine.api.metrics.EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.Arrays;
|
||||
@@ -31,59 +32,53 @@ class MessageSequenceNumberSendingHandlerTest {
|
||||
|
||||
@Test
|
||||
void shouldAddMonotonicSequence() {
|
||||
for (long l = 1; l <= 100; l++) {
|
||||
assertEquals(l, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
}
|
||||
for (long l = 1; 100 >= l; l++)
|
||||
assertEquals(l, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldInjectMessageLoss() {
|
||||
assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(3L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE.MsgLoss), 100));
|
||||
assertEquals(1L, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(3L, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(MSG_SEQ_ERROR_SIMU_TYPE.MsgLoss), 100));
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldInjectMessageDuplication() {
|
||||
assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE.MsgDup), 100));
|
||||
assertEquals(1L, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(1L, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(MSG_SEQ_ERROR_SIMU_TYPE.MsgDup), 100));
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldInjectMessageOutOfOrder() {
|
||||
assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(4L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE.OutOfOrder), 100));
|
||||
assertEquals(2L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(3L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(5L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(6, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(1L, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(4L, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.singleton(MSG_SEQ_ERROR_SIMU_TYPE.OutOfOrder), 100));
|
||||
assertEquals(2L, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(3L, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(5L, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(6, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldInjectOneOfTheSimulatedErrorsRandomly() {
|
||||
Set<EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE> allErrorTypes = new HashSet<>(Arrays.asList(EndToEndMetricsAdapterUtil.MSG_SEQ_ERROR_SIMU_TYPE.values()));
|
||||
final Set<MSG_SEQ_ERROR_SIMU_TYPE> allErrorTypes = new HashSet<>(Arrays.asList(MSG_SEQ_ERROR_SIMU_TYPE.values()));
|
||||
|
||||
assertEquals(1L, sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
assertEquals(1L, this.sequenceNumberSendingHandler.getNextSequenceNumber(Collections.emptySet()));
|
||||
long previousSequenceNumber = 1L;
|
||||
int outOfSequenceInjectionCounter = 0;
|
||||
int messageDupCounter = 0;
|
||||
int messageLossCounter = 0;
|
||||
int successCounter = 0;
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
long nextSequenceNumber = sequenceNumberSendingHandler.getNextSequenceNumber(allErrorTypes);
|
||||
if (nextSequenceNumber >= previousSequenceNumber + 3) {
|
||||
outOfSequenceInjectionCounter++;
|
||||
} else if (nextSequenceNumber <= previousSequenceNumber) {
|
||||
messageDupCounter++;
|
||||
} else if (nextSequenceNumber >= previousSequenceNumber + 2) {
|
||||
messageLossCounter++;
|
||||
} else if (nextSequenceNumber == previousSequenceNumber + 1) {
|
||||
successCounter++;
|
||||
}
|
||||
for (int i = 0; 1000 > i; i++) {
|
||||
final long nextSequenceNumber = this.sequenceNumberSendingHandler.getNextSequenceNumber(allErrorTypes);
|
||||
if (nextSequenceNumber >= (previousSequenceNumber + 3)) outOfSequenceInjectionCounter++;
|
||||
else if (nextSequenceNumber <= previousSequenceNumber) messageDupCounter++;
|
||||
else if (nextSequenceNumber >= (previousSequenceNumber + 2)) messageLossCounter++;
|
||||
else if (nextSequenceNumber == (previousSequenceNumber + 1)) successCounter++;
|
||||
previousSequenceNumber = nextSequenceNumber;
|
||||
}
|
||||
assertTrue(outOfSequenceInjectionCounter > 0);
|
||||
assertTrue(messageDupCounter > 0);
|
||||
assertTrue(messageLossCounter > 0);
|
||||
assertTrue(0 < outOfSequenceInjectionCounter);
|
||||
assertTrue(0 < messageDupCounter);
|
||||
assertTrue(0 < messageLossCounter);
|
||||
assertEquals(1000, outOfSequenceInjectionCounter + messageDupCounter + messageLossCounter + successCounter);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,18 +1,17 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
@@ -28,220 +27,189 @@ class ReceivedMessageSequenceTrackerTest {
|
||||
Counter msgErrOutOfSeqCounter = new Counter();
|
||||
Counter msgErrDuplicateCounter = new Counter();
|
||||
Counter msgErrLossCounter = new Counter();
|
||||
ReceivedMessageSequenceTracker messageSequenceTracker = new ReceivedMessageSequenceTracker(msgErrOutOfSeqCounter, msgErrDuplicateCounter, msgErrLossCounter, 20, 20);
|
||||
ReceivedMessageSequenceTracker messageSequenceTracker = new ReceivedMessageSequenceTracker(this.msgErrOutOfSeqCounter, this.msgErrDuplicateCounter, this.msgErrLossCounter, 20, 20);
|
||||
|
||||
@Test
|
||||
void shouldCountersBeZeroWhenSequenceDoesntContainGaps() {
|
||||
// when
|
||||
for (long l = 0; l < 100L; l++) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
messageSequenceTracker.close();
|
||||
for (long l = 0; 100L > l; l++) this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
this.messageSequenceTracker.close();
|
||||
// then
|
||||
assertEquals(0, msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, msgErrLossCounter.getCount());
|
||||
assertEquals(0, this.msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, this.msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, this.msgErrLossCounter.getCount());
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L})
|
||||
void shouldDetectMsgLossWhenEverySecondMessageIsLost(long totalMessages) {
|
||||
doShouldDetectMsgLoss(totalMessages, 2);
|
||||
void shouldDetectMsgLossWhenEverySecondMessageIsLost(final long totalMessages) {
|
||||
this.doShouldDetectMsgLoss(totalMessages, 2);
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L})
|
||||
void shouldDetectMsgLossWhenEveryThirdMessageIsLost(long totalMessages) {
|
||||
doShouldDetectMsgLoss(totalMessages, 3);
|
||||
void shouldDetectMsgLossWhenEveryThirdMessageIsLost(final long totalMessages) {
|
||||
this.doShouldDetectMsgLoss(totalMessages, 3);
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@ValueSource(longs = {20L, 21L, 40L, 41L, 42L, 43L, 100L})
|
||||
void shouldDetectMsgLossWhenEvery21stMessageIsLost(long totalMessages) {
|
||||
doShouldDetectMsgLoss(totalMessages, 21);
|
||||
void shouldDetectMsgLossWhenEvery21stMessageIsLost(final long totalMessages) {
|
||||
this.doShouldDetectMsgLoss(totalMessages, 21);
|
||||
}
|
||||
|
||||
private void doShouldDetectMsgLoss(long totalMessages, int looseEveryNthMessage) {
|
||||
private void doShouldDetectMsgLoss(final long totalMessages, final int looseEveryNthMessage) {
|
||||
int messagesLost = 0;
|
||||
// when
|
||||
boolean lastMessageWasLost = false;
|
||||
for (long l = 0; l < totalMessages; l++) {
|
||||
if (l % looseEveryNthMessage == 1) {
|
||||
if (1 == (l % looseEveryNthMessage)) {
|
||||
messagesLost++;
|
||||
lastMessageWasLost = true;
|
||||
continue;
|
||||
} else {
|
||||
lastMessageWasLost = false;
|
||||
}
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
lastMessageWasLost = false;
|
||||
this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
if (lastMessageWasLost) {
|
||||
messageSequenceTracker.sequenceNumberReceived(totalMessages);
|
||||
}
|
||||
messageSequenceTracker.close();
|
||||
if (lastMessageWasLost) this.messageSequenceTracker.sequenceNumberReceived(totalMessages);
|
||||
this.messageSequenceTracker.close();
|
||||
// then
|
||||
assertEquals(0, msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, msgErrDuplicateCounter.getCount());
|
||||
assertEquals(messagesLost, msgErrLossCounter.getCount());
|
||||
assertEquals(0, this.msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, this.msgErrDuplicateCounter.getCount());
|
||||
assertEquals(messagesLost, this.msgErrLossCounter.getCount());
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@ValueSource(longs = {10L, 11L, 19L, 20L, 21L, 100L})
|
||||
void shouldDetectMsgDuplication(long totalMessages) {
|
||||
void shouldDetectMsgDuplication(final long totalMessages) {
|
||||
int messagesDuplicated = 0;
|
||||
// when
|
||||
for (long l = 0; l < totalMessages; l++) {
|
||||
if (l % 2 == 1) {
|
||||
if (1 == (l % 2)) {
|
||||
messagesDuplicated++;
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
if (totalMessages % 2 == 0) {
|
||||
messageSequenceTracker.sequenceNumberReceived(totalMessages);
|
||||
}
|
||||
if (totalMessages < 2 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers()) {
|
||||
messageSequenceTracker.close();
|
||||
this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
if (0 == (totalMessages % 2)) this.messageSequenceTracker.sequenceNumberReceived(totalMessages);
|
||||
if (totalMessages < (2L * this.messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers()))
|
||||
this.messageSequenceTracker.close();
|
||||
|
||||
// then
|
||||
assertEquals(0, msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(messagesDuplicated, msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, msgErrLossCounter.getCount());
|
||||
assertEquals(0, this.msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(messagesDuplicated, this.msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, this.msgErrLossCounter.getCount());
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldDetectSingleMessageOutOfSequence() {
|
||||
// when
|
||||
for (long l = 0; l < 10L; l++) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
messageSequenceTracker.sequenceNumberReceived(10L);
|
||||
messageSequenceTracker.sequenceNumberReceived(12L);
|
||||
messageSequenceTracker.sequenceNumberReceived(11L);
|
||||
for (long l = 13L; l < 100L; l++) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
for (long l = 0; 10L > l; l++) this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(10L);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(12L);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(11L);
|
||||
for (long l = 13L; 100L > l; l++) this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
|
||||
// then
|
||||
assertEquals(1, msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, msgErrLossCounter.getCount());
|
||||
assertEquals(1, this.msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, this.msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, this.msgErrLossCounter.getCount());
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldDetectMultipleMessagesOutOfSequence() {
|
||||
// when
|
||||
for (long l = 0; l < 10L; l++) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
messageSequenceTracker.sequenceNumberReceived(10L);
|
||||
messageSequenceTracker.sequenceNumberReceived(14L);
|
||||
messageSequenceTracker.sequenceNumberReceived(13L);
|
||||
messageSequenceTracker.sequenceNumberReceived(11L);
|
||||
messageSequenceTracker.sequenceNumberReceived(12L);
|
||||
for (long l = 15L; l < 100L; l++) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
for (long l = 0; 10L > l; l++) this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(10L);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(14L);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(13L);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(11L);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(12L);
|
||||
for (long l = 15L; 100L > l; l++) this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
|
||||
// then
|
||||
assertEquals(2, msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, msgErrLossCounter.getCount());
|
||||
assertEquals(2, this.msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, this.msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, this.msgErrLossCounter.getCount());
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldDetectIndividualMessageLoss() {
|
||||
// when
|
||||
for (long l = 0; l < 100L; l++) {
|
||||
if (l != 11L) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
}
|
||||
messageSequenceTracker.close();
|
||||
for (long l = 0; 100L > l; l++) if (11L != l) this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
this.messageSequenceTracker.close();
|
||||
|
||||
// then
|
||||
assertEquals(0, msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, msgErrDuplicateCounter.getCount());
|
||||
assertEquals(1, msgErrLossCounter.getCount());
|
||||
assertEquals(0, this.msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, this.msgErrDuplicateCounter.getCount());
|
||||
assertEquals(1, this.msgErrLossCounter.getCount());
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldDetectGapAndMessageDuplication() {
|
||||
// when
|
||||
for (long l = 0; l < 100L; l++) {
|
||||
if (l != 11L) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
if (l == 12L) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
for (long l = 0; 100L > l; l++) {
|
||||
if (11L != l) this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
if (12L == l) this.messageSequenceTracker.sequenceNumberReceived(12L);
|
||||
}
|
||||
messageSequenceTracker.close();
|
||||
this.messageSequenceTracker.close();
|
||||
|
||||
// then
|
||||
assertEquals(0, msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(1, msgErrDuplicateCounter.getCount());
|
||||
assertEquals(1, msgErrLossCounter.getCount());
|
||||
assertEquals(0, this.msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(1, this.msgErrDuplicateCounter.getCount());
|
||||
assertEquals(1, this.msgErrLossCounter.getCount());
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldDetectGapAndMessageDuplicationTimes2() {
|
||||
// when
|
||||
for (long l = 0; l < 100L; l++) {
|
||||
if (l != 11L) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
if (l == 12L) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
for (long l = 0; 100L > l; l++) {
|
||||
if (11L != l) this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
if (12L == l) {
|
||||
this.messageSequenceTracker.sequenceNumberReceived(12L);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
}
|
||||
messageSequenceTracker.close();
|
||||
this.messageSequenceTracker.close();
|
||||
|
||||
// then
|
||||
assertEquals(0, msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(2, msgErrDuplicateCounter.getCount());
|
||||
assertEquals(1, msgErrLossCounter.getCount());
|
||||
assertEquals(0, this.msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(2, this.msgErrDuplicateCounter.getCount());
|
||||
assertEquals(1, this.msgErrLossCounter.getCount());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void shouldDetectDelayedOutOfOrderDelivery() {
|
||||
// when
|
||||
for (long l = 0; l < 5 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers(); l++) {
|
||||
if (l != 10) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
if (l == messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers() * 2) {
|
||||
messageSequenceTracker.sequenceNumberReceived(10);
|
||||
}
|
||||
for (long l = 0; l < (5L * this.messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers()); l++) {
|
||||
if (10 != l) this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
if (l == (this.messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers() * 2L))
|
||||
this.messageSequenceTracker.sequenceNumberReceived(10);
|
||||
}
|
||||
messageSequenceTracker.close();
|
||||
this.messageSequenceTracker.close();
|
||||
|
||||
// then
|
||||
assertEquals(1, msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, msgErrLossCounter.getCount());
|
||||
assertEquals(1, this.msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, this.msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, this.msgErrLossCounter.getCount());
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldDetectDelayedOutOfOrderDeliveryOf2ConsecutiveSequenceNumbers() {
|
||||
// when
|
||||
for (long l = 0; l < 5 * messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers(); l++) {
|
||||
if (l != 10 && l != 11) {
|
||||
messageSequenceTracker.sequenceNumberReceived(l);
|
||||
}
|
||||
if (l == messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers() * 2) {
|
||||
messageSequenceTracker.sequenceNumberReceived(10);
|
||||
messageSequenceTracker.sequenceNumberReceived(11);
|
||||
for (long l = 0; l < (5L * this.messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers()); l++) {
|
||||
if ((10 != l) && (11 != l)) this.messageSequenceTracker.sequenceNumberReceived(l);
|
||||
if (l == (this.messageSequenceTracker.getMaxTrackOutOfOrderSequenceNumbers() * 2L)) {
|
||||
this.messageSequenceTracker.sequenceNumberReceived(10);
|
||||
this.messageSequenceTracker.sequenceNumberReceived(11);
|
||||
}
|
||||
}
|
||||
messageSequenceTracker.close();
|
||||
this.messageSequenceTracker.close();
|
||||
|
||||
// then
|
||||
assertEquals(2, msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, msgErrLossCounter.getCount());
|
||||
assertEquals(2, this.msgErrOutOfSeqCounter.getCount());
|
||||
assertEquals(0, this.msgErrDuplicateCounter.getCount());
|
||||
assertEquals(0, this.msgErrLossCounter.getCount());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
package io.nosqlbench.engine.api.templating;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.engine.api.activityconfig.OpsLoader;
|
||||
import io.nosqlbench.engine.api.activityconfig.yaml.OpData;
|
||||
import io.nosqlbench.engine.api.activityconfig.yaml.OpTemplate;
|
||||
@@ -51,17 +52,19 @@ public class ParsedOpTest {
|
||||
ConfigModel.of(ParsedOpTest.class)
|
||||
.add(Param.defaultTo("testcfg", "testval"))
|
||||
.asReadOnly()
|
||||
.apply(Map.of())
|
||||
.apply(Map.of()),
|
||||
List.of(),
|
||||
NBLabeledElement.forMap(Map.of())
|
||||
);
|
||||
|
||||
@Test
|
||||
public void testFieldDelegationFromDynamicToStaticToConfig() {
|
||||
NBConfiguration cfg = ConfigModel.of(ParsedOpTest.class)
|
||||
final NBConfiguration cfg = ConfigModel.of(ParsedOpTest.class)
|
||||
.add(Param.defaultTo("puppy", "dog"))
|
||||
.add(Param.required("surname", String.class))
|
||||
.asReadOnly().apply(Map.of("surname", "yes"));
|
||||
|
||||
String opt = """
|
||||
final String opt = """
|
||||
ops:
|
||||
op1:
|
||||
d1: "{{NumberNameToString()}}"
|
||||
@@ -69,10 +72,10 @@ public class ParsedOpTest {
|
||||
params:
|
||||
ps1: "param-one"
|
||||
""";
|
||||
OpsDocList stmtsDocs = OpsLoader.loadString(opt, OpTemplateFormat.yaml, cfg.getMap(), null);
|
||||
final OpsDocList stmtsDocs = OpsLoader.loadString(opt, OpTemplateFormat.yaml, cfg.getMap(), null);
|
||||
assertThat(stmtsDocs.getOps().size()).isEqualTo(1);
|
||||
OpTemplate opTemplate = stmtsDocs.getOps().get(0);
|
||||
ParsedOp parsedOp = new ParsedOp(opTemplate, cfg);
|
||||
final OpTemplate opTemplate = stmtsDocs.getOps().get(0);
|
||||
final ParsedOp parsedOp = new ParsedOp(opTemplate, cfg, List.of(), NBLabeledElement.forMap(Map.of()));
|
||||
|
||||
assertThat(parsedOp.getAsFunctionOr("d1","invalid").apply(1L)).isEqualTo("one");
|
||||
assertThat(parsedOp.getAsFunctionOr("s1","invalid").apply(1L)).isEqualTo("static-one");
|
||||
@@ -90,7 +93,7 @@ public class ParsedOpTest {
|
||||
|
||||
@Test
|
||||
public void testSubMapTemplates() {
|
||||
ParsedOp parsedOp = new ParsedOp(
|
||||
final ParsedOp parsedOp = new ParsedOp(
|
||||
new OpData().applyFields(Map.of(
|
||||
"op", Map.of(
|
||||
"field1-literal", "literalvalue1",
|
||||
@@ -109,13 +112,15 @@ public class ParsedOpTest {
|
||||
ConfigModel.of(ParsedOpTest.class)
|
||||
.add(Param.defaultTo("testcfg", "testval"))
|
||||
.asReadOnly()
|
||||
.apply(Map.of())
|
||||
.apply(Map.of()),
|
||||
List.of(),
|
||||
NBLabeledElement.forMap(Map.of())
|
||||
);
|
||||
LongFunction<? extends String> f1 = parsedOp.getAsRequiredFunction("field1-literal");
|
||||
LongFunction<? extends String> f2 = parsedOp.getAsRequiredFunction("field2-object");
|
||||
LongFunction<? extends String> f3 = parsedOp.getAsRequiredFunction("field3-template");
|
||||
LongFunction<? extends Map> f4 = parsedOp.getAsRequiredFunction("field4-map-template",Map.class);
|
||||
LongFunction<? extends Map> f5 = parsedOp.getAsRequiredFunction("field5-map-literal",Map.class);
|
||||
final LongFunction<? extends String> f1 = parsedOp.getAsRequiredFunction("field1-literal");
|
||||
final LongFunction<? extends String> f2 = parsedOp.getAsRequiredFunction("field2-object");
|
||||
final LongFunction<? extends String> f3 = parsedOp.getAsRequiredFunction("field3-template");
|
||||
final LongFunction<? extends Map> f4 = parsedOp.getAsRequiredFunction("field4-map-template",Map.class);
|
||||
final LongFunction<? extends Map> f5 = parsedOp.getAsRequiredFunction("field5-map-literal",Map.class);
|
||||
assertThat(f1.apply(1)).isNotNull();
|
||||
assertThat(f2.apply(2)).isNotNull();
|
||||
assertThat(f3.apply(3)).isNotNull();
|
||||
@@ -126,7 +131,7 @@ public class ParsedOpTest {
|
||||
|
||||
@Test
|
||||
public void testParsedOp() {
|
||||
Map<String, Object> m1 = pc.apply(0);
|
||||
final Map<String, Object> m1 = this.pc.apply(0);
|
||||
assertThat(m1).containsEntry("stmt", "test");
|
||||
assertThat(m1).containsEntry("dyna1", "zero");
|
||||
assertThat(m1).containsEntry("dyna2", "zero");
|
||||
@@ -135,22 +140,22 @@ public class ParsedOpTest {
|
||||
|
||||
@Test
|
||||
public void testNewListBinder() {
|
||||
LongFunction<List<Object>> lb = pc.newListBinder("dyna1", "identity", "dyna2", "identity");
|
||||
List<Object> objects = lb.apply(1);
|
||||
final LongFunction<List<Object>> lb = this.pc.newListBinder("dyna1", "identity", "dyna2", "identity");
|
||||
final List<Object> objects = lb.apply(1);
|
||||
assertThat(objects).isEqualTo(List.of("one", 1L, "one", 1L));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNewMapBinder() {
|
||||
LongFunction<Map<String, Object>> mb = pc.newOrderedMapBinder("dyna1", "identity", "dyna2");
|
||||
Map<String, Object> objects = mb.apply(2);
|
||||
final LongFunction<Map<String, Object>> mb = this.pc.newOrderedMapBinder("dyna1", "identity", "dyna2");
|
||||
final Map<String, Object> objects = mb.apply(2);
|
||||
assertThat(objects).isEqualTo(Map.<String, Object>of("dyna1", "two", "identity", 2L, "dyna2", "two"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNewAryBinder() {
|
||||
LongFunction<Object[]> ab = pc.newArrayBinder("dyna1", "dyna1", "identity", "identity");
|
||||
Object[] objects = ab.apply(3);
|
||||
final LongFunction<Object[]> ab = this.pc.newArrayBinder("dyna1", "dyna1", "identity", "identity");
|
||||
final Object[] objects = ab.apply(3);
|
||||
assertThat(objects).isEqualTo(new Object[]{"three", "three", 3L, 3L});
|
||||
}
|
||||
|
||||
|
||||
BIN  devdocs/devguide/_tosort/MetricTypes.png  (new file, binary file not shown; 226 KiB)
124  devdocs/devguide/_tosort/MetricTypes.uml  (new file)
@@ -0,0 +1,124 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Diagram>
|
||||
<ID>JAVA</ID>
|
||||
<OriginalElement>com.codahale.metrics.Counter</OriginalElement>
|
||||
<nodes>
|
||||
<node x="1176.5" y="736.5">com.codahale.metrics.Metered</node>
|
||||
<node x="800.0" y="1156.5">io.nosqlbench.api.engine.metrics.DeltaSnapshotter</node>
|
||||
<node x="545.0" y="656.5">com.codahale.metrics.Metric</node>
|
||||
<node x="1125.5" y="1056.5">io.nosqlbench.api.engine.metrics.instruments.NBMetricTimer</node>
|
||||
<node x="328.5" y="1056.5">io.nosqlbench.api.engine.metrics.instruments.NBMetricCounter</node>
|
||||
<node x="1193.5" y="896.5">com.codahale.metrics.Timer</node>
|
||||
<node x="711.5" y="1056.5">io.nosqlbench.api.engine.metrics.instruments.NBMetricHistogram</node>
|
||||
<node x="788.0" y="736.5">com.codahale.metrics.Counting</node>
|
||||
<node x="396.5" y="896.5">com.codahale.metrics.Counter</node>
|
||||
<node x="-12.0" y="896.5">com.codahale.metrics.Gauge</node>
|
||||
<node x="706.5" y="1236.5">io.nosqlbench.api.engine.metrics.HdrDeltaHistogramAttachment</node>
|
||||
<node x="779.5" y="896.5">com.codahale.metrics.Histogram</node>
|
||||
<node x="1533.5" y="896.5">com.codahale.metrics.Meter</node>
|
||||
<node x="988.0" y="816.5">com.codahale.metrics.Sampling</node>
|
||||
<node x="-80.0" y="1056.5">io.nosqlbench.api.engine.metrics.instruments.NBMetricGauge</node>
|
||||
<node x="723.5" y="1316.5">io.nosqlbench.api.engine.metrics.HdrDeltaHistogramProvider</node>
|
||||
<node x="1465.5" y="1056.5">io.nosqlbench.api.engine.metrics.instruments.NBMetricMeter</node>
|
||||
<node x="996.5" y="976.5">com.codahale.metrics.Timer.Context</node>
|
||||
</nodes>
|
||||
<notes />
|
||||
<edges>
|
||||
<edge source="com.codahale.metrics.Metered" target="com.codahale.metrics.Metric" relationship="INTERFACE_GENERALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Meter" target="com.codahale.metrics.Metered" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Histogram" target="com.codahale.metrics.Sampling" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Histogram" target="com.codahale.metrics.Metric" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="io.nosqlbench.api.engine.metrics.instruments.NBMetricCounter" target="com.codahale.metrics.Counter" relationship="GENERALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="io.nosqlbench.api.engine.metrics.instruments.NBMetricTimer" target="io.nosqlbench.api.engine.metrics.DeltaSnapshotter" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Timer" target="com.codahale.metrics.Metered" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Counter" target="com.codahale.metrics.Metric" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="io.nosqlbench.api.engine.metrics.instruments.NBMetricHistogram" target="io.nosqlbench.api.engine.metrics.HdrDeltaHistogramAttachment" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="io.nosqlbench.api.engine.metrics.instruments.NBMetricTimer" target="com.codahale.metrics.Timer" relationship="GENERALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="io.nosqlbench.api.engine.metrics.instruments.NBMetricMeter" target="com.codahale.metrics.Meter" relationship="GENERALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Counter" target="com.codahale.metrics.Counting" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="io.nosqlbench.api.engine.metrics.instruments.NBMetricHistogram" target="io.nosqlbench.api.engine.metrics.DeltaSnapshotter" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Timer.Context" target="com.codahale.metrics.Timer" relationship="INNER_CLASS">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Gauge" target="com.codahale.metrics.Metric" relationship="INTERFACE_GENERALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Metered" target="com.codahale.metrics.Counting" relationship="INTERFACE_GENERALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="io.nosqlbench.api.engine.metrics.instruments.NBMetricGauge" target="com.codahale.metrics.Gauge" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="io.nosqlbench.api.engine.metrics.instruments.NBMetricHistogram" target="com.codahale.metrics.Histogram" relationship="GENERALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Histogram" target="com.codahale.metrics.Counting" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="com.codahale.metrics.Timer" target="com.codahale.metrics.Sampling" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="io.nosqlbench.api.engine.metrics.HdrDeltaHistogramAttachment" target="io.nosqlbench.api.engine.metrics.HdrDeltaHistogramProvider" relationship="INTERFACE_GENERALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
<edge source="io.nosqlbench.api.engine.metrics.instruments.NBMetricTimer" target="io.nosqlbench.api.engine.metrics.HdrDeltaHistogramAttachment" relationship="REALIZATION">
|
||||
<point x="0.0" y="-0.0" />
|
||||
<point x="0.0" y="-0.0" />
|
||||
</edge>
|
||||
</edges>
|
||||
<settings layout="Circular" zoom="0.8310111464968153" showDependencies="false" x="605.3405988023951" y="860.1901796407186" />
|
||||
<SelectedNodes />
|
||||
<Categories>
|
||||
<Category>Inner Classes</Category>
|
||||
</Categories>
|
||||
<SCOPE>All</SCOPE>
|
||||
<VISIBILITY>private</VISIBILITY>
|
||||
</Diagram>
|
||||
|
||||
64  devdocs/metrics_labeling.md  (new file)
@@ -0,0 +1,64 @@
# Metrics Labeling

All metrics flowing from NoSQLBench should come with a useful set of labels which
are presented in a self-consistent manner. These labels serve to identify a given metric
not only within a given study or deployment, but across time with macro-level identifiers.

Those identifiers which are nominal for the study or deployment should also be provided
in the annotations which can be queried later to find the original set of related metrics.

# Naming Context

In order to simplify the naming methods, all metrics instruments are created through
a helper type called ActivityMetrics. (This name might change.)
It contains factory methods for all the metric types you may use within the NoSQLBench runtime.

Each factory method must start with an NBLabeledElement, which provides the naming context
for the _thing to which the metric pertains_, *separate* from the actual metric family name.
The metric family name is provided separately. This means that the factory methods have,
injected at the construction site, all the identifying labels needed by the metric for
reporting to the metrics collector.

However, the appropriate set of labels which should be provided might vary by caller, as sometimes
the caller is an Activity, sometimes an OpDispenser within an activity, sometimes a user script,
etc.

This section describes the different caller (instrumented element, AKA NBLabeledElement)
contexts and what labels are expected to be provided for each. Each level is considered
a nested layer below some other element, which implicitly includes all labeling data from
above.
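
As a minimal sketch of this calling pattern: the example below assumes a factory signature
like `ActivityMetrics.timer(NBLabeledElement, String, int)`, similar to the calls appearing
elsewhere in this changeset; the element, label values, and class name are hypothetical.

```java
import java.util.Map;

import com.codahale.metrics.Timer;
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.engine.metrics.ActivityMetrics;

public class LabeledTimerExample {
    public static void main(String[] args) {
        // A stand-in for the real caller; in practice this is an Activity, an OpDispenser,
        // or another component that already implements NBLabeledElement.
        NBLabeledElement element = NBLabeledElement.forMap(Map.of(
            "appname", "nosqlbench",       // process-level label
            "alias", "myworkload_default"  // activity-level label (illustrative value)
        ));

        // "result_success" is the metric family name, passed separately from the labels;
        // 4 matches the default hdr_digits precision used elsewhere in this changeset.
        Timer timer = ActivityMetrics.timer(element, "result_success", 4);
        timer.time().stop();
    }
}
```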
# Labeling Contexts

- NoSQLBench Process
  - "appname": "nosqlbench"
- Scenario Context (calling as Scenario)
  - IFF Named Scenario Mode:
    - "workload": "..." # from the file
    - "scenario": "..." # from the scenario name
    - "usermode": "named_scenario"
  - IFF Run Mode:
    - "workload": "..." # from the file
    - "scenario": "..." # from the (auto) scenario name
    - "usermode": "adhoc_activity"
- Activity Context (calling as Activity)
  - includes above labels
  - IFF Named Scenario Mode
    - "step": "..."
    - "alias": "${workload}_${scenario}_${step}"
  - ELSE
    - "alias": "..." # just the activity alias
- Op Template Context (calling as OpDispenser)
  - includes above labels
  - "op": "<name of the parsed op>"
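
To make the composition above concrete, here is a small sketch of the effective label set an
op-level metric would carry in named-scenario mode; every value is a hypothetical placeholder.

```java
import java.util.Map;

public class OpLabelSetExample {
    public static void main(String[] args) {
        // Effective label set for a metric created at the op template level in
        // named-scenario mode. Real values come from the workload file, scenario
        // name, step name, and op template at runtime.
        Map<String, String> opMetricLabels = Map.of(
            "appname", "nosqlbench",
            "workload", "cql-keyvalue",           // from the workload file
            "scenario", "default",                // from the scenario name
            "usermode", "named_scenario",
            "step", "main",
            "alias", "cql-keyvalue_default_main", // ${workload}_${scenario}_${step}
            "op", "main-insert"                   // name of the parsed op
        );
        System.out.println(opMetricLabels);
    }
}
```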
# Additional Data

In the future it would be nice to include both the driver adapter name and the space name.

# Caller and Callee Semantics

When constructing child elements, or _owned_ fields, the calling convention is to provide
_this_ element as the labeled object.

When returning labels as a labeled object, the convention is to return the labels from
the labeled parent object with the name of _this_ object appended to the end of the
label set.
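
The ParsedOp change in this commit follows this convention directly
(`labels = parent.getLabels().and("op", getName())`). As a generic illustration, a child
element might implement it as in the sketch below, assuming NBLabeledElement only requires
getLabels(); the class and label names are hypothetical.

```java
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.config.NBLabels;

// A hypothetical child element which owns its own name and keeps a reference to its parent.
public final class ChildElement implements NBLabeledElement {
    private final NBLabeledElement parent;
    private final String name;

    public ChildElement(NBLabeledElement parent, String name) {
        this.parent = parent; // the caller passes itself in as the labeled object
        this.name = name;
    }

    @Override
    public NBLabels getLabels() {
        // Return the parent's labels with this element's name appended to the label set.
        return parent.getLabels().and("element", name);
    }
}
```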
@@ -100,7 +100,7 @@
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.jaxrs</groupId>
|
||||
<artifactId>jackson-jaxrs-json-provider</artifactId>
|
||||
<version>2.14.2</version>
|
||||
<version>2.15.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,7 +17,9 @@
package io.nosqlbench.engine.api.activityapi.core;

import com.codahale.metrics.Timer;
import io.nosqlbench.api.config.NBNamedElement;
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.api.engine.activityimpl.ParameterMap;
import io.nosqlbench.engine.api.activityapi.core.progress.ProgressCapable;
import io.nosqlbench.engine.api.activityapi.core.progress.StateCapable;
import io.nosqlbench.engine.api.activityapi.cyclelog.filters.IntPredicateDispenser;
@@ -25,8 +27,6 @@ import io.nosqlbench.engine.api.activityapi.errorhandling.ErrorMetrics;
import io.nosqlbench.engine.api.activityapi.input.InputDispenser;
import io.nosqlbench.engine.api.activityapi.output.OutputDispenser;
import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiter;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.api.engine.activityimpl.ParameterMap;
import io.nosqlbench.engine.api.activityimpl.SimpleActivity;
import io.nosqlbench.engine.api.activityimpl.motor.RunStateTally;

@@ -38,7 +38,7 @@ import java.util.function.Supplier;
* Provides the components needed to build and run an activity a runtime.
* The easiest way to build a useful Activity is to extend {@link SimpleActivity}.
*/
public interface Activity extends Comparable<Activity>, ActivityDefObserver, ProgressCapable, StateCapable, NBNamedElement {
public interface Activity extends Comparable<Activity>, ActivityDefObserver, ProgressCapable, StateCapable, NBLabeledElement {

/**
* Provide the activity with the controls needed to stop itself.
@@ -59,11 +59,11 @@ public interface Activity extends Comparable<Activity>, ActivityDefObserver, Pro
ActivityDef getActivityDef();

default String getAlias() {
return getActivityDef().getAlias();
return this.getActivityDef().getAlias();
}

default ParameterMap getParams() {
return getActivityDef().getParams();
return this.getActivityDef().getParams();
}

default void initActivity() {
@@ -94,6 +94,7 @@ public interface Activity extends Comparable<Activity>, ActivityDefObserver, Pro

void setOutputDispenserDelegate(OutputDispenser outputDispenser);

@Override
RunState getRunState();

void setRunState(RunState runState);
@@ -104,7 +105,7 @@ public interface Activity extends Comparable<Activity>, ActivityDefObserver, Pro
}

default String getCycleSummary() {
return getActivityDef().getCycleSummary();
return this.getActivityDef().getCycleSummary();
}

/**
@@ -214,7 +215,7 @@ public interface Activity extends Comparable<Activity>, ActivityDefObserver, Pro
int getMaxTries();

default int getHdrDigits() {
return getParams().getOptionalInteger("hdr_digits").orElse(4);
return this.getParams().getOptionalInteger("hdr_digits").orElse(4);
}

RunStateTally getRunStateTally();

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,6 +16,7 @@

package io.nosqlbench.engine.api.activityapi.core;

import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.engine.api.activityapi.input.InputDispenser;
import io.nosqlbench.engine.api.activityapi.output.OutputDispenser;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
@@ -46,8 +47,8 @@ public interface ActivityType<A extends Activity> {
* @return a distinct Activity instance for each call
*/
@SuppressWarnings("unchecked")
default A getActivity(ActivityDef activityDef) {
SimpleActivity activity = new SimpleActivity(activityDef);
default A getActivity(final ActivityDef activityDef, final NBLabeledElement parentLabels) {
final SimpleActivity activity = new SimpleActivity(activityDef, parentLabels);
return (A) activity;
}

@@ -59,31 +60,25 @@ public interface ActivityType<A extends Activity> {
* @param activities a map of existing activities
* @return a distinct activity instance for each call
*/
default Activity getAssembledActivity(ActivityDef activityDef, Map<String, Activity> activities) {
A activity = getActivity(activityDef);
default Activity getAssembledActivity(final ActivityDef activityDef, final Map<String, Activity> activities, final NBLabeledElement labels) {
final A activity = this.getActivity(activityDef, labels);

InputDispenser inputDispenser = getInputDispenser(activity);
if (inputDispenser instanceof ActivitiesAware) {
((ActivitiesAware) inputDispenser).setActivitiesMap(activities);
}
final InputDispenser inputDispenser = this.getInputDispenser(activity);
if (inputDispenser instanceof ActivitiesAware) ((ActivitiesAware) inputDispenser).setActivitiesMap(activities);
activity.setInputDispenserDelegate(inputDispenser);

ActionDispenser actionDispenser = getActionDispenser(activity);
if (actionDispenser instanceof ActivitiesAware) {
final ActionDispenser actionDispenser = this.getActionDispenser(activity);
if (actionDispenser instanceof ActivitiesAware)
((ActivitiesAware) actionDispenser).setActivitiesMap(activities);
}
activity.setActionDispenserDelegate(actionDispenser);

OutputDispenser outputDispenser = getOutputDispenser(activity).orElse(null);
if (outputDispenser !=null && outputDispenser instanceof ActivitiesAware) {
final OutputDispenser outputDispenser = this.getOutputDispenser(activity).orElse(null);
if ((null != outputDispenser) && (outputDispenser instanceof ActivitiesAware))
((ActivitiesAware) outputDispenser).setActivitiesMap(activities);
}
activity.setOutputDispenserDelegate(outputDispenser);

MotorDispenser motorDispenser = getMotorDispenser(activity, inputDispenser, actionDispenser, outputDispenser);
if (motorDispenser instanceof ActivitiesAware) {
((ActivitiesAware) motorDispenser).setActivitiesMap(activities);
}
final MotorDispenser motorDispenser = this.getMotorDispenser(activity, inputDispenser, actionDispenser, outputDispenser);
if (motorDispenser instanceof ActivitiesAware) ((ActivitiesAware) motorDispenser).setActivitiesMap(activities);
activity.setMotorDispenserDelegate(motorDispenser);

return activity;
@@ -95,7 +90,7 @@ public interface ActivityType<A extends Activity> {
* @param activity The activity instance that will parameterize the returned MarkerDispenser instance.
* @return an instance of MarkerDispenser
*/
default Optional<OutputDispenser> getOutputDispenser(A activity) {
default Optional<OutputDispenser> getOutputDispenser(final A activity) {
return CoreServices.getOutputDispenser(activity);
}

@@ -105,7 +100,7 @@ public interface ActivityType<A extends Activity> {
* @param activity The activity instance that will parameterize the returned ActionDispenser instance.
* @return an instance of ActionDispenser
*/
default ActionDispenser getActionDispenser(A activity) {
default ActionDispenser getActionDispenser(final A activity) {
return new CoreActionDispenser(activity);
}

@@ -116,15 +111,15 @@ public interface ActivityType<A extends Activity> {
* @param activity the Activity instance which will parameterize this InputDispenser
* @return the InputDispenser for the associated activity
*/
default InputDispenser getInputDispenser(A activity) {
default InputDispenser getInputDispenser(final A activity) {
return CoreServices.getInputDispenser(activity);
}

default <T> MotorDispenser<T> getMotorDispenser(
A activity,
InputDispenser inputDispenser,
ActionDispenser actionDispenser,
OutputDispenser outputDispenser) {
final A activity,
final InputDispenser inputDispenser,
final ActionDispenser actionDispenser,
final OutputDispenser outputDispenser) {
return new CoreMotorDispenser<T> (activity, inputDispenser, actionDispenser, outputDispenser);
}

@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -37,84 +37,80 @@ public class CoreActivityInstrumentation implements ActivityInstrumentation {
|
||||
private final String svcTimeSuffix;
|
||||
private final boolean strictNaming;
|
||||
|
||||
public CoreActivityInstrumentation(Activity activity) {
|
||||
public CoreActivityInstrumentation(final Activity activity) {
|
||||
this.activity = activity;
|
||||
this.def = activity.getActivityDef();
|
||||
this.params = def.getParams();
|
||||
this.strictNaming = params.getOptionalBoolean(STRICTMETRICNAMES).orElse(true);
|
||||
svcTimeSuffix = strictNaming ? SERVICE_TIME : "";
|
||||
def = activity.getActivityDef();
|
||||
params = this.def.getParams();
|
||||
strictNaming = this.params.getOptionalBoolean(CoreActivityInstrumentation.STRICTMETRICNAMES).orElse(true);
|
||||
this.svcTimeSuffix = this.strictNaming ? CoreActivityInstrumentation.SERVICE_TIME : "";
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public synchronized Timer getOrCreateInputTimer() {
|
||||
String metricName = "read_input";
|
||||
return ActivityMetrics.timer(def, metricName, activity.getHdrDigits());
|
||||
final String metricName = "read_input";
|
||||
return ActivityMetrics.timer(this.activity, metricName, this.activity.getHdrDigits());
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public synchronized Timer getOrCreateStridesServiceTimer() {
|
||||
return ActivityMetrics.timer(def, "strides" + SERVICE_TIME, activity.getHdrDigits());
|
||||
return ActivityMetrics.timer(this.activity, "strides" + CoreActivityInstrumentation.SERVICE_TIME, this.activity.getHdrDigits());
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Timer getStridesResponseTimerOrNull() {
|
||||
if (activity.getStrideLimiter()==null) {
|
||||
return null;
|
||||
}
|
||||
return ActivityMetrics.timer(def, "strides" + RESPONSE_TIME, activity.getHdrDigits());
|
||||
if (null == activity.getStrideLimiter()) return null;
|
||||
return ActivityMetrics.timer(this.activity, "strides" + CoreActivityInstrumentation.RESPONSE_TIME, this.activity.getHdrDigits());
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public synchronized Timer getOrCreateCyclesServiceTimer() {
|
||||
return ActivityMetrics.timer(def, "cycles" + svcTimeSuffix, activity.getHdrDigits());
|
||||
return ActivityMetrics.timer(this.activity, "cycles" + this.svcTimeSuffix, this.activity.getHdrDigits());
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Timer getCyclesResponseTimerOrNull() {
|
||||
if (activity.getCycleLimiter()==null) {
|
||||
return null;
|
||||
}
|
||||
String metricName = "cycles" + RESPONSE_TIME;
|
||||
return ActivityMetrics.timer(def, metricName, activity.getHdrDigits());
|
||||
if (null == activity.getCycleLimiter()) return null;
|
||||
final String metricName = "cycles" + CoreActivityInstrumentation.RESPONSE_TIME;
|
||||
return ActivityMetrics.timer(this.activity, metricName, this.activity.getHdrDigits());
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Counter getOrCreatePendingOpCounter() {
|
||||
String metricName = "pending_ops";
|
||||
return ActivityMetrics.counter(def, metricName);
|
||||
final String metricName = "pending_ops";
|
||||
return ActivityMetrics.counter(this.activity, metricName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Counter getOrCreateOpTrackerBlockedCounter() {
|
||||
String metricName = "optracker_blocked";
|
||||
return ActivityMetrics.counter(def, metricName);
|
||||
final String metricName = "optracker_blocked";
|
||||
return ActivityMetrics.counter(this.activity, metricName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Timer getOrCreateBindTimer() {
|
||||
return ActivityMetrics.timer(def, "bind", activity.getHdrDigits());
|
||||
return ActivityMetrics.timer(this.activity, "bind", this.activity.getHdrDigits());
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Timer getOrCreateExecuteTimer() {
|
||||
return ActivityMetrics.timer(def,"execute", activity.getHdrDigits());
|
||||
return ActivityMetrics.timer(this.activity,"execute", this.activity.getHdrDigits());
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Timer getOrCreateResultTimer() {
|
||||
return ActivityMetrics.timer(def,"result", activity.getHdrDigits());
|
||||
return ActivityMetrics.timer(this.activity,"result", this.activity.getHdrDigits());
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Timer getOrCreateResultSuccessTimer() {
|
||||
return ActivityMetrics.timer(def,"result-success", activity.getHdrDigits());
|
||||
return ActivityMetrics.timer(this.activity,"result-success", this.activity.getHdrDigits());
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Histogram getOrCreateTriesHistogram() {
|
||||
return ActivityMetrics.histogram(def,"tries", activity.getHdrDigits());
|
||||
return ActivityMetrics.histogram(this.activity,"tries", this.activity.getHdrDigits());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.errorhandling;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.engine.api.metrics.ExceptionCountMetrics;
|
||||
import io.nosqlbench.engine.api.metrics.ExceptionExpectedResultVerificationMetrics;
|
||||
@@ -27,43 +28,37 @@ import java.util.function.Supplier;
|
||||
|
||||
public class ErrorMetrics {
|
||||
|
||||
private final ActivityDef activityDef;
|
||||
private final NBLabeledElement parentLabels;
|
||||
private ExceptionCountMetrics exceptionCountMetrics;
|
||||
private ExceptionHistoMetrics exceptionHistoMetrics;
|
||||
private ExceptionMeterMetrics exceptionMeterMetrics;
|
||||
private ExceptionTimerMetrics exceptionTimerMetrics;
|
||||
private ExceptionExpectedResultVerificationMetrics exceptionExpectedResultVerificationMetrics;
|
||||
|
||||
public ErrorMetrics(ActivityDef activityDef) {
|
||||
this.activityDef = activityDef;
|
||||
public ErrorMetrics(final NBLabeledElement parentLabels) {
|
||||
this.parentLabels = parentLabels;
|
||||
}
|
||||
|
||||
public synchronized ExceptionCountMetrics getExceptionCountMetrics() {
|
||||
if (exceptionCountMetrics == null) {
|
||||
exceptionCountMetrics = new ExceptionCountMetrics(activityDef);
|
||||
}
|
||||
return exceptionCountMetrics;
|
||||
if (null == exceptionCountMetrics) this.exceptionCountMetrics = new ExceptionCountMetrics(this.parentLabels);
|
||||
return this.exceptionCountMetrics;
|
||||
}
|
||||
|
||||
public synchronized ExceptionHistoMetrics getExceptionHistoMetrics() {
|
||||
if (exceptionHistoMetrics == null) {
|
||||
exceptionHistoMetrics = new ExceptionHistoMetrics(activityDef);
|
||||
}
|
||||
return exceptionHistoMetrics;
|
||||
if (null == exceptionHistoMetrics)
|
||||
this.exceptionHistoMetrics = new ExceptionHistoMetrics(this.parentLabels, ActivityDef.parseActivityDef(""));
|
||||
return this.exceptionHistoMetrics;
|
||||
}
|
||||
|
||||
public synchronized ExceptionMeterMetrics getExceptionMeterMetrics() {
|
||||
if (exceptionMeterMetrics == null) {
|
||||
exceptionMeterMetrics = new ExceptionMeterMetrics(activityDef);
|
||||
}
|
||||
return exceptionMeterMetrics;
|
||||
if (null == exceptionMeterMetrics) this.exceptionMeterMetrics = new ExceptionMeterMetrics(this.parentLabels);
|
||||
return this.exceptionMeterMetrics;
|
||||
}
|
||||
|
||||
public synchronized ExceptionTimerMetrics getExceptionTimerMetrics() {
|
||||
if (exceptionTimerMetrics == null) {
|
||||
exceptionTimerMetrics = new ExceptionTimerMetrics(activityDef);
|
||||
}
|
||||
return exceptionTimerMetrics;
|
||||
if (null == exceptionTimerMetrics)
|
||||
this.exceptionTimerMetrics = new ExceptionTimerMetrics(this.parentLabels, ActivityDef.parseActivityDef(""));
|
||||
return this.exceptionTimerMetrics;
|
||||
}
|
||||
|
||||
public synchronized ExceptionExpectedResultVerificationMetrics getExceptionExpectedResultVerificationMetrics() {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,9 +17,13 @@
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import com.codahale.metrics.Gauge;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.engine.api.activityapi.core.Startable;
|
||||
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
|
||||
import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiters.BurstRateGauge;
|
||||
import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiters.RateGauge;
|
||||
import io.nosqlbench.engine.api.activityapi.ratelimits.RateLimiters.WaitTimeGauge;
|
||||
import io.nosqlbench.nb.annotations.Service;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
@@ -77,10 +81,10 @@ import java.util.concurrent.atomic.AtomicLong;
|
||||
* </p>
|
||||
*/
|
||||
@Service(value = RateLimiter.class, selector = "hybrid")
|
||||
public class HybridRateLimiter implements Startable, RateLimiter {
|
||||
public class HybridRateLimiter implements RateLimiter {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger(HybridRateLimiter.class);
|
||||
private NBNamedElement named;
|
||||
private static final Logger logger = LogManager.getLogger(HybridRateLimiter.class);
|
||||
private NBLabeledElement named;
|
||||
|
||||
//private volatile TokenFiller filler;
|
||||
private volatile long starttime;
|
||||
@@ -104,93 +108,87 @@ public class HybridRateLimiter implements Startable, RateLimiter {
|
||||
protected HybridRateLimiter() {
|
||||
}
|
||||
|
||||
public HybridRateLimiter(NBNamedElement named, String label, RateSpec rateSpec) {
|
||||
setLabel(label);
|
||||
init(named);
|
||||
public HybridRateLimiter(final NBLabeledElement named, final String label, final RateSpec rateSpec) {
|
||||
this.label = label;
|
||||
this.init(named);
|
||||
this.named = named;
|
||||
this.applyRateSpec(rateSpec);
|
||||
applyRateSpec(rateSpec);
|
||||
}
|
||||
|
||||
protected void setLabel(String label) {
|
||||
protected void setLabel(final String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long maybeWaitForOp() {
|
||||
return tokens.blockAndTake();
|
||||
return this.tokens.blockAndTake();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getTotalWaitTime() {
|
||||
return this.cumulativeWaitTimeNanos.get() + getWaitTime();
|
||||
return cumulativeWaitTimeNanos.get() + this.getWaitTime();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getWaitTime() {
|
||||
return tokens.getWaitTime();
|
||||
return this.tokens.getWaitTime();
|
||||
}
|
||||
|
||||
@Override
|
||||
public RateSpec getRateSpec() {
|
||||
return this.rateSpec;
|
||||
return rateSpec;
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void applyRateSpec(RateSpec updatingRateSpec) {
|
||||
public synchronized void applyRateSpec(final RateSpec updatingRateSpec) {
|
||||
|
||||
if (updatingRateSpec == null) {
|
||||
throw new RuntimeException("RateSpec must be defined");
|
||||
}
|
||||
if (null == updatingRateSpec) throw new RuntimeException("RateSpec must be defined");
|
||||
|
||||
if (updatingRateSpec.equals(this.rateSpec) && !updatingRateSpec.isRestart()) {
|
||||
return;
|
||||
}
|
||||
if (updatingRateSpec.equals(rateSpec) && !updatingRateSpec.isRestart()) return;
|
||||
|
||||
this.rateSpec = updatingRateSpec;
|
||||
this.tokens = (this.tokens == null) ? new ThreadDrivenTokenPool(rateSpec, named) : this.tokens.apply(named, rateSpec);
|
||||
rateSpec = updatingRateSpec;
|
||||
tokens = null == this.tokens ? new ThreadDrivenTokenPool(this.rateSpec, this.named) : tokens.apply(this.named, this.rateSpec);
|
||||
// this.filler = (this.filler == null) ? new TokenFiller(rateSpec, activityDef) : filler.apply(rateSpec);
|
||||
// this.tokens = this.filler.getTokenPool();
|
||||
|
||||
if (this.state == State.Idle && updatingRateSpec.isAutoStart()) {
|
||||
this.start();
|
||||
} else if (updatingRateSpec.isRestart()) {
|
||||
this.restart();
|
||||
}
|
||||
if ((State.Idle == this.state) && updatingRateSpec.isAutoStart()) start();
|
||||
else if (updatingRateSpec.isRestart()) restart();
|
||||
}
|
||||
|
||||
|
||||
protected void init(NBNamedElement activityDef) {
|
||||
this.delayGauge = ActivityMetrics.gauge(activityDef, label + ".waittime", new RateLimiters.WaitTimeGauge(this));
|
||||
this.avgRateGauge = ActivityMetrics.gauge(activityDef, label + ".config.cyclerate", new RateLimiters.RateGauge(this));
|
||||
this.burstRateGauge = ActivityMetrics.gauge(activityDef, label + ".config.burstrate", new RateLimiters.BurstRateGauge(this));
|
||||
protected void init(final NBLabeledElement activityDef) {
|
||||
delayGauge = ActivityMetrics.gauge(activityDef, this.label + ".waittime", new WaitTimeGauge(this));
|
||||
avgRateGauge = ActivityMetrics.gauge(activityDef, this.label + ".config.cyclerate", new RateGauge(this));
|
||||
burstRateGauge = ActivityMetrics.gauge(activityDef, this.label + ".config.burstrate", new BurstRateGauge(this));
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void start() {
|
||||
|
||||
switch (state) {
|
||||
switch (this.state) {
|
||||
case Started:
|
||||
// logger.warn("Tried to start a rate limiter that was already started. If this is desired, use restart() instead");
|
||||
// TODO: Find a better way to warn about spurious rate limiter
|
||||
// starts, since the check condition was not properly isolated
|
||||
break;
|
||||
case Idle:
|
||||
long nanos = getNanoClockTime();
|
||||
this.starttime = nanos;
|
||||
this.tokens.start();
|
||||
state = State.Started;
|
||||
final long nanos = this.getNanoClockTime();
|
||||
starttime = nanos;
|
||||
tokens.start();
|
||||
this.state = State.Started;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
public synchronized long restart() {
|
||||
switch (state) {
|
||||
switch (this.state) {
|
||||
case Idle:
|
||||
this.start();
|
||||
start();
|
||||
return 0L;
|
||||
case Started:
|
||||
long accumulatedWaitSinceLastStart = cumulativeWaitTimeNanos.get();
|
||||
cumulativeWaitTimeNanos.set(0L);
|
||||
return this.tokens.restart() + accumulatedWaitSinceLastStart;
|
||||
final long accumulatedWaitSinceLastStart = this.cumulativeWaitTimeNanos.get();
|
||||
this.cumulativeWaitTimeNanos.set(0L);
|
||||
return tokens.restart() + accumulatedWaitSinceLastStart;
|
||||
default:
|
||||
return 0L;
|
||||
}
|
||||
@@ -202,9 +200,9 @@ public class HybridRateLimiter implements Startable, RateLimiter {
|
||||
}
|
||||
|
||||
private synchronized void checkpointCumulativeWaitTime() {
|
||||
long nanos = getNanoClockTime();
|
||||
this.starttime = nanos;
|
||||
cumulativeWaitTimeNanos.addAndGet(getWaitTime());
|
||||
final long nanos = this.getNanoClockTime();
|
||||
starttime = nanos;
|
||||
this.cumulativeWaitTimeNanos.addAndGet(this.getWaitTime());
|
||||
}
|
||||
|
||||
protected long getNanoClockTime() {
|
||||
@@ -213,17 +211,11 @@ public class HybridRateLimiter implements Startable, RateLimiter {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder(HybridRateLimiter.class.getSimpleName());
|
||||
final StringBuilder sb = new StringBuilder(HybridRateLimiter.class.getSimpleName());
|
||||
sb.append("{\n");
|
||||
if (this.getRateSpec() != null) {
|
||||
sb.append(" spec:").append(this.getRateSpec().toString());
|
||||
}
|
||||
if (this.tokens != null) {
|
||||
sb.append(",\n tokenpool:").append(this.tokens.toString());
|
||||
}
|
||||
if (this.state != null) {
|
||||
sb.append(",\n state:'").append(this.state).append("'");
|
||||
}
|
||||
if (null != this.getRateSpec()) sb.append(" spec:").append(rateSpec.toString());
|
||||
if (null != this.tokens) sb.append(",\n tokenpool:").append(tokens);
|
||||
if (null != this.state) sb.append(",\n state:'").append(state).append('\'');
|
||||
sb.append("\n}");
|
||||
return sb.toString();
|
||||
}
|
||||
@@ -240,16 +232,14 @@ public class HybridRateLimiter implements Startable, RateLimiter {
|
||||
private class PoolGauge implements Gauge<Long> {
|
||||
private final HybridRateLimiter rl;
|
||||
|
||||
public PoolGauge(HybridRateLimiter hybridRateLimiter) {
|
||||
this.rl = hybridRateLimiter;
|
||||
public PoolGauge(final HybridRateLimiter hybridRateLimiter) {
|
||||
rl = hybridRateLimiter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long getValue() {
|
||||
TokenPool pool = rl.tokens;
|
||||
if (pool==null) {
|
||||
return 0L;
|
||||
}
|
||||
final TokenPool pool = this.rl.tokens;
|
||||
if (null == pool) return 0L;
|
||||
return pool.getWaitTime();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,6 +17,7 @@
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
@@ -25,6 +26,7 @@ import org.apache.logging.log4j.Logger;
|
||||
import java.io.RandomAccessFile;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.channels.FileChannel;
|
||||
import java.nio.channels.FileChannel.MapMode;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.locks.Condition;
|
||||
import java.util.concurrent.locks.Lock;
|
||||
@@ -55,9 +57,10 @@ import static io.nosqlbench.engine.api.util.Colors.*;
|
||||
*/
|
||||
public class InlineTokenPool {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger(InlineTokenPool.class);
|
||||
private static final Logger logger = LogManager.getLogger(InlineTokenPool.class);
|
||||
|
||||
public static final double MIN_CONCURRENT_OPS = 5;
|
||||
private final NBLabeledElement parentLabels;
|
||||
|
||||
// Size limit of active pool
|
||||
private long maxActivePoolSize;
|
||||
@@ -83,7 +86,7 @@ public class InlineTokenPool {
|
||||
// metrics for refill
|
||||
private final Timer refillTimer;
|
||||
// update rate for refiller
|
||||
private final long interval = (long) 1E6;
|
||||
private final long interval = (long) 1.0E6;
|
||||
|
||||
|
||||
private RateSpec rateSpec;
|
||||
@@ -91,10 +94,10 @@ public class InlineTokenPool {
|
||||
// private long debugRate=1000000000;
|
||||
|
||||
// Total number of thread blocks that occured since this token pool was started
|
||||
private long blocks = 0L;
|
||||
private long blocks;
|
||||
|
||||
private final Lock lock = new ReentrantLock();
|
||||
private final Condition lockheld = lock.newCondition();
|
||||
private final Condition lockheld = this.lock.newCondition();
|
||||
|
||||
/**
|
||||
* This constructor tries to pick reasonable defaults for the token pool for
|
||||
@@ -103,20 +106,22 @@ public class InlineTokenPool {
|
||||
*
|
||||
* @param rateSpec a {@link RateSpec}
|
||||
*/
|
||||
public InlineTokenPool(RateSpec rateSpec, ActivityDef def) {
|
||||
ByteBuffer logbuf = getBuffer();
|
||||
apply(rateSpec);
|
||||
logger.debug("initialized token pool: " + this + " for rate:" + rateSpec);
|
||||
this.refillTimer = ActivityMetrics.timer(def, "tokenfiller",4);
|
||||
public InlineTokenPool(final RateSpec rateSpec, final ActivityDef def, final NBLabeledElement parentLabels) {
|
||||
this.parentLabels = parentLabels;
|
||||
final ByteBuffer logbuf = this.getBuffer();
|
||||
this.apply(rateSpec);
|
||||
InlineTokenPool.logger.debug("initialized token pool: {} for rate:{}", this, rateSpec);
|
||||
refillTimer = ActivityMetrics.timer(parentLabels, "tokenfiller",4);
|
||||
}
|
||||
|
||||
public InlineTokenPool(long poolsize, double burstRatio, ActivityDef def) {
|
||||
ByteBuffer logbuf = getBuffer();
|
||||
this.maxActivePoolSize = poolsize;
|
||||
public InlineTokenPool(final long poolsize, final double burstRatio, final ActivityDef def, final NBLabeledElement parentLabels) {
|
||||
this.parentLabels = parentLabels;
|
||||
final ByteBuffer logbuf = this.getBuffer();
|
||||
maxActivePoolSize = poolsize;
|
||||
this.burstRatio = burstRatio;
|
||||
this.maxActiveAndBurstSize = (long) (maxActivePoolSize * burstRatio);
|
||||
this.maxBurstPoolSize = maxActiveAndBurstSize - maxActivePoolSize;
|
||||
this.refillTimer = ActivityMetrics.timer(def, "tokenfiller",4);
|
||||
maxActiveAndBurstSize = (long) (this.maxActivePoolSize * burstRatio);
|
||||
maxBurstPoolSize = this.maxActiveAndBurstSize - this.maxActivePoolSize;
|
||||
refillTimer = ActivityMetrics.timer(parentLabels, "tokenfiller",4);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -125,21 +130,21 @@ public class InlineTokenPool {
|
||||
*
|
||||
* @param rateSpec The rate specifier.
|
||||
*/
|
||||
public synchronized void apply(RateSpec rateSpec) {
|
||||
public synchronized void apply(final RateSpec rateSpec) {
|
||||
this.rateSpec = rateSpec;
|
||||
// maxActivePool is set to the higher of 1M or however many nanos are needed for 2 ops to be buffered
|
||||
this.maxActivePoolSize = Math.max((long) 1E6, (long) ((double) rateSpec.getNanosPerOp() * MIN_CONCURRENT_OPS));
|
||||
this.maxActiveAndBurstSize = (long) (maxActivePoolSize * rateSpec.getBurstRatio());
|
||||
this.burstRatio = rateSpec.getBurstRatio();
|
||||
maxActivePoolSize = Math.max((long) 1.0E6, (long) (rateSpec.getNanosPerOp() * InlineTokenPool.MIN_CONCURRENT_OPS));
|
||||
maxActiveAndBurstSize = (long) (this.maxActivePoolSize * rateSpec.getBurstRatio());
|
||||
burstRatio = rateSpec.getBurstRatio();
|
||||
|
||||
this.maxBurstPoolSize = maxActiveAndBurstSize - maxActivePoolSize;
|
||||
this.nanosPerOp = rateSpec.getNanosPerOp();
|
||||
notifyAll();
|
||||
maxBurstPoolSize = this.maxActiveAndBurstSize - this.maxActivePoolSize;
|
||||
nanosPerOp = rateSpec.getNanosPerOp();
|
||||
this.notifyAll();
|
||||
}
|
||||
|
||||
|
||||
public double getBurstRatio() {
|
||||
return burstRatio;
|
||||
return this.burstRatio;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -149,9 +154,9 @@ public class InlineTokenPool {
|
||||
* @param amt tokens requested
|
||||
* @return actual number of tokens removed, greater to or equal to zero
|
||||
*/
|
||||
public synchronized long takeUpTo(long amt) {
|
||||
long take = Math.min(amt, activePool);
|
||||
activePool -= take;
|
||||
public synchronized long takeUpTo(final long amt) {
|
||||
final long take = Math.min(amt, this.activePool);
|
||||
this.activePool -= take;
|
||||
return take;
|
||||
}
|
||||
|
||||
@@ -163,30 +168,23 @@ public class InlineTokenPool {
|
||||
*/
|
||||
public long blockAndTake() {
|
||||
synchronized (this) {
|
||||
if (activePool >= nanosPerOp) {
|
||||
activePool -= nanosPerOp;
|
||||
return waitingPool + activePool;
|
||||
if (this.activePool >= this.nanosPerOp) {
|
||||
this.activePool -= this.nanosPerOp;
|
||||
return this.waitingPool + this.activePool;
|
||||
}
|
||||
}
|
||||
while (true) {
|
||||
if (lock.tryLock()) {
|
||||
try {
|
||||
while (activePool < nanosPerOp) {
|
||||
dorefill();
|
||||
}
|
||||
lockheld.signal();
|
||||
lockheld.signal();
|
||||
} finally {
|
||||
lock.unlock();
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
lockheld.await();
|
||||
} catch (InterruptedException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
while (true) if (this.lock.tryLock()) try {
|
||||
while (this.activePool < this.nanosPerOp) this.dorefill();
|
||||
this.lockheld.signal();
|
||||
this.lockheld.signal();
|
||||
} finally {
|
||||
this.lock.unlock();
|
||||
}
|
||||
else try {
|
||||
this.lockheld.await();
|
||||
} catch (final InterruptedException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
// while (activePool < nanosPerOp) {
|
||||
// blocks++;
|
||||
// //System.out.println(ANSI_BrightRed + "waiting for " + amt + "/" + activePool + " of max " + maxActivePool + ANSI_Reset);
|
||||
@@ -205,54 +203,52 @@ public class InlineTokenPool {
|
||||
// return waitingPool + activePool;
|
||||
}
|
||||
|
||||
public synchronized long blockAndTakeOps(long ops) {
|
||||
long totalNanosNeeded = ops * nanosPerOp;
|
||||
while (activePool < totalNanosNeeded) {
|
||||
blocks++;
|
||||
public synchronized long blockAndTakeOps(final long ops) {
|
||||
final long totalNanosNeeded = ops * this.nanosPerOp;
|
||||
while (this.activePool < totalNanosNeeded) {
|
||||
this.blocks++;
|
||||
//System.out.println(ANSI_BrightRed + "waiting for " + amt + "/" + activePool + " of max " + maxActivePool + ANSI_Reset);
|
||||
try {
|
||||
wait();
|
||||
this.wait();
|
||||
// wait(maxActivePoolSize / 1000000, (int) maxActivePoolSize % 1000000);
|
||||
} catch (InterruptedException ignored) {
|
||||
} catch (Exception e) {
|
||||
} catch (final InterruptedException ignored) {
|
||||
} catch (final Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
//System.out.println("waited for " + amt + "/" + activePool + " tokens");
|
||||
}
|
||||
//System.out.println(ANSI_BrightYellow + "taking " + amt + "/" + activePool + ANSI_Reset);
|
||||
|
||||
activePool -= totalNanosNeeded;
|
||||
return waitingPool + activePool;
|
||||
this.activePool -= totalNanosNeeded;
|
||||
return this.waitingPool + this.activePool;
|
||||
}
|
||||
|
||||
public synchronized long blockAndTake(long tokens) {
|
||||
while (activePool < tokens) {
|
||||
//System.out.println(ANSI_BrightRed + "waiting for " + amt + "/" + activePool + " of max " + maxActivePool + ANSI_Reset);
|
||||
try {
|
||||
wait();
|
||||
public synchronized long blockAndTake(final long tokens) {
|
||||
//System.out.println(ANSI_BrightRed + "waiting for " + amt + "/" + activePool + " of max " + maxActivePool + ANSI_Reset);
|
||||
//System.out.println("waited for " + amt + "/" + activePool + " tokens");
|
||||
while (this.activePool < tokens) try {
|
||||
this.wait();
|
||||
// wait(maxActivePoolSize / 1000000, (int) maxActivePoolSize % 1000000);
|
||||
} catch (InterruptedException ignored) {
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
//System.out.println("waited for " + amt + "/" + activePool + " tokens");
|
||||
} catch (final InterruptedException ignored) {
|
||||
} catch (final Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
//System.out.println(ANSI_BrightYellow + "taking " + amt + "/" + activePool + ANSI_Reset);
|
||||
|
||||
activePool -= tokens;
|
||||
return waitingPool + activePool;
|
||||
this.activePool -= tokens;
|
||||
return this.waitingPool + this.activePool;
|
||||
}
|
||||
|
||||
public long getWaitTime() {
|
||||
return activePool + waitingPool;
|
||||
return this.activePool + this.waitingPool;
|
||||
}
|
||||
|
||||
public long getWaitPool() {
|
||||
return waitingPool;
|
||||
return this.waitingPool;
|
||||
}
|
||||
|
||||
public long getActivePool() {
|
||||
return activePool;
|
||||
return this.activePool;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -269,70 +265,67 @@ public class InlineTokenPool {
|
||||
* @param newTokens The number of new tokens to add to the token pools
|
||||
* @return the total number of tokens in all pools
|
||||
*/
|
||||
public synchronized long refill(long newTokens) {
|
||||
boolean debugthis = false;
|
||||
public synchronized long refill(final long newTokens) {
|
||||
final boolean debugthis = false;
|
||||
// long debugAt = System.nanoTime();
|
||||
// if (debugAt>debugTrigger+debugRate) {
|
||||
// debugTrigger=debugAt;
|
||||
// debugthis=true;
|
||||
// }
|
||||
|
||||
long needed = Math.max(maxActivePoolSize - activePool, 0L);
|
||||
long allocatedToActivePool = Math.min(newTokens, needed);
|
||||
activePool += allocatedToActivePool;
|
||||
final long needed = Math.max(this.maxActivePoolSize - this.activePool, 0L);
|
||||
final long allocatedToActivePool = Math.min(newTokens, needed);
|
||||
this.activePool += allocatedToActivePool;
|
||||
|
||||
|
||||
// overflow logic
|
||||
long allocatedToOverflowPool = newTokens - allocatedToActivePool;
|
||||
waitingPool += allocatedToOverflowPool;
|
||||
final long allocatedToOverflowPool = newTokens - allocatedToActivePool;
|
||||
this.waitingPool += allocatedToOverflowPool;
|
||||
|
||||
// backfill logic
|
||||
double refillFactor = Math.min((double) newTokens / maxActivePoolSize, 1.0D);
|
||||
long burstFillAllowed = (long) (refillFactor * maxBurstPoolSize);
|
||||
final double refillFactor = Math.min((double) newTokens / this.maxActivePoolSize, 1.0D);
|
||||
long burstFillAllowed = (long) (refillFactor * this.maxBurstPoolSize);
|
||||
|
||||
burstFillAllowed = Math.min(maxActiveAndBurstSize - activePool, burstFillAllowed);
|
||||
long burstFill = Math.min(burstFillAllowed, waitingPool);
|
||||
burstFillAllowed = Math.min(this.maxActiveAndBurstSize - this.activePool, burstFillAllowed);
|
||||
final long burstFill = Math.min(burstFillAllowed, this.waitingPool);
|
||||
|
||||
waitingPool -= burstFill;
|
||||
activePool += burstFill;
|
||||
this.waitingPool -= burstFill;
|
||||
this.activePool += burstFill;
|
||||
|
||||
if (debugthis) {
|
||||
System.out.print(this);
|
||||
System.out.print(ANSI_BrightBlue + " adding=" + allocatedToActivePool);
|
||||
if (allocatedToOverflowPool > 0) {
|
||||
if (0 < allocatedToOverflowPool)
|
||||
System.out.print(ANSI_Red + " OVERFLOW:" + allocatedToOverflowPool + ANSI_Reset);
|
||||
}
|
||||
if (burstFill > 0) {
|
||||
System.out.print(ANSI_BrightGreen + " BACKFILL:" + burstFill + ANSI_Reset);
|
||||
}
|
||||
if (0 < burstFill) System.out.print(ANSI_BrightGreen + " BACKFILL:" + burstFill + ANSI_Reset);
|
||||
System.out.println();
|
||||
}
|
||||
|
||||
//System.out.println(this);
|
||||
notifyAll();
|
||||
this.notifyAll();
|
||||
|
||||
return activePool + waitingPool;
|
||||
return this.activePool + this.waitingPool;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Tokens: active=" + activePool + "/" + maxActivePoolSize
|
||||
return "Tokens: active=" + this.activePool + '/' + this.maxActivePoolSize
|
||||
+ String.format(
|
||||
" (%3.1f%%)A (%3.1f%%)B ",
|
||||
(((double) activePool / (double) maxActivePoolSize) * 100.0),
|
||||
(((double) activePool / (double) maxActiveAndBurstSize) * 100.0)) + " waiting=" + waitingPool +
|
||||
" blocks=" + blocks +
|
||||
" rateSpec:" + ((rateSpec != null) ? rateSpec.toString() : "NULL");
|
||||
(double) this.activePool / this.maxActivePoolSize * 100.0,
|
||||
(double) this.activePool / this.maxActiveAndBurstSize * 100.0) + " waiting=" + this.waitingPool +
|
||||
" blocks=" + this.blocks +
|
||||
" rateSpec:" + (null != rateSpec ? this.rateSpec.toString() : "NULL");
|
||||
}
|
||||
|
||||
public RateSpec getRateSpec() {
|
||||
return rateSpec;
|
||||
return this.rateSpec;
|
||||
}
|
||||
|
||||
public synchronized long restart() {
|
||||
long wait = activePool + waitingPool;
|
||||
activePool = 0L;
|
||||
waitingPool = 0L;
|
||||
final long wait = this.activePool + this.waitingPool;
|
||||
this.activePool = 0L;
|
||||
this.waitingPool = 0L;
|
||||
return wait;
|
||||
}
|
||||
|
||||
@@ -340,33 +333,33 @@ public class InlineTokenPool {
|
||||
RandomAccessFile image = null;
|
||||
try {
|
||||
image = new RandomAccessFile("tokenbucket.binlog", "rw");
|
||||
ByteBuffer mbb = image.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, image.length());
|
||||
final ByteBuffer mbb = image.getChannel().map(MapMode.READ_WRITE, 0, image.length());
|
||||
return mbb;
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public synchronized void dorefill() {
|
||||
lastRefillAt = System.nanoTime();
|
||||
long nextRefillTime = lastRefillAt + interval;
|
||||
this.lastRefillAt = System.nanoTime();
|
||||
final long nextRefillTime = this.lastRefillAt + this.interval;
|
||||
long thisRefillTime = System.nanoTime();
|
||||
while (thisRefillTime < nextRefillTime) {
|
||||
// while (thisRefillTime < lastRefillAt + interval) {
|
||||
long parkfor = Math.max(nextRefillTime - thisRefillTime, 0L);
|
||||
final long parkfor = Math.max(nextRefillTime - thisRefillTime, 0L);
|
||||
//System.out.println(ANSI_Blue + "parking for " + parkfor + "ns" + ANSI_Reset);
|
||||
LockSupport.parkNanos(parkfor);
|
||||
thisRefillTime = System.nanoTime();
|
||||
}
|
||||
|
||||
// this.times[iteration]=thisRefillTime;
|
||||
long delta = thisRefillTime - lastRefillAt;
|
||||
final long delta = thisRefillTime - this.lastRefillAt;
|
||||
// this.amounts[iteration]=delta;
|
||||
lastRefillAt = thisRefillTime;
|
||||
this.lastRefillAt = thisRefillTime;
|
||||
|
||||
//System.out.println(this);
|
||||
refill(delta);
|
||||
refillTimer.update(delta, TimeUnit.NANOSECONDS);
|
||||
this.refill(delta);
|
||||
this.refillTimer.update(delta, TimeUnit.NANOSECONDS);
|
||||
// iteration++;
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,68 +17,68 @@
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import com.codahale.metrics.Gauge;
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
public class RateLimiters {
|
||||
private final static Logger logger = LogManager.getLogger(RateLimiters.class);
|
||||
public enum RateLimiters {
|
||||
;
|
||||
private static final Logger logger = LogManager.getLogger(RateLimiters.class);
|
||||
|
||||
public static synchronized RateLimiter createOrUpdate(NBNamedElement def, String label, RateLimiter extant, RateSpec spec) {
|
||||
public static synchronized RateLimiter createOrUpdate(final NBLabeledElement def, final String label, final RateLimiter extant, final RateSpec spec) {
|
||||
|
||||
if (extant == null) {
|
||||
RateLimiter rateLimiter= new HybridRateLimiter(def, label, spec);
|
||||
if (null == extant) {
|
||||
final RateLimiter rateLimiter= new HybridRateLimiter(def, label, spec);
|
||||
|
||||
logger.info(() -> "Using rate limiter: " + rateLimiter);
|
||||
RateLimiters.logger.info(() -> "Using rate limiter: " + rateLimiter);
|
||||
return rateLimiter;
|
||||
} else {
|
||||
extant.applyRateSpec(spec);
|
||||
logger.info(() -> "Updated rate limiter: " + extant);
|
||||
return extant;
|
||||
}
|
||||
extant.applyRateSpec(spec);
|
||||
RateLimiters.logger.info(() -> "Updated rate limiter: " + extant);
|
||||
return extant;
|
||||
}
|
||||
|
||||
public static synchronized RateLimiter create(NBNamedElement def, String label, String specString) {
|
||||
return createOrUpdate(def, label, null, new RateSpec(specString));
|
||||
public static synchronized RateLimiter create(final NBLabeledElement def, final String label, final String specString) {
|
||||
return RateLimiters.createOrUpdate(def, label, null, new RateSpec(specString));
|
||||
}
|
||||
|
||||
public static class WaitTimeGauge implements Gauge<Long> {
|
||||
|
||||
private final RateLimiter rateLimiter;
|
||||
|
||||
public WaitTimeGauge(RateLimiter rateLimiter) {
|
||||
public WaitTimeGauge(final RateLimiter rateLimiter) {
|
||||
this.rateLimiter = rateLimiter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long getValue() {
|
||||
return rateLimiter.getTotalWaitTime();
|
||||
return this.rateLimiter.getTotalWaitTime();
|
||||
}
|
||||
}
|
||||
|
||||
public static class RateGauge implements Gauge<Double> {
|
||||
private final RateLimiter rateLimiter;
|
||||
|
||||
public RateGauge(RateLimiter rateLimiter) {
|
||||
public RateGauge(final RateLimiter rateLimiter) {
|
||||
this.rateLimiter = rateLimiter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Double getValue() {
|
||||
return rateLimiter.getRateSpec().opsPerSec;
|
||||
return this.rateLimiter.getRateSpec().opsPerSec;
|
||||
}
|
||||
}
|
||||
|
||||
public static class BurstRateGauge implements Gauge<Double> {
|
||||
private final RateLimiter rateLimiter;
|
||||
|
||||
public BurstRateGauge(RateLimiter rateLimiter) {
|
||||
public BurstRateGauge(final RateLimiter rateLimiter) {
|
||||
this.rateLimiter = rateLimiter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Double getValue() {
|
||||
return rateLimiter.getRateSpec().getBurstRatio() * rateLimiter.getRateSpec().getRate();
|
||||
return this.rateLimiter.getRateSpec().getBurstRatio() * this.rateLimiter.getRateSpec().getRate();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.nb.annotations.Service;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
@@ -46,7 +46,7 @@ import static io.nosqlbench.engine.api.util.Colors.*;
|
||||
@Service(value= TokenPool.class, selector="threaded")
|
||||
public class ThreadDrivenTokenPool implements TokenPool {
|
||||
|
||||
private final static Logger logger = LogManager.getLogger(ThreadDrivenTokenPool.class);
|
||||
private static final Logger logger = LogManager.getLogger(ThreadDrivenTokenPool.class);
|
||||
|
||||
public static final double MIN_CONCURRENT_OPS = 2;
|
||||
|
||||
@@ -59,7 +59,7 @@ public class ThreadDrivenTokenPool implements TokenPool {
|
||||
private volatile long waitingPool;
|
||||
private RateSpec rateSpec;
|
||||
private long nanosPerOp;
|
||||
private long blocks = 0L;
|
||||
private long blocks;
|
||||
|
||||
private TokenFiller filler;
|
||||
|
||||
@@ -70,9 +70,9 @@ public class ThreadDrivenTokenPool implements TokenPool {
|
||||
*
|
||||
* @param rateSpec a {@link RateSpec}
|
||||
*/
|
||||
public ThreadDrivenTokenPool(RateSpec rateSpec, NBNamedElement named) {
|
||||
apply(named,rateSpec);
|
||||
logger.debug(() -> "initialized token pool: " + this + " for rate:" + rateSpec);
|
||||
public ThreadDrivenTokenPool(final RateSpec rateSpec, final NBLabeledElement named) {
|
||||
this.apply(named,rateSpec);
|
||||
ThreadDrivenTokenPool.logger.debug(() -> "initialized token pool: " + this + " for rate:" + rateSpec);
|
||||
// filler.start();
|
||||
}
|
||||
|
||||
@@ -83,23 +83,23 @@ public class ThreadDrivenTokenPool implements TokenPool {
|
||||
* @param rateSpec The rate specifier.
|
||||
*/
|
||||
@Override
|
||||
public synchronized TokenPool apply(NBNamedElement named, RateSpec rateSpec) {
|
||||
public synchronized TokenPool apply(final NBLabeledElement labeled, final RateSpec rateSpec) {
|
||||
this.rateSpec = rateSpec;
|
||||
this.maxActivePool = Math.max((long) 1E6, (long) ((double) rateSpec.getNanosPerOp() * MIN_CONCURRENT_OPS));
|
||||
this.maxOverActivePool = (long) (maxActivePool * rateSpec.getBurstRatio());
|
||||
this.burstRatio = rateSpec.getBurstRatio();
|
||||
maxActivePool = Math.max((long) 1.0E6, (long) (rateSpec.getNanosPerOp() * ThreadDrivenTokenPool.MIN_CONCURRENT_OPS));
|
||||
maxOverActivePool = (long) (this.maxActivePool * rateSpec.getBurstRatio());
|
||||
burstRatio = rateSpec.getBurstRatio();
|
||||
|
||||
this.burstPoolSize = maxOverActivePool - maxActivePool;
|
||||
this.nanosPerOp = rateSpec.getNanosPerOp();
|
||||
this.filler = (this.filler == null) ? new TokenFiller(rateSpec, this, named, 3) : filler.apply(rateSpec);
|
||||
notifyAll();
|
||||
burstPoolSize = this.maxOverActivePool - this.maxActivePool;
|
||||
nanosPerOp = rateSpec.getNanosPerOp();
|
||||
filler = null == this.filler ? new TokenFiller(rateSpec, this, labeled, 3) : this.filler.apply(rateSpec);
|
||||
this.notifyAll();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public double getBurstRatio() {
|
||||
return burstRatio;
|
||||
return this.burstRatio;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -110,9 +110,9 @@ public class ThreadDrivenTokenPool implements TokenPool {
|
||||
* @return actual number of tokens removed, greater to or equal to zero
|
||||
*/
|
||||
@Override
|
||||
public synchronized long takeUpTo(long amt) {
|
||||
long take = Math.min(amt, activePool);
|
||||
activePool -= take;
|
||||
public synchronized long takeUpTo(final long amt) {
|
||||
final long take = Math.min(amt, this.activePool);
|
||||
this.activePool -= take;
|
||||
return take;
|
||||
}
|
||||
|
||||
@@ -124,55 +124,53 @@ public class ThreadDrivenTokenPool implements TokenPool {
|
||||
*/
|
||||
@Override
|
||||
public synchronized long blockAndTake() {
|
||||
while (activePool < nanosPerOp) {
|
||||
blocks++;
|
||||
while (this.activePool < this.nanosPerOp) {
|
||||
this.blocks++;
|
||||
//System.out.println(ANSI_BrightRed + "waiting for " + amt + "/" + activePool + " of max " + maxActivePool + ANSI_Reset);
|
||||
try {
|
||||
wait(1000);
|
||||
this.wait(1000);
|
||||
// wait(maxActivePool / 1000000, 0);
|
||||
} catch (InterruptedException ignored) {
|
||||
} catch (Exception e) {
|
||||
} catch (final InterruptedException ignored) {
|
||||
} catch (final Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
//System.out.println("waited for " + amt + "/" + activePool + " tokens");
|
||||
}
|
||||
//System.out.println(ANSI_BrightYellow + "taking " + amt + "/" + activePool + ANSI_Reset);
|
||||
|
||||
activePool -= nanosPerOp;
|
||||
return waitingPool + activePool;
|
||||
this.activePool -= this.nanosPerOp;
|
||||
return this.waitingPool + this.activePool;
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized long blockAndTake(long tokens) {
|
||||
while (activePool < tokens) {
|
||||
//System.out.println(ANSI_BrightRed + "waiting for " + amt + "/" + activePool + " of max " + maxActivePool + ANSI_Reset);
|
||||
try {
|
||||
wait(maxActivePool / 1000000, (int) maxActivePool % 1000000);
|
||||
} catch (InterruptedException ignored) {
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
//System.out.println("waited for " + amt + "/" + activePool + " tokens");
|
||||
public synchronized long blockAndTake(final long tokens) {
|
||||
//System.out.println(ANSI_BrightRed + "waiting for " + amt + "/" + activePool + " of max " + maxActivePool + ANSI_Reset);
|
||||
//System.out.println("waited for " + amt + "/" + activePool + " tokens");
|
||||
while (this.activePool < tokens) try {
|
||||
this.wait(this.maxActivePool / 1000000, (int) this.maxActivePool % 1000000);
|
||||
} catch (final InterruptedException ignored) {
|
||||
} catch (final Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
//System.out.println(ANSI_BrightYellow + "taking " + amt + "/" + activePool + ANSI_Reset);
|
||||
|
||||
activePool -= tokens;
|
||||
return waitingPool + activePool;
|
||||
this.activePool -= tokens;
|
||||
return this.waitingPool + this.activePool;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getWaitTime() {
|
||||
return activePool + waitingPool;
|
||||
return this.activePool + this.waitingPool;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getWaitPool() {
|
||||
return waitingPool;
|
||||
return this.waitingPool;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getActivePool() {
|
||||
return activePool;
|
||||
return this.activePool;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -189,77 +187,74 @@ public class ThreadDrivenTokenPool implements TokenPool {
|
||||
* @param newTokens The number of new tokens to add to the token pools
|
||||
* @return the total number of tokens in all pools
|
||||
*/
|
||||
public synchronized long refill(long newTokens) {
|
||||
boolean debugthis = false;
|
||||
public synchronized long refill(final long newTokens) {
|
||||
final boolean debugthis = false;
|
||||
// long debugAt = System.nanoTime();
|
||||
// if (debugAt>debugTrigger+debugRate) {
|
||||
// debugTrigger=debugAt;
|
||||
// debugthis=true;
|
||||
// }
|
||||
|
||||
long needed = Math.max(maxActivePool - activePool, 0L);
|
||||
long allocatedToActivePool = Math.min(newTokens, needed);
|
||||
activePool += allocatedToActivePool;
|
||||
final long needed = Math.max(this.maxActivePool - this.activePool, 0L);
|
||||
final long allocatedToActivePool = Math.min(newTokens, needed);
|
||||
this.activePool += allocatedToActivePool;
|
||||
|
||||
|
||||
// overflow logic
|
||||
long allocatedToOverflowPool = newTokens - allocatedToActivePool;
|
||||
waitingPool += allocatedToOverflowPool;
|
||||
final long allocatedToOverflowPool = newTokens - allocatedToActivePool;
|
||||
this.waitingPool += allocatedToOverflowPool;
|
||||
|
||||
// backfill logic
|
||||
double refillFactor = Math.min((double) newTokens / maxActivePool, 1.0D);
|
||||
long burstFillAllowed = (long) (refillFactor * burstPoolSize);
|
||||
final double refillFactor = Math.min((double) newTokens / this.maxActivePool, 1.0D);
|
||||
long burstFillAllowed = (long) (refillFactor * this.burstPoolSize);
|
||||
|
||||
burstFillAllowed = Math.min(maxOverActivePool - activePool, burstFillAllowed);
|
||||
long burstFill = Math.min(burstFillAllowed, waitingPool);
|
||||
burstFillAllowed = Math.min(this.maxOverActivePool - this.activePool, burstFillAllowed);
|
||||
final long burstFill = Math.min(burstFillAllowed, this.waitingPool);
|
||||
|
||||
waitingPool -= burstFill;
|
||||
activePool += burstFill;
|
||||
this.waitingPool -= burstFill;
|
||||
this.activePool += burstFill;
|
||||
|
||||
if (debugthis) {
|
||||
System.out.print(this);
|
||||
System.out.print(ANSI_BrightBlue + " adding=" + allocatedToActivePool);
|
||||
if (allocatedToOverflowPool > 0) {
|
||||
if (0 < allocatedToOverflowPool)
|
||||
System.out.print(ANSI_Red + " OVERFLOW:" + allocatedToOverflowPool + ANSI_Reset);
|
||||
}
|
||||
if (burstFill > 0) {
|
||||
System.out.print(ANSI_BrightGreen + " BACKFILL:" + burstFill + ANSI_Reset);
|
||||
}
|
||||
if (0 < burstFill) System.out.print(ANSI_BrightGreen + " BACKFILL:" + burstFill + ANSI_Reset);
|
||||
System.out.println();
|
||||
}
|
||||
//System.out.println(this);
|
||||
notifyAll();
|
||||
this.notifyAll();
|
||||
|
||||
return activePool + waitingPool;
|
||||
return this.activePool + this.waitingPool;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format(
|
||||
"{ active:%d, max:%d, fill:'(%,3.1f%%)A (%,3.1f%%)B', wait_ns:%,d, blocks:%,d }",
|
||||
activePool, maxActivePool,
|
||||
(((double) activePool / (double) maxActivePool) * 100.0),
|
||||
(((double) activePool / (double) maxOverActivePool) * 100.0),
|
||||
waitingPool,
|
||||
blocks
|
||||
this.activePool, this.maxActivePool,
|
||||
(double) this.activePool / this.maxActivePool * 100.0,
|
||||
(double) this.activePool / this.maxOverActivePool * 100.0,
|
||||
this.waitingPool,
|
||||
this.blocks
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public RateSpec getRateSpec() {
|
||||
return rateSpec;
|
||||
return this.rateSpec;
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized long restart() {
|
||||
long wait = activePool + waitingPool;
|
||||
activePool = 0L;
|
||||
waitingPool = 0L;
|
||||
final long wait = this.activePool + this.waitingPool;
|
||||
this.activePool = 0L;
|
||||
this.waitingPool = 0L;
|
||||
return wait;
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void start() {
|
||||
filler.start();
|
||||
this.filler.start();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,7 +17,7 @@
package io.nosqlbench.engine.api.activityapi.ratelimits;

import com.codahale.metrics.Timer;
import io.nosqlbench.api.config.NBNamedElement;
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -26,13 +26,13 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.LockSupport;

public class TokenFiller implements Runnable {
private final static Logger logger = LogManager.getLogger(TokenFiller.class);
private static final Logger logger = LogManager.getLogger(TokenFiller.class);

public final static double MIN_PER_SECOND = 10D;
public final static double MAX_PER_SECOND = 1000D;
public static final double MIN_PER_SECOND = 10.0D;
public static final double MAX_PER_SECOND = 1000.0D;
// private final SysPerfData PERFDATA = SysPerf.get().getPerfData
// (false);
private final long interval = (long) 1E5;
private final long interval = (long) 1.0E5;

private final ThreadDrivenTokenPool tokenPool;
private volatile boolean running = true;
@@ -47,34 +47,34 @@ public class TokenFiller implements Runnable {
* in the JVM.
*
*/
public TokenFiller(RateSpec rateSpec, ThreadDrivenTokenPool tokenPool, NBNamedElement named, int hdrdigits) {
public TokenFiller(final RateSpec rateSpec, final ThreadDrivenTokenPool tokenPool, final NBLabeledElement labeled, final int hdrdigits) {
this.rateSpec = rateSpec;
this.tokenPool = tokenPool;
this.timer = ActivityMetrics.timer(named, "tokenfiller", hdrdigits);
timer = ActivityMetrics.timer(labeled, "tokenfiller", hdrdigits);
}

public TokenFiller apply(RateSpec rateSpec) {
public TokenFiller apply(final RateSpec rateSpec) {
this.rateSpec = rateSpec;
return this;
}

private void stop() {
this.running=false;
running=false;
}

public TokenPool getTokenPool() {
return tokenPool;
return this.tokenPool;
}

@Override
public void run() {
lastRefillAt = System.nanoTime();
while (running) {
long nextRefillTime = lastRefillAt + interval;
this.lastRefillAt = System.nanoTime();
while (this.running) {
final long nextRefillTime = this.lastRefillAt + this.interval;
long thisRefillTime = System.nanoTime();
while (thisRefillTime < nextRefillTime) {
// while (thisRefillTime < lastRefillAt + interval) {
long parkfor = Math.max(nextRefillTime - thisRefillTime, 0L);
final long parkfor = Math.max(nextRefillTime - thisRefillTime, 0L);
// System.out.println(ANSI_Blue + " parking for " + parkfor + "ns" + ANSI_Reset); System.out.flush();
LockSupport.parkNanos(parkfor);
// System.out.println(ANSI_Blue + "unparking for " + parkfor + "ns" + ANSI_Reset); System.out.flush();
@@ -82,33 +82,33 @@ public class TokenFiller implements Runnable {
}

// this.times[iteration]=thisRefillTime;
long delta = thisRefillTime - lastRefillAt;
final long delta = thisRefillTime - this.lastRefillAt;
// this.amounts[iteration]=delta;
lastRefillAt = thisRefillTime;
this.lastRefillAt = thisRefillTime;

// System.out.println(ANSI_Blue + this + ANSI_Reset); System.out.flush();
tokenPool.refill(delta);
timer.update(delta, TimeUnit.NANOSECONDS);
this.tokenPool.refill(delta);
this.timer.update(delta, TimeUnit.NANOSECONDS);
// iteration++;

}
}

public synchronized TokenFiller start() {
this.tokenPool.refill(rateSpec.getNanosPerOp());
tokenPool.refill(this.rateSpec.getNanosPerOp());

thread = new Thread(this);
thread.setName(this.toString());
thread.setPriority(Thread.MAX_PRIORITY);
thread.setDaemon(true);
thread.start();
logger.debug("Starting token filler thread: " + this);
this.thread = new Thread(this);
this.thread.setName(toString());
this.thread.setPriority(Thread.MAX_PRIORITY);
this.thread.setDaemon(true);
this.thread.start();
TokenFiller.logger.debug("Starting token filler thread: {}", this);
return this;
}

@Override
public String toString() {
return "TokenFiller spec=" + rateSpec + " interval=" + this.interval + "ns pool:" + tokenPool +" running=" + running;
return "TokenFiller spec=" + this.rateSpec + " interval=" + interval + "ns pool:" + this.tokenPool +" running=" + this.running;
}

// public String getRefillLog() {
@@ -120,9 +120,9 @@ public class TokenFiller implements Runnable {
// }

public synchronized long restart() {
this.lastRefillAt=System.nanoTime();
logger.debug("Restarting token filler at " + lastRefillAt + " thread: " + this);
long wait = this.tokenPool.restart();
lastRefillAt=System.nanoTime();
TokenFiller.logger.debug("Restarting token filler at {} thread: {}", this.lastRefillAt, this);
final long wait = tokenPool.restart();
return wait;
}
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,11 +16,11 @@

package io.nosqlbench.engine.api.activityapi.ratelimits;

import io.nosqlbench.api.config.NBNamedElement;
import io.nosqlbench.api.config.NBLabeledElement;

public interface TokenPool {

TokenPool apply(NBNamedElement named, RateSpec rateSpec);
TokenPool apply(NBLabeledElement labeled, RateSpec rateSpec);

double getBurstRatio();
@@ -17,6 +17,8 @@
package io.nosqlbench.engine.api.activityimpl;

import com.codahale.metrics.Timer;
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.config.NBLabels;
import io.nosqlbench.api.config.standard.NBConfiguration;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
@@ -54,6 +56,7 @@ import org.apache.logging.log4j.Logger;
import java.io.InputStream;
import java.io.PrintWriter;
import java.lang.reflect.AnnotatedType;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.function.Function;
import java.util.function.Supplier;
@@ -62,8 +65,9 @@ import java.util.stream.Collectors;
/**
* A default implementation of an Activity, suitable for building upon.
*/
public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObserver {
private final static Logger logger = LogManager.getLogger("ACTIVITY");
public class SimpleActivity implements Activity {
private static final Logger logger = LogManager.getLogger("ACTIVITY");
private final NBLabeledElement parentLabels;

protected ActivityDef activityDef;
private final List<AutoCloseable> closeables = new ArrayList<>();
@@ -80,15 +84,18 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
private ActivityInstrumentation activityInstrumentation;
private PrintWriter console;
private long startedAtMillis;
private int nameEnumerator = 0;
private int nameEnumerator;
private ErrorMetrics errorMetrics;
private NBErrorHandler errorHandler;
private ActivityMetricProgressMeter progressMeter;
private String workloadSource = "unspecified";
private final RunStateTally tally = new RunStateTally();
private final NBLabels labels;

public SimpleActivity(ActivityDef activityDef) {
public SimpleActivity(ActivityDef activityDef, NBLabeledElement parentLabels) {
labels = parentLabels.getLabels().and("activity",activityDef.getAlias());
this.activityDef = activityDef;
this.parentLabels = parentLabels;
if (activityDef.getAlias().equals(ActivityDef.DEFAULT_ALIAS)) {
Optional<String> workloadOpt = activityDef.getParams().getOptionalString(
"workload",
@@ -99,13 +106,14 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
} else {
activityDef.getParams().set("alias",
activityDef.getActivityType().toUpperCase(Locale.ROOT)
+ nameEnumerator++);
+ nameEnumerator);
nameEnumerator++;
}
}
}

public SimpleActivity(String activityDefString) {
this(ActivityDef.parseActivityDef(activityDefString));
public SimpleActivity(String activityDefString, NBLabeledElement parentLabels) {
this(ActivityDef.parseActivityDef(activityDefString),parentLabels);
}

@Override
@@ -114,7 +122,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
}

public synchronized NBErrorHandler getErrorHandler() {
if (errorHandler == null) {
if (null == this.errorHandler) {
errorHandler = new NBErrorHandler(
() -> activityDef.getParams().getOptionalString("errors").orElse("stop"),
() -> getExceptionMetrics());
@@ -122,13 +130,15 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
return errorHandler;
}

@Override
public synchronized RunState getRunState() {
return runState;
}

@Override
public synchronized void setRunState(RunState runState) {
this.runState = runState;
if (runState == RunState.Running) {
if (RunState.Running == runState) {
this.startedAtMillis = System.currentTimeMillis();
}
}
@@ -194,7 +204,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
}

public String toString() {
return getAlias() + ":" + getRunState() + ":" + getRunStateTally().toString();
return getAlias() + ':' + this.runState + ':' + this.tally;
}

@Override
@@ -243,7 +253,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs

@Override
public synchronized RateLimiter getCycleRateLimiter(Supplier<? extends RateLimiter> s) {
if (cycleLimiter == null) {
if (null == this.cycleLimiter) {
cycleLimiter = s.get();
}
return cycleLimiter;
@@ -261,7 +271,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs

@Override
public synchronized RateLimiter getStrideRateLimiter(Supplier<? extends RateLimiter> s) {
if (strideLimiter == null) {
if (null == this.strideLimiter) {
strideLimiter = s.get();
}
return strideLimiter;
@@ -275,7 +285,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs

@Override
public Timer getResultTimer() {
return ActivityMetrics.timer(getActivityDef(), "result", getParams().getOptionalInteger("hdr_digits").orElse(4));
return ActivityMetrics.timer(this, "result", getParams().getOptionalInteger("hdr_digits").orElse(4));
}

@Override
@@ -285,7 +295,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs

@Override
public synchronized RateLimiter getPhaseRateLimiter(Supplier<? extends RateLimiter> supplier) {
if (phaseLimiter == null) {
if (null == this.phaseLimiter) {
phaseLimiter = supplier.get();
}
return phaseLimiter;
@@ -293,7 +303,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs

@Override
public synchronized ActivityInstrumentation getInstrumentation() {
if (activityInstrumentation == null) {
if (null == this.activityInstrumentation) {
activityInstrumentation = new CoreActivityInstrumentation(this);
}
return activityInstrumentation;
@@ -301,8 +311,8 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs

@Override
public synchronized PrintWriter getConsoleOut() {
if (this.console == null) {
this.console = new PrintWriter(System.out);
if (null == console) {
this.console = new PrintWriter(System.out, false, StandardCharsets.UTF_8);
}
return this.console;
}
@@ -319,8 +329,8 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs

@Override
public synchronized ErrorMetrics getExceptionMetrics() {
if (errorMetrics == null) {
errorMetrics = new ErrorMetrics(this.getActivityDef());
if (null == this.errorMetrics) {
errorMetrics = new ErrorMetrics(this);
}
return errorMetrics;
}
@@ -334,15 +344,15 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs

activityDef.getParams().getOptionalNamedParameter("striderate")
.map(RateSpec::new)
.ifPresent(spec -> strideLimiter = RateLimiters.createOrUpdate(this.getActivityDef(), "strides", strideLimiter, spec));
.ifPresent(spec -> strideLimiter = RateLimiters.createOrUpdate(this, "strides", strideLimiter, spec));

activityDef.getParams().getOptionalNamedParameter("cyclerate", "targetrate", "rate")
.map(RateSpec::new).ifPresent(
spec -> cycleLimiter = RateLimiters.createOrUpdate(this.getActivityDef(), "cycles", cycleLimiter, spec));
spec -> cycleLimiter = RateLimiters.createOrUpdate(this, "cycles", cycleLimiter, spec));

activityDef.getParams().getOptionalNamedParameter("phaserate")
.map(RateSpec::new)
.ifPresent(spec -> phaseLimiter = RateLimiters.createOrUpdate(this.getActivityDef(), "phases", phaseLimiter, spec));
.ifPresent(spec -> phaseLimiter = RateLimiters.createOrUpdate(this, "phases", phaseLimiter, spec));

}

@@ -369,13 +379,13 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
// getParams().set("cycles", getParams().getOptionalString("stride").orElseThrow());
getParams().setSilently("cycles", getParams().getOptionalString("stride").orElseThrow());
} else {
if (getActivityDef().getCycleCount() == 0) {
if (0 == activityDef.getCycleCount()) {
throw new RuntimeException(
"You specified cycles, but the range specified means zero cycles: " + getParams().get("cycles")
);
}
long stride = getParams().getOptionalLong("stride").orElseThrow();
long cycles = getActivityDef().getCycleCount();
long cycles = this.activityDef.getCycleCount();
if (cycles < stride) {
throw new RuntimeException(
"The specified cycles (" + cycles + ") are less than the stride (" + stride + "). This means there aren't enough cycles to cause a stride to be executed." +
@@ -384,25 +394,25 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
}
}

long cycleCount = getActivityDef().getCycleCount();
long stride = getActivityDef().getParams().getOptionalLong("stride").orElseThrow();
long cycleCount = this.activityDef.getCycleCount();
long stride = this.activityDef.getParams().getOptionalLong("stride").orElseThrow();

if (stride > 0 && (cycleCount % stride) != 0) {
if (0 < stride && 0 != cycleCount % stride) {
logger.warn(() -> "The stride does not evenly divide cycles. Only full strides will be executed," +
"leaving some cycles unused. (stride=" + stride + ", cycles=" + cycleCount + ")");
"leaving some cycles unused. (stride=" + stride + ", cycles=" + cycleCount + ')');
}

Optional<String> threadSpec = activityDef.getParams().getOptionalString("threads");
if (threadSpec.isPresent()) {
String spec = threadSpec.get();
int processors = Runtime.getRuntime().availableProcessors();
if (spec.equalsIgnoreCase("auto")) {
if ("auto".equalsIgnoreCase(spec)) {
int threads = processors * 10;
if (threads > activityDef.getCycleCount()) {
threads = (int) activityDef.getCycleCount();
logger.info("setting threads to " + threads + " (auto) [10xCORES, cycle count limited]");
logger.info("setting threads to {} (auto) [10xCORES, cycle count limited]", threads);
} else {
logger.info("setting threads to " + threads + " (auto) [10xCORES]");
logger.info("setting threads to {} (auto) [10xCORES]", threads);
}
// activityDef.setThreads(threads);
activityDef.getParams().setSilently("threads", threads);
@@ -423,18 +433,15 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
+ ", you should have more cycles than threads.");
}

} else {
if (cycleCount > 1000) {
logger.warn(() -> "For testing at scale, it is highly recommended that you " +
"set threads to a value higher than the default of 1." +
" hint: you can use threads=auto for reasonable default, or" +
" consult the topic on threads with `help threads` for" +
" more information.");

}
} else if (1000 < cycleCount) {
logger.warn(() -> "For testing at scale, it is highly recommended that you " +
"set threads to a value higher than the default of 1." +
" hint: you can use threads=auto for reasonable default, or" +
" consult the topic on threads with `help threads` for" +
" more information.");
}

if (activityDef.getCycleCount() > 0 && seq.getOps().size() == 0) {
if (0 < this.activityDef.getCycleCount() && 0 == seq.getOps().size()) {
throw new BasicError("You have configured a zero-length sequence and non-zero cycles. It is not possible to continue with this activity.");
}
}
@@ -443,7 +450,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
* Given a function that can create an op of type <O> from a CommandTemplate, generate
* an indexed sequence of ready to call operations.
*
* This method works almost exactly like the {@link #createOpSequenceFromCommands(Function, boolean)},
* This method works almost exactly like the ,
* except that it uses the {@link CommandTemplate} semantics, which are more general and allow
* for map-based specification of operations with bindings in each field.
*
@@ -491,12 +498,12 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
for (int i = 0; i < pops.size(); i++) {
long ratio = ratios.get(i);
ParsedOp pop = pops.get(i);
if (ratio == 0) {
logger.info(() -> "skipped mapping op '" + pop.getName() + "'");
if (0 == ratio) {
logger.info(() -> "skipped mapping op '" + pop.getName() + '\'');
continue;
}
String dryrunSpec = pop.takeStaticConfigOr("dryrun", "none");
boolean dryrun = dryrunSpec.equalsIgnoreCase("op");
boolean dryrun = "op".equalsIgnoreCase(dryrunSpec);

DriverAdapter adapter = adapters.get(i);
OpMapper opMapper = adapter.getOpMapper();
@@ -512,8 +519,8 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
// }
planner.addOp((OpDispenser<? extends O>) dispenser, ratio);
}
if (dryrunCount > 0) {
logger.warn("initialized " + dryrunCount + " op templates for dry run only. These ops will be synthesized for each cycle, but will not be executed.");
if (0 < dryrunCount) {
logger.warn("initialized {} op templates for dry run only. These ops will be synthesized for each cycle, but will not be executed.", dryrunCount);
}

@@ -533,7 +540,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
List<Function<Map<String, Object>, Map<String, Object>>> parsers,
boolean strict
) {
Function<OpTemplate, ParsedOp> f = t -> new ParsedOp(t, cfg, parsers);
Function<OpTemplate, ParsedOp> f = t -> new ParsedOp(t, cfg, parsers, this);
Function<OpTemplate, OpDispenser<? extends O>> opTemplateOFunction = f.andThen(opinit);

return createOpSequence(opTemplateOFunction, strict, Optional.empty());
@@ -541,7 +548,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs

protected List<ParsedOp> loadParsedOps(NBConfiguration cfg, Optional<DriverAdapter> defaultAdapter) {
List<ParsedOp> parsedOps = loadOpTemplates(defaultAdapter).stream().map(
ot -> new ParsedOp(ot, cfg, List.of())
ot -> new ParsedOp(ot, cfg, List.of(), this)
).toList();
return parsedOps;
}
@@ -555,35 +562,35 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
List<OpTemplate> unfilteredOps = opsDocList.getOps();
List<OpTemplate> filteredOps = opsDocList.getOps(tagfilter);

if (filteredOps.size() == 0) {
if (unfilteredOps.size() > 0) { // There were no ops, and it was because they were all filtered out
if (0 == filteredOps.size()) {
// There were no ops, and it *wasn't* because they were all filtered out.
// In this case, let's try to synthesize the ops as long as at least a default driver was provided
// But if there were no ops, and there was no default driver provided, we can't continue
// There were no ops, and it was because they were all filtered out
if (0 < unfilteredOps.size()) {
throw new BasicError("There were no active op templates with tag filter '"
+ tagfilter + "', since all " + unfilteredOps.size() + " were filtered out.");
} else {
// There were no ops, and it *wasn't* because they were all filtered out.

// In this case, let's try to synthesize the ops as long as at least a default driver was provided
if (defaultDriverAdapter.isPresent() && defaultDriverAdapter.get() instanceof SyntheticOpTemplateProvider sotp) {
filteredOps = sotp.getSyntheticOpTemplates(opsDocList, getActivityDef().getParams());
Objects.requireNonNull(filteredOps);
if (filteredOps.size() == 0) {
throw new BasicError("Attempted to create synthetic ops from driver '" + defaultDriverAdapter.get().getAdapterName() + "'" +
" but no ops were created. You must provide either a workload or an op parameter. Activities require op templates.");
}
} else { // But if there were no ops, and there was no default driver provided, we can't continue
throw new BasicError("""
No op templates were provided. You must provide one of these activity parameters:
1) workload=some.yaml
2) op='inline template'
3) driver=stdout (or any other driver that can synthesize ops)""");
}
}
if (filteredOps.size() == 0) {
throw new BasicError("There were no active op templates with tag filter '" + tagfilter + "'");
if (defaultDriverAdapter.isPresent() && defaultDriverAdapter.get() instanceof SyntheticOpTemplateProvider sotp) {
filteredOps = sotp.getSyntheticOpTemplates(opsDocList, this.activityDef.getParams());
Objects.requireNonNull(filteredOps);
if (0 == filteredOps.size()) {
throw new BasicError("Attempted to create synthetic ops from driver '" + defaultDriverAdapter.get().getAdapterName() + '\'' +
" but no ops were created. You must provide either a workload or an op parameter. Activities require op templates.");
}
} else {
throw new BasicError("""
No op templates were provided. You must provide one of these activity parameters:
1) workload=some.yaml
2) op='inline template'
3) driver=stdout (or any other driver that can synthesize ops)""");
}
if (0 == filteredOps.size()) {
throw new BasicError("There were no active op templates with tag filter '" + tagfilter + '\'');
}
}

if (filteredOps.size() == 0) {
if (0 == filteredOps.size()) {
throw new OpConfigError("No op templates found. You must provide either workload=... or op=..., or use " +
"a default driver (driver=___). This includes " +
ServiceLoader.load(DriverAdapter.class).stream()
@@ -670,7 +677,8 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
if (stmt.isPresent()) {
workloadSource = "commandline:" + stmt.get();
return OpsLoader.loadString(stmt.get(), OpTemplateFormat.inline, activityDef.getParams(), null);
} else if (op_yaml_loc.isPresent()) {
}
if (op_yaml_loc.isPresent()) {
workloadSource = "yaml:" + op_yaml_loc.get();
return OpsLoader.loadPath(op_yaml_loc.get(), activityDef.getParams(), "activities");
}
@@ -685,7 +693,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs

@Override
public synchronized ProgressMeterDisplay getProgressMeter() {
if (progressMeter == null) {
if (null == this.progressMeter) {
this.progressMeter = new ActivityMetricProgressMeter(this);
}
return this.progressMeter;
@@ -700,7 +708,7 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
*/
@Override
public int getMaxTries() {
return getActivityDef().getParams().getOptionalInteger("maxtries").orElse(10);
return this.activityDef.getParams().getOptionalInteger("maxtries").orElse(10);
}

@Override
@@ -708,9 +716,8 @@ public class SimpleActivity implements Activity, ProgressCapable, ActivityDefObs
return tally;
}

@Override
public String getName() {
return this.activityDef.getAlias();
public NBLabels getLabels() {
return this.labels;
}
}
@@ -16,6 +16,8 @@

package io.nosqlbench.engine.api.activityimpl.uniform;

import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.config.NBLabels;
import io.nosqlbench.api.config.standard.*;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.api.errors.BasicError;
@@ -46,15 +48,15 @@ import java.util.concurrent.ConcurrentHashMap;
* @param <S> The context type for the activity, AKA the 'space' for a named driver instance and its associated object graph
*/
public class StandardActivity<R extends Op, S> extends SimpleActivity implements SyntheticOpTemplateProvider {
private final static Logger logger = LogManager.getLogger("ACTIVITY");
private static final Logger logger = LogManager.getLogger("ACTIVITY");

private final OpSequence<OpDispenser<? extends Op>> sequence;
private final NBConfigModel yamlmodel;
private final ConcurrentHashMap<String, DriverAdapter> adapters = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, OpMapper<Op>> mappers = new ConcurrentHashMap<>();

public StandardActivity(ActivityDef activityDef) {
super(activityDef);
public StandardActivity(ActivityDef activityDef, NBLabeledElement parentLabels) {
super(activityDef, parentLabels);
OpsDocList workload;

Optional<String> yaml_loc = activityDef.getParams().getOptionalString("yaml", "workload");
@@ -72,7 +74,7 @@ public class StandardActivity<R extends Op, S> extends SimpleActivity implements
.flatMap(s -> ServiceSelector.of(s, adapterLoader).get());

if (defaultDriverName.isPresent() && defaultAdapter.isEmpty()) {
throw new BasicError("Unable to load default driver adapter '" + defaultDriverName.get() + "'");
throw new BasicError("Unable to load default driver adapter '" + defaultDriverName.get() + '\'');
}

// HERE, op templates are loaded before drivers are loaded
@@ -85,7 +87,7 @@ public class StandardActivity<R extends Op, S> extends SimpleActivity implements

Optional<String> defaultDriverOption = activityDef.getParams().getOptionalString("driver");
for (OpTemplate ot : opTemplates) {
ParsedOp incompleteOpDef = new ParsedOp(ot, NBConfiguration.empty(), List.of());
ParsedOp incompleteOpDef = new ParsedOp(ot, NBConfiguration.empty(), List.of(), this);
String driverName = incompleteOpDef.takeOptionalStaticValue("driver", String.class)
.or(() -> incompleteOpDef.takeOptionalStaticValue("type",String.class))
.or(() -> defaultDriverOption)
@@ -97,7 +99,7 @@ public class StandardActivity<R extends Op, S> extends SimpleActivity implements

if (!adapters.containsKey(driverName)) {
DriverAdapter adapter = ServiceSelector.of(driverName, adapterLoader).get().orElseThrow(
() -> new OpConfigError("Unable to load driver adapter for name '" + driverName + "'")
() -> new OpConfigError("Unable to load driver adapter for name '" + driverName + '\'')
);

NBConfigModel combinedModel = yamlmodel;
@@ -119,15 +121,15 @@ public class StandardActivity<R extends Op, S> extends SimpleActivity implements

DriverAdapter adapter = adapters.get(driverName);
adapterlist.add(adapter);
ParsedOp pop = new ParsedOp(ot, adapter.getConfiguration(), List.of(adapter.getPreprocessor()));
ParsedOp pop = new ParsedOp(ot, adapter.getConfiguration(), List.of(adapter.getPreprocessor()), this);
Optional<String> discard = pop.takeOptionalStaticValue("driver", String.class);
pops.add(pop);
}

if (defaultDriverOption.isPresent()) {
long matchingDefault = mappers.keySet().stream().filter(n -> n.equals(defaultDriverOption.get())).count();
if (matchingDefault==0) {
logger.warn("All op templates used a different driver than the default '" + defaultDriverOption.get()+"'");
if (0 == matchingDefault) {
logger.warn("All op templates used a different driver than the default '{}'", defaultDriverOption.get());
}
}

@@ -137,9 +139,8 @@ public class StandardActivity<R extends Op, S> extends SimpleActivity implements
} catch (Exception e) {
if (e instanceof OpConfigError) {
throw e;
} else {
throw new OpConfigError("Error mapping workload template to operations: " + e.getMessage(), null, e);
}
throw new OpConfigError("Error mapping workload template to operations: " + e.getMessage(), null, e);
}
}

@@ -212,4 +213,9 @@ public class StandardActivity<R extends Op, S> extends SimpleActivity implements
});
}
}

@Override
public NBLabels getLabels() {
return super.getLabels();
}
}
@@ -16,6 +16,7 @@

package io.nosqlbench.engine.api.activityimpl.uniform;

import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.config.standard.NBConfigModel;
import io.nosqlbench.api.config.standard.NBConfiguration;
import io.nosqlbench.api.config.standard.NBReconfigurable;
@@ -35,55 +36,52 @@ import java.util.Optional;

public class StandardActivityType<A extends StandardActivity<?,?>> extends SimpleActivity implements ActivityType<A> {

private final static Logger logger = LogManager.getLogger("ACTIVITY");
private static final Logger logger = LogManager.getLogger("ACTIVITY");
private final Map<String,DriverAdapter> adapters = new HashMap<>();

public StandardActivityType(DriverAdapter<?,?> adapter, ActivityDef activityDef) {
public StandardActivityType(final DriverAdapter<?,?> adapter, final ActivityDef activityDef, final NBLabeledElement parentLabels) {
super(activityDef
.deprecate("type","driver")
.deprecate("yaml", "workload")
.deprecate("yaml", "workload"),
parentLabels
);
this.adapters.put(adapter.getAdapterName(),adapter);
if (adapter instanceof ActivityDefAware) {
((ActivityDefAware) adapter).setActivityDef(activityDef);
}
adapters.put(adapter.getAdapterName(),adapter);
if (adapter instanceof ActivityDefAware) ((ActivityDefAware) adapter).setActivityDef(activityDef);
}

public StandardActivityType(ActivityDef activityDef) {
super(activityDef);
public StandardActivityType(final ActivityDef activityDef, final NBLabeledElement parentLabels) {
super(activityDef, parentLabels);
}

@Override
public A getActivity(ActivityDef activityDef) {
if (activityDef.getParams().getOptionalString("async").isPresent()) {
public A getActivity(final ActivityDef activityDef, final NBLabeledElement parentLabels) {
if (activityDef.getParams().getOptionalString("async").isPresent())
throw new RuntimeException("This driver does not support async mode yet.");
}

return (A) new StandardActivity(activityDef);
return (A) new StandardActivity(activityDef, parentLabels);
}

@Override
public synchronized void onActivityDefUpdate(ActivityDef activityDef) {
public synchronized void onActivityDefUpdate(final ActivityDef activityDef) {
super.onActivityDefUpdate(activityDef);

for (DriverAdapter adapter : adapters.values()) {
for (final DriverAdapter adapter : this.adapters.values())
if (adapter instanceof NBReconfigurable reconfigurable) {
NBConfigModel cfgModel = reconfigurable.getReconfigModel();
Optional<String> op_yaml_loc = activityDef.getParams().getOptionalString("yaml", "workload");
final Optional<String> op_yaml_loc = activityDef.getParams().getOptionalString("yaml", "workload");
if (op_yaml_loc.isPresent()) {
Map<String,Object> disposable = new LinkedHashMap<>(activityDef.getParams());
OpsDocList workload = OpsLoader.loadPath(op_yaml_loc.get(), disposable, "activities");
cfgModel=cfgModel.add(workload.getConfigModel());
final Map<String, Object> disposable = new LinkedHashMap<>(activityDef.getParams());
final OpsDocList workload = OpsLoader.loadPath(op_yaml_loc.get(), disposable, "activities");
cfgModel = cfgModel.add(workload.getConfigModel());
}
NBConfiguration cfg = cfgModel.apply(activityDef.getParams());
final NBConfiguration cfg = cfgModel.apply(activityDef.getParams());
reconfigurable.applyReconfig(cfg);
}
}

}

@Override
public ActionDispenser getActionDispenser(A activity) {
public ActionDispenser getActionDispenser(final A activity) {
return new StandardActionDispenser(activity);
}
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,11 +17,10 @@
package io.nosqlbench.engine.api.extensions;

import com.codahale.metrics.MetricRegistry;
import io.nosqlbench.api.config.LabeledScenarioContext;
import io.nosqlbench.nb.annotations.Service;
import org.apache.logging.log4j.Logger;

import javax.script.ScriptContext;

/**
* Any implementation of a SandboxExtension that is found in the runtime
* can be automatically loaded into the scenario scripting sandbox.
@@ -44,13 +43,13 @@ public interface ScriptingPluginInfo<T> {
* @param scriptContext The scripting context object, useful for interacting with the sandbox directly
* @return a new instance of an extension. The extension is given a logger if it desires.
*/
T getExtensionObject(Logger logger, MetricRegistry metricRegistry, ScriptContext scriptContext);
T getExtensionObject(Logger logger, MetricRegistry metricRegistry, LabeledScenarioContext scriptContext);

/**
* @return a simple name at the root of the variable namespace to anchor this extension.
*/
default String getBaseVariableName() {
return getClass().getAnnotation(Service.class).selector();
return this.getClass().getAnnotation(Service.class).selector();
}

/**
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,7 +17,7 @@
package io.nosqlbench.engine.api.metrics;

import com.codahale.metrics.Counter;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.engine.metrics.ActivityMetrics;

import java.util.ArrayList;
@@ -30,28 +30,26 @@ import java.util.concurrent.ConcurrentHashMap;
public class ExceptionCountMetrics {
private final ConcurrentHashMap<String, Counter> counters = new ConcurrentHashMap<>();
private final Counter allerrors;
private final ActivityDef activityDef;
private final NBLabeledElement parentLabels;

public ExceptionCountMetrics(ActivityDef activityDef) {
this.activityDef = activityDef;
allerrors=ActivityMetrics.counter(activityDef, "errorcounts.ALL");
public ExceptionCountMetrics(final NBLabeledElement parentLabels) {
this.parentLabels = parentLabels;
this.allerrors =ActivityMetrics.counter(parentLabels, "errorcounts.ALL");
}

public void count(String name) {
Counter c = counters.get(name);
if (c == null) {
synchronized (counters) {
c = counters.computeIfAbsent(
name,
k -> ActivityMetrics.counter(activityDef, "errorcounts." + name)
);
}
public void count(final String name) {
Counter c = this.counters.get(name);
if (null == c) synchronized (this.counters) {
c = this.counters.computeIfAbsent(
name,
k -> ActivityMetrics.counter(this.parentLabels, "errorcounts." + name)
);
}
c.inc();
allerrors.inc();
this.allerrors.inc();
}

public List<Counter> getCounters() {
return new ArrayList<>(counters.values());
return new ArrayList<>(this.counters.values());
}
}
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@
package io.nosqlbench.engine.api.metrics;

import com.codahale.metrics.Histogram;
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.api.engine.metrics.ActivityMetrics;

@@ -32,29 +33,29 @@ import java.util.concurrent.ConcurrentHashMap;
public class ExceptionHistoMetrics {
private final ConcurrentHashMap<String, Histogram> histos = new ConcurrentHashMap<>();
private final Histogram allerrors;
private final NBLabeledElement parentLabels;
private final ActivityDef activityDef;

public ExceptionHistoMetrics(ActivityDef activityDef) {
public ExceptionHistoMetrics(final NBLabeledElement parentLabels, final ActivityDef activityDef) {
this.parentLabels = parentLabels;
this.activityDef = activityDef;
allerrors = ActivityMetrics.histogram(activityDef, "errorhistos.ALL", activityDef.getParams().getOptionalInteger("hdr_digits").orElse(4));
this.allerrors = ActivityMetrics.histogram(parentLabels, "errorhistos.ALL", activityDef.getParams().getOptionalInteger("hdr_digits").orElse(4));
}

public void update(String name, long magnitude) {
Histogram h = histos.get(name);
if (h == null) {
synchronized (histos) {
h = histos.computeIfAbsent(
name,
k -> ActivityMetrics.histogram(activityDef, "errorhistos." + name, activityDef.getParams().getOptionalInteger("hdr_digits").orElse(4))
);
}
public void update(final String name, final long magnitude) {
Histogram h = this.histos.get(name);
if (null == h) synchronized (this.histos) {
h = this.histos.computeIfAbsent(
name,
k -> ActivityMetrics.histogram(this.parentLabels, "errorhistos." + name, this.activityDef.getParams().getOptionalInteger("hdr_digits").orElse(4))
);
}
h.update(magnitude);
allerrors.update(magnitude);
this.allerrors.update(magnitude);
}

public List<Histogram> getHistograms() {
return new ArrayList<>(histos.values());
return new ArrayList<>(this.histos.values());
}
}
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,7 +17,7 @@
package io.nosqlbench.engine.api.metrics;

import com.codahale.metrics.Meter;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.engine.metrics.ActivityMetrics;

import java.util.ArrayList;
@@ -30,28 +30,26 @@ import java.util.concurrent.ConcurrentHashMap;
public class ExceptionMeterMetrics {
private final ConcurrentHashMap<String, Meter> meters = new ConcurrentHashMap<>();
private final Meter allerrors;
private final ActivityDef activityDef;
private final NBLabeledElement parentLabels;

public ExceptionMeterMetrics(ActivityDef activityDef) {
this.activityDef = activityDef;
allerrors = ActivityMetrics.meter(activityDef, "errormeters.ALL");
public ExceptionMeterMetrics(final NBLabeledElement parentLabels) {
this.parentLabels = parentLabels;
this.allerrors = ActivityMetrics.meter(parentLabels, "errormeters.ALL");
}

public void mark(String name) {
Meter c = meters.get(name);
if (c == null) {
synchronized (meters) {
c = meters.computeIfAbsent(
name,
k -> ActivityMetrics.meter(activityDef, "errormeters." + name)
);
}
public void mark(final String name) {
Meter c = this.meters.get(name);
if (null == c) synchronized (this.meters) {
c = this.meters.computeIfAbsent(
name,
k -> ActivityMetrics.meter(this.parentLabels, "errormeters." + name)
);
}
c.mark();
allerrors.mark();
this.allerrors.mark();
}

public List<Meter> getMeters() {
return new ArrayList<>(meters.values());
return new ArrayList<>(this.meters.values());
}
}
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@
package io.nosqlbench.engine.api.metrics;

import com.codahale.metrics.Timer;
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.api.engine.metrics.ActivityMetrics;

@@ -32,31 +33,32 @@ public class ExceptionTimerMetrics {
private final ConcurrentHashMap<String, Timer> timers = new ConcurrentHashMap<>();
private final Timer allerrors;
private final ActivityDef activityDef;
private final NBLabeledElement parentLabels;

public ExceptionTimerMetrics(ActivityDef activityDef) {
public ExceptionTimerMetrics(final NBLabeledElement parentLabels, final ActivityDef activityDef) {
this.activityDef = activityDef;
allerrors = ActivityMetrics.timer(
activityDef,
this.parentLabels = parentLabels;

this.allerrors = ActivityMetrics.timer(
parentLabels,
"errortimers.ALL",
activityDef.getParams().getOptionalInteger("hdr_digits").orElse(4)
);
}

public void update(String name, long nanosDuration) {
Timer timer = timers.get(name);
if (timer == null) {
synchronized (timers) {
timer = timers.computeIfAbsent(
name,
k -> ActivityMetrics.timer(activityDef, "errortimers." + name, activityDef.getParams().getOptionalInteger("hdr_digits").orElse(4))
);
}
public void update(final String name, final long nanosDuration) {
Timer timer = this.timers.get(name);
if (null == timer) synchronized (this.timers) {
timer = this.timers.computeIfAbsent(
name,
k -> ActivityMetrics.timer(this.parentLabels, "errortimers." + name, this.activityDef.getParams().getOptionalInteger("hdr_digits").orElse(4))
);
}
timer.update(nanosDuration, TimeUnit.NANOSECONDS);
allerrors.update(nanosDuration, TimeUnit.NANOSECONDS);
this.allerrors.update(nanosDuration, TimeUnit.NANOSECONDS);
}

public List<Timer> getTimers() {
return new ArrayList<>(timers.values());
return new ArrayList<>(this.timers.values());
}
}
@@ -20,6 +20,7 @@ import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Timer;
import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.api.errors.ExpectedResultVerificationError;
import io.nosqlbench.engine.api.activityapi.errorhandling.ErrorMetrics;
@@ -27,6 +28,7 @@ import io.nosqlbench.engine.api.activityapi.errorhandling.modular.handlers.Count
import io.nosqlbench.engine.api.activityapi.errorhandling.modular.handlers.CounterErrorHandler;
import io.nosqlbench.engine.api.activityapi.errorhandling.modular.handlers.ExpectedResultVerificationErrorHandler;
import io.nosqlbench.util.NBMock;
import io.nosqlbench.util.NBMock.LogAppender;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Logger;
@@ -49,28 +51,28 @@ class NBErrorHandlerTest {

@Test
void testNullConfig() {
ErrorMetrics errorMetrics = new ErrorMetrics(ActivityDef.parseActivityDef("alias=testalias_stop"));
NBErrorHandler errhandler = new NBErrorHandler(() -> "stop", () -> errorMetrics);
final ErrorMetrics errorMetrics = new ErrorMetrics(NBLabeledElement.forKV("activity","testalias_stop"));
final NBErrorHandler errhandler = new NBErrorHandler(() -> "stop", () -> errorMetrics);
assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(() -> errhandler.handleError(runtimeException, 1, 2));
.isThrownBy(() -> errhandler.handleError(this.runtimeException, 1, 2));
}

@Test
void testMultipleWithRetry() {
ErrorMetrics errorMetrics = new ErrorMetrics(ActivityDef.parseActivityDef("alias=testalias_wr"));
NBErrorHandler eh = new NBErrorHandler(() -> "warn,retry", () -> errorMetrics);
ErrorDetail detail = eh.handleError(runtimeException, 1, 2);
final ErrorMetrics errorMetrics = new ErrorMetrics(NBLabeledElement.forKV("activity","testalias_wr"));
final NBErrorHandler eh = new NBErrorHandler(() -> "warn,retry", () -> errorMetrics);
final ErrorDetail detail = eh.handleError(this.runtimeException, 1, 2);
assertThat(detail.isRetryable()).isTrue();
}

@Test
void testWarnErrorHandler() {
Logger logger = (Logger) LogManager.getLogger("ERRORS");
NBMock.LogAppender appender = NBMock.registerTestLogger(ERROR_HANDLER_APPENDER_NAME, logger, Level.WARN);
final Logger logger = (Logger) LogManager.getLogger("ERRORS");
final LogAppender appender = NBMock.registerTestLogger(NBErrorHandlerTest.ERROR_HANDLER_APPENDER_NAME, logger, Level.WARN);

ErrorMetrics errorMetrics = new ErrorMetrics(ActivityDef.parseActivityDef("alias=testalias_warn"));
NBErrorHandler eh = new NBErrorHandler(() -> "warn", () -> errorMetrics);
ErrorDetail detail = eh.handleError(runtimeException, 1, 2);
final ErrorMetrics errorMetrics = new ErrorMetrics(NBLabeledElement.forKV("activity","testalias_warn"));
final NBErrorHandler eh = new NBErrorHandler(() -> "warn", () -> errorMetrics);
final ErrorDetail detail = eh.handleError(this.runtimeException, 1, 2);

logger.getContext().stop(); // force any async appenders to flush
logger.getContext().start(); // resume processing
@@ -83,34 +85,34 @@ class NBErrorHandlerTest {

@Test
void testHistogramErrorHandler() {
ErrorMetrics errorMetrics = new ErrorMetrics(ActivityDef.parseActivityDef("alias=testalias_histos"));
NBErrorHandler eh = new NBErrorHandler(() -> "histogram", () -> errorMetrics);
ErrorDetail detail = eh.handleError(runtimeException, 1, 2);
final ErrorMetrics errorMetrics = new ErrorMetrics(NBLabeledElement.forKV("activity","testalias_histos"));
final NBErrorHandler eh = new NBErrorHandler(() -> "histogram", () -> errorMetrics);
final ErrorDetail detail = eh.handleError(this.runtimeException, 1, 2);
assertThat(detail.isRetryable()).isFalse();
List<Histogram> histograms = errorMetrics.getExceptionHistoMetrics().getHistograms();
final List<Histogram> histograms = errorMetrics.getExceptionHistoMetrics().getHistograms();
assertThat(histograms).hasSize(1);
}

@Test
void testTimerErrorHandler() {
ErrorMetrics errorMetrics = new ErrorMetrics(ActivityDef.parseActivityDef("alias=testalias_timers"));
NBErrorHandler eh = new NBErrorHandler(() -> "timer", () -> errorMetrics);
ErrorDetail detail = eh.handleError(runtimeException, 1, 2);
final ErrorMetrics errorMetrics = new ErrorMetrics(NBLabeledElement.forKV("activity","testalias_timers"));
final NBErrorHandler eh = new NBErrorHandler(() -> "timer", () -> errorMetrics);
final ErrorDetail detail = eh.handleError(this.runtimeException, 1, 2);
assertThat(detail.isRetryable()).isFalse();
List<Timer> histograms = errorMetrics.getExceptionTimerMetrics().getTimers();
final List<Timer> histograms = errorMetrics.getExceptionTimerMetrics().getTimers();
assertThat(histograms).hasSize(1);
}

@Test
void testCounterErrorHandler() {
Logger logger = (Logger) LogManager.getLogger(CounterErrorHandler.class);
NBMock.LogAppender appender = NBMock.registerTestLogger(ERROR_HANDLER_APPENDER_NAME, logger, Level.INFO);
final Logger logger = (Logger) LogManager.getLogger(CounterErrorHandler.class);
final LogAppender appender = NBMock.registerTestLogger(NBErrorHandlerTest.ERROR_HANDLER_APPENDER_NAME, logger, Level.INFO);

ErrorMetrics errorMetrics = new ErrorMetrics(ActivityDef.parseActivityDef("alias=testalias_counters"));
NBErrorHandler eh = new NBErrorHandler(() -> "counter", () -> errorMetrics);
ErrorDetail detail = eh.handleError(runtimeException, 1, 2);
final ErrorMetrics errorMetrics = new ErrorMetrics(NBLabeledElement.forKV("activity","testalias_counters"));
final NBErrorHandler eh = new NBErrorHandler(() -> "counter", () -> errorMetrics);
final ErrorDetail detail = eh.handleError(this.runtimeException, 1, 2);
assertThat(detail.isRetryable()).isFalse();
List<Counter> histograms = errorMetrics.getExceptionCountMetrics().getCounters();
final List<Counter> histograms = errorMetrics.getExceptionCountMetrics().getCounters();
assertThat(histograms).hasSize(1);

logger.getContext().stop(); // force any async appenders to flush
@@ -122,14 +124,14 @@ class NBErrorHandlerTest {

@Test
void testCountErrorHandler() {
Logger logger = (Logger) LogManager.getLogger(CountErrorHandler.class);
NBMock.LogAppender appender = NBMock.registerTestLogger(ERROR_HANDLER_APPENDER_NAME, logger, Level.WARN);
final Logger logger = (Logger) LogManager.getLogger(CountErrorHandler.class);
final LogAppender appender = NBMock.registerTestLogger(NBErrorHandlerTest.ERROR_HANDLER_APPENDER_NAME, logger, Level.WARN);

ErrorMetrics errorMetrics = new ErrorMetrics(ActivityDef.parseActivityDef("alias=testalias_count"));
NBErrorHandler eh = new NBErrorHandler(() -> "count", () -> errorMetrics);
ErrorDetail detail = eh.handleError(runtimeException, 1, 2);
final ErrorMetrics errorMetrics = new ErrorMetrics(NBLabeledElement.forKV("activity","testalias_count"));
final NBErrorHandler eh = new NBErrorHandler(() -> "count", () -> errorMetrics);
final ErrorDetail detail = eh.handleError(this.runtimeException, 1, 2);
assertThat(detail.isRetryable()).isFalse();
List<Counter> histograms = errorMetrics.getExceptionCountMetrics().getCounters();
final List<Counter> histograms = errorMetrics.getExceptionCountMetrics().getCounters();
assertThat(histograms).hasSize(1);

logger.getContext().stop(); // force any async appenders to flush
@@ -142,19 +144,19 @@ class NBErrorHandlerTest {

@Test
void testMeterErrorHandler() {
ErrorMetrics errorMetrics = new ErrorMetrics(ActivityDef.parseActivityDef("alias=testalias_meters"));
NBErrorHandler eh = new NBErrorHandler(() -> "meter", () -> errorMetrics);
ErrorDetail detail = eh.handleError(runtimeException, 1, 2);
final ErrorMetrics errorMetrics = new ErrorMetrics(NBLabeledElement.forKV("activity","testalias_meters"));
final NBErrorHandler eh = new NBErrorHandler(() -> "meter", () -> errorMetrics);
final ErrorDetail detail = eh.handleError(this.runtimeException, 1, 2);
assertThat(detail.isRetryable()).isFalse();
List<Meter> histograms = errorMetrics.getExceptionMeterMetrics().getMeters();
final List<Meter> histograms = errorMetrics.getExceptionMeterMetrics().getMeters();
assertThat(histograms).hasSize(1);
}

@Test
void testCodeShorthand() {
ErrorMetrics errorMetrics = new ErrorMetrics(ActivityDef.parseActivityDef("alias=testalias_meters"));
NBErrorHandler eh = new NBErrorHandler(() -> "handler=code code=42", () -> errorMetrics);
ErrorDetail detail = eh.handleError(runtimeException, 1, 2);
final ErrorMetrics errorMetrics = new ErrorMetrics(NBLabeledElement.forKV("activity","testalias_meters"));
final NBErrorHandler eh = new NBErrorHandler(() -> "handler=code code=42", () -> errorMetrics);
final ErrorDetail detail = eh.handleError(this.runtimeException, 1, 2);
assertThat(detail.isRetryable()).isFalse();
assertThat(detail.resultCode).isEqualTo(42);
}
@@ -162,8 +164,8 @@ class NBErrorHandlerTest {
@Test
void testErrorLogAppender() {

Logger logger = (Logger) LogManager.getLogger(ErrorHandler.class);
NBMock.LogAppender appender = NBMock.registerTestLogger(ERROR_HANDLER_APPENDER_NAME, logger, Level.DEBUG);
final Logger logger = (Logger) LogManager.getLogger(ErrorHandler.class);
final LogAppender appender = NBMock.registerTestLogger(NBErrorHandlerTest.ERROR_HANDLER_APPENDER_NAME, logger, Level.DEBUG);

logger.debug("NBErrorHandler is cool.");
logger.debug("I second that.");
@@ -171,7 +173,7 @@ class NBErrorHandlerTest {
logger.getContext().stop(); // force any async appenders to flush
logger.getContext().start(); // resume processing

List<String> entries = appender.getEntries();
final List<String> entries = appender.getEntries();
assertThat(entries).hasSize(2);
assertThat(appender.getFirstEntry()).isEqualTo("NBErrorHandler is cool.");
assertThat(entries.get(1)).isEqualTo("I second that.");
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,11 +16,13 @@
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.engine.metrics.DeltaHdrHistogramReservoir;
|
||||
import io.nosqlbench.api.testutils.Bounds;
|
||||
import io.nosqlbench.api.testutils.Perf;
|
||||
import io.nosqlbench.api.testutils.Result;
|
||||
|
||||
import java.lang.Thread.UncaughtExceptionHandler;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
@@ -42,39 +44,39 @@ public class RateLimiterPerfTestMethods {
|
||||
// return perf;
|
||||
// }
|
||||
|
||||
public Result systemTimeOverhead(RateLimiter rl) {
|
||||
Bounds bounds = new Bounds(1000, 2);
|
||||
Perf perf = new Perf("nanotime");
|
||||
public Result systemTimeOverhead(final RateLimiter rl) {
|
||||
final Bounds bounds = new Bounds(1000, 2);
|
||||
final Perf perf = new Perf("nanotime");
|
||||
|
||||
while (!perf.isConverged(Result::getOpsPerSec, 0.01d, 3)) {
|
||||
System.out.println("testing with opcount=" + bounds.getNextValue());
|
||||
long start = System.nanoTime();
|
||||
final long start = System.nanoTime();
|
||||
for (long iter = 0; iter < bounds.getValue(); iter++) {
|
||||
long result = System.nanoTime();
|
||||
final long result = System.nanoTime();
|
||||
}
|
||||
long end = System.nanoTime();
|
||||
final long end = System.nanoTime();
|
||||
|
||||
perf.add("nanotime/" + bounds.getValue(), start, end, bounds.getValue());
|
||||
}
|
||||
|
||||
double[] deltas = perf.getDeltas(Result::getOpsPerSec);
|
||||
final double[] deltas = perf.getDeltas(Result::getOpsPerSec);
|
||||
return perf.getLastResult();
|
||||
}
|
||||
|
||||
public Result rateLimiterSingleThreadedConvergence(Function<RateSpec, RateLimiter> rlf, RateSpec rs, long startingCycles, double margin) {
|
||||
public Result rateLimiterSingleThreadedConvergence(final Function<RateSpec, RateLimiter> rlf, final RateSpec rs, final long startingCycles, final double margin) {
|
||||
//rl.applyRateSpec(rl.getRateSpec().withOpsPerSecond(1E9));
|
||||
Bounds bounds = new Bounds(startingCycles, 2);
|
||||
Perf perf = new Perf("nanotime");
|
||||
final Bounds bounds = new Bounds(startingCycles, 2);
|
||||
final Perf perf = new Perf("nanotime");
|
||||
|
||||
while (!perf.isConverged(Result::getOpsPerSec, margin, 3)) {
|
||||
System.out.println("testing with opcount=" + bounds.getNextValue() + " spec=" + rs);
|
||||
|
||||
RateLimiter rl = rlf.apply(rs);
|
||||
long start = System.nanoTime();
|
||||
final RateLimiter rl = rlf.apply(rs);
|
||||
final long start = System.nanoTime();
|
||||
for (long iter = 0; iter < bounds.getValue(); iter++) {
|
||||
long result = rl.maybeWaitForOp();
|
||||
final long result = rl.maybeWaitForOp();
|
||||
}
|
||||
long end = System.nanoTime();
|
||||
final long end = System.nanoTime();
|
||||
|
||||
perf.add("rl/" + bounds.getValue(), start, end, bounds.getValue());
|
||||
System.out.println(perf.getLastResult());
|
||||
@@ -99,28 +101,28 @@ public class RateLimiterPerfTestMethods {
|
||||
* @param count_rate_division_clientrate
|
||||
* @return
|
||||
*/
|
||||
long[] testRateChanges(RateLimiter rl, int... count_rate_division_clientrate) {
|
||||
long[] testRateChanges(final RateLimiter rl, final int... count_rate_division_clientrate) {
|
||||
System.out.println("Running " + Thread.currentThread().getStackTrace()[1].getMethodName());
|
||||
|
||||
List<Long> results = new ArrayList<>();
|
||||
final List<Long> results = new ArrayList<>();
|
||||
|
||||
for (int idx = 0; idx < count_rate_division_clientrate.length; idx += 4) {
|
||||
int count = count_rate_division_clientrate[idx];
|
||||
int rate = count_rate_division_clientrate[idx + 1];
|
||||
int divisions = count_rate_division_clientrate[idx + 2];
|
||||
int clientrate = count_rate_division_clientrate[idx + 3];
|
||||
long clientnanos = (long) (1_000_000_000.0D / clientrate);
|
||||
final int count = count_rate_division_clientrate[idx];
|
||||
final int rate = count_rate_division_clientrate[idx + 1];
|
||||
final int divisions = count_rate_division_clientrate[idx + 2];
|
||||
final int clientrate = count_rate_division_clientrate[idx + 3];
|
||||
final long clientnanos = (long) (1_000_000_000.0D / clientrate);
|
||||
|
||||
if (rl instanceof DiagUpdateRate) {
|
||||
((DiagUpdateRate) rl).setDiagModulo(count / divisions);
|
||||
System.out.println("updating every " + (count / divisions) + " calls (" + count + "/" + divisions + ")");
|
||||
System.out.println("updating every " + count / divisions + " calls (" + count + '/' + divisions + ')');
|
||||
}
|
||||
System.out.println("count=" + count + ", getOpsPerSec=" + rate + ", div=" + divisions + ", clientrate=" + clientrate);
|
||||
System.out.println("client nanos: " + clientnanos);
|
||||
|
||||
long startAt = System.nanoTime();
|
||||
final long startAt = System.nanoTime();
|
||||
rl.applyRateSpec(rl.getRateSpec().withOpsPerSecond(rate));
|
||||
int perDivision = count / divisions;
|
||||
final int perDivision = count / divisions;
|
||||
long divDelay = 0L;
|
||||
for (int div = 0; div < divisions; div++) {
|
||||
long then = System.nanoTime();
|
||||
@@ -134,25 +136,25 @@ public class RateLimiterPerfTestMethods {
|
||||
results.add(divDelay);
|
||||
}
|
||||
|
||||
long endAt = System.nanoTime();
|
||||
double duration = (endAt - startAt) / 1000000000.0d;
|
||||
double acqops = (count / duration);
|
||||
final long endAt = System.nanoTime();
|
||||
final double duration = (endAt - startAt) / 1000000000.0d;
|
||||
final double acqops = count / duration;
|
||||
|
||||
System.out.println(rl);
|
||||
|
||||
System.out.println(ANSI_Blue +
|
||||
String.format(
|
||||
"spec: %s\n count: %9d, duration %.5fS, acquires/s %.3f, nanos/op: %f\n delay: %d (%.5fS)",
|
||||
rl.getRateSpec(),
|
||||
count, duration, acqops, (1_000_000_000.0d / acqops), divDelay, (divDelay / 1_000_000_000.0d)) +
|
||||
ANSI_Reset);
|
||||
String.format(
|
||||
"spec: %s\n count: %9d, duration %.5fS, acquires/s %.3f, nanos/op: %f\n delay: %d (%.5fS)",
|
||||
rl.getRateSpec(),
|
||||
count, duration, acqops, 1_000_000_000.0d / acqops, divDelay, divDelay / 1_000_000_000.0d) +
|
||||
ANSI_Reset);
|
||||
|
||||
}
|
||||
|
||||
long[] delays = results.stream().mapToLong(Long::longValue).toArray();
|
||||
final long[] delays = results.stream().mapToLong(Long::longValue).toArray();
|
||||
|
||||
String delaySummary = Arrays.stream(delays).mapToDouble(d -> (double) d / 1_000_000_000.0D).mapToObj(d -> String.format("%.3f", d))
|
||||
.collect(Collectors.joining(","));
|
||||
final String delaySummary = Arrays.stream(delays).mapToDouble(d -> d / 1_000_000_000.0D).mapToObj(d -> String.format("%.3f", d))
|
||||
.collect(Collectors.joining(","));
|
||||
System.out.println("delays in seconds:\n" + delaySummary);
|
||||
System.out.println("delays in ns:\n" + Arrays.toString(delays));
|
||||
|
||||
@@ -160,12 +162,12 @@ public class RateLimiterPerfTestMethods {
|
||||
|
||||
}
|
||||
|
||||
public Result rateLimiterContendedConvergence(int threads, Function<RateSpec, RateLimiter> rlFunc, RateSpec rateSpec, int initialIterations, double margin) {
|
||||
Bounds bounds = new Bounds(initialIterations, 2);
|
||||
Perf perf = new Perf("contended with " + threads + " threads");
|
||||
public Result rateLimiterContendedConvergence(final int threads, final Function<RateSpec, RateLimiter> rlFunc, final RateSpec rateSpec, final int initialIterations, final double margin) {
|
||||
final Bounds bounds = new Bounds(initialIterations, 2);
|
||||
final Perf perf = new Perf("contended with " + threads + " threads");
|
||||
|
||||
while (!perf.isConverged(Result::getOpsPerSec, margin, 3)) {
|
||||
Perf delegateperf = testRateLimiterMultiThreadedContention(rlFunc, rateSpec, initialIterations, threads);
|
||||
final Perf delegateperf = this.testRateLimiterMultiThreadedContention(rlFunc, rateSpec, initialIterations, threads);
|
||||
perf.add(delegateperf.getLastResult());
|
||||
}
|
||||
return perf.getLastResult();
|
||||
@@ -175,48 +177,42 @@ public class RateLimiterPerfTestMethods {
|
||||
* This a low-overhead test for multi-threaded access to the same getOpsPerSec limiter. It calculates the
|
||||
* effective concurrent getOpsPerSec under atomic contention.
|
||||
*/
|
||||
public Perf testRateLimiterMultiThreadedContention(Function<RateSpec, RateLimiter> rlFunc, RateSpec spec, long iterations, int threadCount) {
|
||||
public Perf testRateLimiterMultiThreadedContention(final Function<RateSpec, RateLimiter> rlFunc, final RateSpec spec, final long iterations, final int threadCount) {
|
||||
System.out.println("Running " + Thread.currentThread().getStackTrace()[1].getMethodName());
|
||||
|
||||
RateLimiter rl = rlFunc.apply(spec);
|
||||
double rate = spec.getRate();
|
||||
int iterationsPerThread = (int) (iterations / threadCount);
|
||||
if (iterationsPerThread >= Integer.MAX_VALUE) {
|
||||
throw new RuntimeException("iterations per thread too high with (count,threads)=(" + iterations + "," + threadCount);
|
||||
}
|
||||
RateLimiterPerfTestMethods.TestExceptionHandler errorhandler = new RateLimiterPerfTestMethods.TestExceptionHandler();
|
||||
RateLimiterPerfTestMethods.TestThreadFactory threadFactory = new RateLimiterPerfTestMethods.TestThreadFactory(errorhandler);
|
||||
ExecutorService tp = Executors.newFixedThreadPool(threadCount + 1, threadFactory);
|
||||
final RateLimiter rl = rlFunc.apply(spec);
|
||||
final double rate = spec.getRate();
|
||||
final int iterationsPerThread = (int) (iterations / threadCount);
|
||||
if (Integer.MAX_VALUE <= iterationsPerThread)
|
||||
throw new RuntimeException("iterations per thread too high with (count,threads)=(" + iterations + ',' + threadCount);
|
||||
final TestExceptionHandler errorhandler = new TestExceptionHandler();
|
||||
final TestThreadFactory threadFactory = new TestThreadFactory(errorhandler);
|
||||
final ExecutorService tp = Executors.newFixedThreadPool(threadCount + 1, threadFactory);
|
||||
|
||||
System.out.format("Running %,d iterations split over %,d threads (%,d per) at %,.3f ops/s\n", iterations, threadCount, (iterations / threadCount), rate);
|
||||
RateLimiterPerfTestMethods.Acquirer[] threads = new RateLimiterPerfTestMethods.Acquirer[threadCount];
|
||||
DeltaHdrHistogramReservoir stats = new DeltaHdrHistogramReservoir("times", 5);
|
||||
System.out.format("Running %,d iterations split over %,d threads (%,d per) at %,.3f ops/s\n", iterations, threadCount, iterations / threadCount, rate);
|
||||
final Acquirer[] threads = new Acquirer[threadCount];
|
||||
final DeltaHdrHistogramReservoir stats = new DeltaHdrHistogramReservoir(NBLabels.forKV("name", "times"), 5);
|
||||
|
||||
CyclicBarrier barrier = new CyclicBarrier(threadCount + 1);
|
||||
final CyclicBarrier barrier = new CyclicBarrier(threadCount + 1);
|
||||
|
||||
RateLimiterStarter starter = new RateLimiterStarter(barrier, rl);
|
||||
final RateLimiterStarter starter = new RateLimiterStarter(barrier, rl);
|
||||
|
||||
for (int i = 0; i < threadCount; i++) {
|
||||
threads[i] = new RateLimiterPerfTestMethods.Acquirer(i, rl, iterationsPerThread, stats, barrier);
|
||||
// threads[i] = new RateLimiterPerfTestMethods.Acquirer(i, rl, (int) (iterations / threadCount), stats, barrier);
|
||||
}
|
||||
// threads[i] = new RateLimiterPerfTestMethods.Acquirer(i, rl, (int) (iterations / threadCount), stats, barrier);
|
||||
for (int i = 0; i < threadCount; i++) threads[i] = new Acquirer(i, rl, iterationsPerThread, stats, barrier);
|
||||
|
||||
tp.execute(starter);
|
||||
|
||||
System.out.println(rl);
|
||||
System.out.format("submitting (%d threads)...\n", threads.length);
|
||||
List<Future<Result>> futures = new ArrayList<>();
|
||||
for (int i = 0; i < threadCount; i++) {
|
||||
futures.add(tp.submit((Callable<Result>) threads[i]));
|
||||
}
|
||||
final List<Future<Result>> futures = new ArrayList<>();
|
||||
for (int i = 0; i < threadCount; i++) futures.add(tp.submit((Callable<Result>) threads[i]));
|
||||
System.out.format("submitted (%d threads)...\n", threads.length);
|
||||
|
||||
try {
|
||||
tp.shutdown();
|
||||
if (!tp.awaitTermination(1000, TimeUnit.SECONDS)) {
|
||||
if (!tp.awaitTermination(1000, TimeUnit.SECONDS))
|
||||
throw new RuntimeException("Failed to shutdown thread pool.");
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
} catch (final InterruptedException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
@@ -224,11 +220,11 @@ public class RateLimiterPerfTestMethods {
|
||||
|
||||
System.out.println(rl);
|
||||
|
||||
Perf aggregatePerf = new Perf("contended with " + threadCount + " threads for " + iterations + " iterations for " + rl.getRateSpec().toString());
|
||||
final Perf aggregatePerf = new Perf("contended with " + threadCount + " threads for " + iterations + " iterations for " + rl.getRateSpec().toString());
|
||||
futures.stream().map(f -> {
|
||||
try {
|
||||
return f.get();
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}).forEachOrdered(aggregatePerf::add);
|
||||
@@ -239,7 +235,7 @@ public class RateLimiterPerfTestMethods {
|
||||
// String refillLog = ((HybridRateLimiter) rl).getRefillLog();
|
||||
// System.out.println("refill log:\n" + refillLog);
|
||||
// }
|
||||
Perf perf = aggregatePerf.reduceConcurrent();
|
||||
final Perf perf = aggregatePerf.reduceConcurrent();
|
||||
return perf;
|
||||
|
||||
}
|
||||
@@ -248,7 +244,7 @@ public class RateLimiterPerfTestMethods {
|
||||
private final CyclicBarrier barrier;
|
||||
private final RateLimiter rl;
|
||||
|
||||
public RateLimiterStarter(CyclicBarrier barrier, RateLimiter rl) {
|
||||
public RateLimiterStarter(final CyclicBarrier barrier, final RateLimiter rl) {
|
||||
this.barrier = barrier;
|
||||
this.rl = rl;
|
||||
}
|
||||
@@ -257,31 +253,29 @@ public class RateLimiterPerfTestMethods {
|
||||
public void run() {
|
||||
try {
|
||||
// System.out.println("awaiting barrier (starter) (" + barrier.getNumberWaiting() + " awaiting)");
|
||||
barrier.await(60, TimeUnit.SECONDS);
|
||||
this.barrier.await(60, TimeUnit.SECONDS);
|
||||
// System.out.println("started the rate limiter (starter) (" + barrier.getNumberWaiting() + " awaiting)");
|
||||
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
rl.start();
|
||||
this.rl.start();
|
||||
}
|
||||
}
|
||||
|
||||
private static class TestExceptionHandler implements Thread.UncaughtExceptionHandler {
|
||||
private static class TestExceptionHandler implements UncaughtExceptionHandler {
|
||||
public List<Throwable> throwables = new ArrayList<>();
|
||||
public List<Thread> threads = new ArrayList<>();
|
||||
|
||||
@Override
|
||||
public void uncaughtException(Thread t, Throwable e) {
|
||||
threads.add(t);
|
||||
throwables.add(e);
|
||||
public void uncaughtException(final Thread t, final Throwable e) {
|
||||
this.threads.add(t);
|
||||
this.throwables.add(e);
|
||||
System.out.println("uncaught exception on thread " + t.getName() + ": " + e.toString());
|
||||
}
|
||||
|
||||
public void throwIfAny() {
|
||||
if (throwables.size() > 0) {
|
||||
throw new RuntimeException(throwables.get(0));
|
||||
}
|
||||
if (0 < throwables.size()) throw new RuntimeException(this.throwables.get(0));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -292,8 +286,8 @@ public class RateLimiterPerfTestMethods {
|
||||
private final CyclicBarrier barrier;
|
||||
private final long iterations;
|
||||
|
||||
public Acquirer(int i, RateLimiter limiter, int iterations, DeltaHdrHistogramReservoir reservoir, CyclicBarrier barrier) {
|
||||
this.threadIdx = i;
|
||||
public Acquirer(final int i, final RateLimiter limiter, final int iterations, final DeltaHdrHistogramReservoir reservoir, final CyclicBarrier barrier) {
|
||||
threadIdx = i;
|
||||
this.limiter = limiter;
|
||||
this.iterations = iterations;
|
||||
this.reservoir = reservoir;
|
||||
@@ -304,47 +298,41 @@ public class RateLimiterPerfTestMethods {
|
||||
public Result call() {
|
||||
// synchronized (barrier) {
|
||||
try {
|
||||
if (this.threadIdx == 0) {
|
||||
System.out.println("awaiting barrier");
|
||||
}
|
||||
barrier.await(60, TimeUnit.SECONDS);
|
||||
if (this.threadIdx == 0) {
|
||||
System.out.println("starting all threads");
|
||||
}
|
||||
if (0 == this.threadIdx) System.out.println("awaiting barrier");
|
||||
this.barrier.await(60, TimeUnit.SECONDS);
|
||||
if (0 == this.threadIdx) System.out.println("starting all threads");
|
||||
|
||||
} catch (Exception be) {
|
||||
} catch (final Exception be) {
|
||||
throw new RuntimeException(be); // This should not happen unless the test is broken
|
||||
}
|
||||
// }
|
||||
long startTime = System.nanoTime();
|
||||
for (int i = 0; i < iterations; i++) {
|
||||
long time = limiter.maybeWaitForOp();
|
||||
final long startTime = System.nanoTime();
|
||||
for (int i = 0; i < this.iterations; i++) {
|
||||
final long time = this.limiter.maybeWaitForOp();
|
||||
}
|
||||
long endTime = System.nanoTime();
|
||||
return new Result("thread " + this.threadIdx, startTime, endTime, iterations);
|
||||
final long endTime = System.nanoTime();
|
||||
return new Result("thread " + threadIdx, startTime, endTime, this.iterations);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
for (int i = 0; i < iterations; i++) {
|
||||
limiter.maybeWaitForOp();
|
||||
}
|
||||
for (int i = 0; i < this.iterations; i++) this.limiter.maybeWaitForOp();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private static class TestThreadFactory implements ThreadFactory {
|
||||
|
||||
private final Thread.UncaughtExceptionHandler handler;
|
||||
private final UncaughtExceptionHandler handler;
|
||||
|
||||
public TestThreadFactory(Thread.UncaughtExceptionHandler uceh) {
|
||||
this.handler = uceh;
|
||||
public TestThreadFactory(final UncaughtExceptionHandler uceh) {
|
||||
handler = uceh;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
Thread t = new Thread(r);
|
||||
t.setUncaughtExceptionHandler(handler);
|
||||
public Thread newThread(final Runnable r) {
|
||||
final Thread t = new Thread(r);
|
||||
t.setUncaughtExceptionHandler(this.handler);
|
||||
return t;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,9 +16,10 @@
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.testutils.Perf;
|
||||
import io.nosqlbench.api.testutils.Result;
|
||||
import io.nosqlbench.engine.api.activityapi.ratelimits.RateSpec.Verb;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
@@ -27,83 +28,83 @@ import java.util.function.Function;
|
||||
|
||||
public class TestHybridRateLimiterPerf {
|
||||
|
||||
private final Function<RateSpec, RateLimiter> rlFunction = rs -> new HybridRateLimiter(ActivityDef.parseActivityDef("alias=tokenrl"),"hybrid", rs.withVerb(RateSpec.Verb.start));
|
||||
private final Function<RateSpec, RateLimiter> rlFunction = rs -> new HybridRateLimiter(NBLabeledElement.EMPTY,"hybrid", rs.withVerb(Verb.start));
|
||||
private final RateLimiterPerfTestMethods methods = new RateLimiterPerfTestMethods();
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e9() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E9, 1.1),10_000_000,0.01d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E9, 1.1),10_000_000,0.01d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e8() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E8, 1.1),50_000_000,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E8, 1.1),50_000_000,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e7() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E7, 1.1),5_000_000,0.01d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E7, 1.1),5_000_000,0.01d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e6() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E6, 1.1),500_000,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E6, 1.1),500_000,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e5() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E5, 1.1),50_000,0.01d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E5, 1.1),50_000,0.01d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e4() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E4, 1.1),5_000,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E4, 1.1),5_000,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e3() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E3, 1.1),500,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E3, 1.1),500,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e2() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E2, 1.1),50,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E2, 1.1),50,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e1() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E1, 1.1),5,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E1, 1.1),5,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e0() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E0, 1.1),2,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E0, 1.1),2,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testePerf1eN1() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E-1, 1.1),1,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E-1, 1.1),1,0.005d);
|
||||
System.out.println(result);
|
||||
|
||||
}
|
||||
@@ -111,14 +112,14 @@ public class TestHybridRateLimiterPerf {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_160threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 10_000_000,160);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 10_000_000,160);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_80threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 10_000_000,80);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 10_000_000,80);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -131,7 +132,7 @@ public class TestHybridRateLimiterPerf {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_40threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 10_000_000,40);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 10_000_000,40);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -151,7 +152,7 @@ public class TestHybridRateLimiterPerf {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_20threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 10_000_000,20);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 10_000_000,20);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -164,7 +165,7 @@ public class TestHybridRateLimiterPerf {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_10threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 10_000_000,10);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 10_000_000,10);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -178,7 +179,7 @@ public class TestHybridRateLimiterPerf {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_5threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 40_000_000,5);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 40_000_000,5);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,8 +16,9 @@
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.testutils.Perf;
|
||||
import io.nosqlbench.engine.api.activityapi.ratelimits.RateSpec.Verb;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
@@ -32,7 +33,7 @@ import java.util.function.Function;
|
||||
*/
|
||||
public class TestRateLimiterPerf1E7 {
|
||||
|
||||
private final Function<RateSpec, RateLimiter> rlFunction = rs -> new HybridRateLimiter(ActivityDef.parseActivityDef("alias=tokenrl"),"hybrid", rs.withVerb(RateSpec.Verb.configure));
|
||||
private final Function<RateSpec, RateLimiter> rlFunction = rs -> new HybridRateLimiter(NBLabeledElement.forKV("alias","tokenrl"),"hybrid", rs.withVerb(Verb.configure));
|
||||
private final RateLimiterPerfTestMethods methods = new RateLimiterPerfTestMethods();
|
||||
|
||||
// 160 threads at 10_000_000 ops/s
|
||||
@@ -41,7 +42,7 @@ public class TestRateLimiterPerf1E7 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test10Mops_160threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E7, 1.1), 20_000_000,160);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E7, 1.1), 20_000_000,160);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -51,7 +52,7 @@ public class TestRateLimiterPerf1E7 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test10Mops_80threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E7, 1.1), 20_000_000,80);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E7, 1.1), 20_000_000,80);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -61,7 +62,7 @@ public class TestRateLimiterPerf1E7 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test10Mops_40threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E7, 1.1), 20_000_000,40);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E7, 1.1), 20_000_000,40);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -71,7 +72,7 @@ public class TestRateLimiterPerf1E7 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test10Mops_20threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E7, 10), 20_000_000,20);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E7, 10), 20_000_000,20);
|
||||
System.out.println(perf.getLastResult());
|
||||
|
||||
}
|
||||
@@ -85,7 +86,7 @@ public class TestRateLimiterPerf1E7 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test10Mops_10threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E7, 1.1), 20_000_000,10);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E7, 1.1), 20_000_000,10);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -100,7 +101,7 @@ public class TestRateLimiterPerf1E7 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test10Mops_5threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E7, 1.1), 20_000_000,5);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E7, 1.1), 20_000_000,5);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,8 +16,9 @@
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.testutils.Perf;
|
||||
import io.nosqlbench.engine.api.activityapi.ratelimits.RateSpec.Verb;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
@@ -32,20 +33,22 @@ import java.util.function.Function;
|
||||
*/
|
||||
public class TestRateLimiterPerf1E8 {
|
||||
|
||||
NBLabeledElement def = NBLabeledElement.forKV("alias","tokenrl");
|
||||
|
||||
private final Function<RateSpec, RateLimiter> rlFunction =
|
||||
rs -> new HybridRateLimiter(
|
||||
ActivityDef.parseActivityDef("alias=tokenrl"),
|
||||
this.def,
|
||||
"hybrid",
|
||||
rs.withVerb(RateSpec.Verb.configure)
|
||||
rs.withVerb(Verb.configure)
|
||||
);
|
||||
private final RateLimiterPerfTestMethods methods = new RateLimiterPerfTestMethods();
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_4000threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(
|
||||
rlFunction,
|
||||
new RateSpec(1E8, 1.1),
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(
|
||||
this.rlFunction,
|
||||
new RateSpec(1.0E8, 1.1),
|
||||
100_000_000,
|
||||
4000
|
||||
);
|
||||
@@ -55,9 +58,9 @@ public class TestRateLimiterPerf1E8 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_2000threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(
|
||||
rlFunction,
|
||||
new RateSpec(1E8, 1.1),
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(
|
||||
this.rlFunction,
|
||||
new RateSpec(1.0E8, 1.1),
|
||||
100_000_000,
|
||||
2000
|
||||
);
|
||||
@@ -67,9 +70,9 @@ public class TestRateLimiterPerf1E8 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_1000threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(
|
||||
rlFunction,
|
||||
new RateSpec(1E8, 1.1),
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(
|
||||
this.rlFunction,
|
||||
new RateSpec(1.0E8, 1.1),
|
||||
100_000_000,
|
||||
1000
|
||||
);
|
||||
@@ -79,9 +82,9 @@ public class TestRateLimiterPerf1E8 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_320threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(
|
||||
rlFunction,
|
||||
new RateSpec(1E8, 1.1),
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(
|
||||
this.rlFunction,
|
||||
new RateSpec(1.0E8, 1.1),
|
||||
100_000_000,
|
||||
320
|
||||
);
|
||||
@@ -98,9 +101,9 @@ public class TestRateLimiterPerf1E8 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_160threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(
|
||||
rlFunction,
|
||||
new RateSpec(1E8, 1.1),
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(
|
||||
this.rlFunction,
|
||||
new RateSpec(1.0E8, 1.1),
|
||||
100_000_000,
|
||||
160
|
||||
);
|
||||
@@ -114,7 +117,7 @@ public class TestRateLimiterPerf1E8 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_80threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 100_000_000, 80);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 100_000_000, 80);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -127,7 +130,7 @@ public class TestRateLimiterPerf1E8 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_40threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 100_000_000, 40);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 100_000_000, 40);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -147,7 +150,7 @@ public class TestRateLimiterPerf1E8 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_20threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 100_000_000, 20);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 100_000_000, 20);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -163,7 +166,7 @@ public class TestRateLimiterPerf1E8 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_10threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 100_000_000, 10);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 100_000_000, 10);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
@@ -180,7 +183,7 @@ public class TestRateLimiterPerf1E8 {
|
||||
@Test
|
||||
@Disabled
|
||||
public void test100Mops_5threads() {
|
||||
Perf perf = methods.testRateLimiterMultiThreadedContention(rlFunction, new RateSpec(1E8, 1.1), 100_000_000, 5);
|
||||
final Perf perf = this.methods.testRateLimiterMultiThreadedContention(this.rlFunction, new RateSpec(1.0E8, 1.1), 100_000_000, 5);
|
||||
System.out.println(perf.getLastResult());
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,8 +16,9 @@
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.testutils.Result;
|
||||
import io.nosqlbench.engine.api.activityapi.ratelimits.RateSpec.Verb;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
@@ -32,83 +33,83 @@ import java.util.function.Function;
|
||||
*/
|
||||
public class TestRateLimiterPerfSingle {
|
||||
|
||||
private final Function<RateSpec, RateLimiter> rlFunction = rs -> new HybridRateLimiter(ActivityDef.parseActivityDef("alias=tokenrl"),"hybrid", rs.withVerb(RateSpec.Verb.start));
|
||||
private final Function<RateSpec, RateLimiter> rlFunction = rs -> new HybridRateLimiter(NBLabeledElement.forKV("alias","tokenrl"),"hybrid", rs.withVerb(Verb.start));
|
||||
private final RateLimiterPerfTestMethods methods = new RateLimiterPerfTestMethods();
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e9() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E9, 1.1),10_000_000,0.01d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E9, 1.1),10_000_000,0.01d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e8() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E8, 1.1),50_000_000,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E8, 1.1),50_000_000,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e7() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E7, 1.1),5_000_000,0.01d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E7, 1.1),5_000_000,0.01d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e6() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E6, 1.1),500_000,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E6, 1.1),500_000,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e5() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E5, 1.1),50_000,0.01d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E5, 1.1),50_000,0.01d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e4() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E4, 1.1),5_000,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E4, 1.1),5_000,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e3() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E3, 1.1),500,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E3, 1.1),500,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e2() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E2, 1.1),50,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E2, 1.1),50,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e1() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E1, 1.1),5,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E1, 1.1),5,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testPerf1e0() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E0, 1.1),2,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E0, 1.1),2,0.005d);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testePerf1eN1() {
|
||||
Result result = methods.rateLimiterSingleThreadedConvergence(rlFunction,new RateSpec(1E-1, 1.1),1,0.005d);
|
||||
final Result result = this.methods.rateLimiterSingleThreadedConvergence(this.rlFunction,new RateSpec(1.0E-1, 1.1),1,0.005d);
|
||||
System.out.println(result);
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
@@ -24,27 +24,27 @@ public class TestableHybridRateLimiter extends HybridRateLimiter {
|
||||
|
||||
private final AtomicLong clock;
|
||||
|
||||
public TestableHybridRateLimiter(AtomicLong clock, RateSpec rateSpec, NBNamedElement def) {
|
||||
public TestableHybridRateLimiter(final AtomicLong clock, final RateSpec rateSpec, final NBLabeledElement def) {
|
||||
super(def, "test", rateSpec);
|
||||
applyRateSpec(rateSpec);
|
||||
setLabel("test");
|
||||
this.applyRateSpec(rateSpec);
|
||||
this.setLabel("test");
|
||||
this.clock = clock;
|
||||
init(def);
|
||||
this.init(def);
|
||||
}
|
||||
|
||||
public long setClock(long newValue) {
|
||||
long oldValue = clock.get();
|
||||
clock.set(newValue);
|
||||
public long setClock(final long newValue) {
|
||||
final long oldValue = this.clock.get();
|
||||
this.clock.set(newValue);
|
||||
return oldValue;
|
||||
}
|
||||
|
||||
public long getClock() {
|
||||
return clock.get();
|
||||
return this.clock.get();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected long getNanoClockTime() {
|
||||
return clock.get();
|
||||
return this.clock.get();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,8 @@
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.ratelimits;
|
||||
|
||||
import io.nosqlbench.api.config.NBNamedElement;
|
||||
import io.nosqlbench.api.config.NBLabeledElement;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.api.engine.activityimpl.ParameterMap;
|
||||
import org.junit.jupiter.api.Test;
|
||||
@@ -25,11 +26,13 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class TokenPoolTest {
|
||||
|
||||
ActivityDef def = new ActivityDef(ParameterMap.parseOrException("alias=testing"));
|
||||
ActivityDef adef = new ActivityDef(ParameterMap.parseOrException("alias=testing"));
|
||||
NBLabeledElement def = NBLabeledElement.forMap(this.adef.getParams().getStringStringMap());
|
||||
|
||||
|
||||
@Test
|
||||
public void testBackfillFullRate() {
|
||||
ThreadDrivenTokenPool p = new ThreadDrivenTokenPool(new RateSpec(10000000, 1.1), def);
|
||||
ThreadDrivenTokenPool p = new ThreadDrivenTokenPool(new RateSpec(10000000, 1.1), this.def);
|
||||
assertThat(p.refill(1000000L)).isEqualTo(1000000L);
|
||||
assertThat(p.getWaitPool()).isEqualTo(0L);
|
||||
assertThat(p.refill(100L)).isEqualTo(1000100);
|
||||
@@ -60,10 +63,10 @@ public class TokenPoolTest {
|
||||
assertThat(p.getWaitTime()).isEqualTo(10000000L);
|
||||
|
||||
RateSpec s2 = new RateSpec(1000000L, 1.10D);
|
||||
p.apply(new NBNamedElement() {
|
||||
p.apply(new NBLabeledElement() {
|
||||
@Override
|
||||
public String getName() {
|
||||
return "test";
|
||||
public NBLabels getLabels() {
|
||||
return NBLabels.forKV("name","test");
|
||||
}
|
||||
},s2);
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,9 +16,10 @@
|
||||
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.engine.metrics.DeltaHdrHistogramReservoir;
|
||||
import io.nosqlbench.api.engine.metrics.HistoIntervalLogger;
|
||||
import io.nosqlbench.api.engine.metrics.NicerHistogram;
|
||||
import io.nosqlbench.api.engine.metrics.instruments.NBMetricHistogram;
|
||||
import org.HdrHistogram.EncodableHistogram;
|
||||
import org.HdrHistogram.Histogram;
|
||||
import org.HdrHistogram.HistogramLogReader;
|
||||
@@ -44,16 +45,16 @@ public class HistoIntervalLoggerTest {
|
||||
|
||||
final int significantDigits = 4;
|
||||
|
||||
NicerHistogram nicerHistogram = new NicerHistogram(
|
||||
"histo1", new DeltaHdrHistogramReservoir("histo1", significantDigits));
|
||||
NBMetricHistogram NBHistogram = new NBMetricHistogram(
|
||||
NBLabels.forKV("name", "histo1"), new DeltaHdrHistogramReservoir(NBLabels.forKV("name", "histo1"), significantDigits));
|
||||
|
||||
hil.onHistogramAdded("histo1",nicerHistogram);
|
||||
hil.onHistogramAdded("histo1", NBHistogram);
|
||||
|
||||
nicerHistogram.update(1L);
|
||||
NBHistogram.update(1L);
|
||||
delay(1001);
|
||||
nicerHistogram.update(1000000L);
|
||||
NBHistogram.update(1000000L);
|
||||
delay(1001);
|
||||
nicerHistogram.update(1000L);
|
||||
NBHistogram.update(1000L);
|
||||
hil.onHistogramRemoved("histo1");
|
||||
|
||||
hil.closeMetrics();
|
||||
@@ -63,7 +64,7 @@ public class HistoIntervalLoggerTest {
|
||||
EncodableHistogram histogram;
|
||||
while (true) {
|
||||
histogram = hlr.nextIntervalHistogram();
|
||||
if (histogram==null) {
|
||||
if (null == histogram) {
|
||||
break;
|
||||
}
|
||||
histos.add(histogram);
|
||||
@@ -71,15 +72,15 @@ public class HistoIntervalLoggerTest {
|
||||
|
||||
assertThat(histos.size()).isEqualTo(2);
|
||||
assertThat(histos.get(0)).isInstanceOf(Histogram.class);
|
||||
assertThat(((Histogram)histos.get(0)).getNumberOfSignificantValueDigits()).isEqualTo(significantDigits);
|
||||
assertThat(((Histogram) histos.get(0)).getNumberOfSignificantValueDigits()).isEqualTo(significantDigits);
|
||||
}
|
||||
|
||||
private void delay(int i) {
|
||||
long now = System.currentTimeMillis();
|
||||
long target = now+i;
|
||||
while (System.currentTimeMillis()<target) {
|
||||
long target = now + i;
|
||||
while (System.currentTimeMillis() < target) {
|
||||
try {
|
||||
Thread.sleep(target-System.currentTimeMillis());
|
||||
Thread.sleep(target - System.currentTimeMillis());
|
||||
} catch (InterruptedException ignored) {
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,26 +16,28 @@
|
||||
|
||||
package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.engine.metrics.ConvenientSnapshot;
|
||||
import io.nosqlbench.api.engine.metrics.DeltaHdrHistogramReservoir;
|
||||
import io.nosqlbench.api.engine.metrics.NicerHistogram;
|
||||
import io.nosqlbench.api.engine.metrics.instruments.NBMetricHistogram;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class NicerHistogramTest {
|
||||
public class NBMetricHistogramTest {
|
||||
|
||||
@Test
|
||||
public void testNicerHistogramValues() {
|
||||
NicerHistogram nh = new NicerHistogram("testhisto",new DeltaHdrHistogramReservoir("testhisto",4));
|
||||
for (int i = 1; i <= 100; i++) {
|
||||
NBMetricHistogram nh = new NBMetricHistogram(NBLabels.forKV("name","testhisto"), new DeltaHdrHistogramReservoir(
|
||||
NBLabels.forKV("name", "testhisto"), 4));
|
||||
for (int i = 1; 100 >= i; i++) {
|
||||
nh.update(i);
|
||||
}
|
||||
ConvenientSnapshot snapshot = nh.getSnapshot();
|
||||
assertThat(snapshot.getMax()).isEqualTo(100);
|
||||
|
||||
nh.getDeltaSnapshot(500); // Just to reset
|
||||
for (int i=1; i<= 200; i++ ) {
|
||||
for (int i = 1; 200 >= i; i++) {
|
||||
nh.update(i);
|
||||
}
|
||||
ConvenientSnapshot deltaSnapshot1 = nh.getDeltaSnapshot(500);
|
||||
@@ -43,7 +45,7 @@ public class NicerHistogramTest {
|
||||
|
||||
ConvenientSnapshot cachedSnapshot = nh.getSnapshot();
|
||||
assertThat(cachedSnapshot.getMax()).isEqualTo(200);
|
||||
for (int i=1; i<= 300; i++ ) {
|
||||
for (int i = 1; 300 >= i; i++) {
|
||||
nh.update(i);
|
||||
}
|
||||
ConvenientSnapshot stillCachedSnapshot = nh.getSnapshot();
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2022 nosqlbench
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,6 +18,7 @@ package io.nosqlbench.engine.api.metrics;
|
||||
|
||||
import com.codahale.metrics.ExponentiallyDecayingReservoir;
|
||||
import com.codahale.metrics.Snapshot;
|
||||
import io.nosqlbench.api.config.NBLabels;
|
||||
import io.nosqlbench.api.engine.metrics.DeltaHdrHistogramReservoir;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
@@ -27,33 +28,29 @@ public class TestHistoTypes {
|
||||
@Test
|
||||
@Disabled
|
||||
public void compareHistos() {
|
||||
Clock c = new Clock();
|
||||
final Clock c = new Clock();
|
||||
|
||||
// Use the defaults that you get with "Timer()"
|
||||
ExponentiallyDecayingReservoir expRes = new ExponentiallyDecayingReservoir(1028,0.015,c);
|
||||
DeltaHdrHistogramReservoir hdrRes = new DeltaHdrHistogramReservoir("dr",4);
|
||||
long max=100000000;
|
||||
final ExponentiallyDecayingReservoir expRes = new ExponentiallyDecayingReservoir(1028,0.015,c);
|
||||
final DeltaHdrHistogramReservoir hdrRes = new DeltaHdrHistogramReservoir(NBLabels.forKV("name", "dr"),4);
|
||||
final long max=100000000;
|
||||
|
||||
for (long i = 0; i < max; i++) {
|
||||
expRes.update(i);
|
||||
hdrRes.update(i);
|
||||
if ((i%1000000)==0) {
|
||||
System.out.println(i);
|
||||
}
|
||||
if (0 == (i % 1000000)) System.out.println(i);
|
||||
}
|
||||
|
||||
summary(0L,max, expRes.getSnapshot(), hdrRes.getSnapshot());
|
||||
this.summary(0L,max, expRes.getSnapshot(), hdrRes.getSnapshot());
|
||||
}
|
||||
|
||||
private void summary(long min, long max,Snapshot... snapshots) {
|
||||
for (int i = 0; i <=100; i++) {
|
||||
double pct = (double)i/100.0D;
|
||||
double expectedValue=pct*max;
|
||||
private void summary(final long min, final long max, final Snapshot... snapshots) {
|
||||
for (int i = 0; 100 >= i; i++) {
|
||||
final double pct = i /100.0D;
|
||||
final double expectedValue=pct*max;
|
||||
System.out.format("% 3d %%p is % 11d : ",(long)(pct*100),(long)expectedValue);
|
||||
for (Snapshot snapshot : snapshots) {
|
||||
System.out.format("% 10d ",(long)snapshot.getValue(pct));
|
||||
}
|
||||
System.out.print("\n");
|
||||
for (final Snapshot snapshot : snapshots) System.out.format("% 10d ", (long) snapshot.getValue(pct));
|
||||
System.out.print('\n');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,12 +60,12 @@ public class TestHistoTypes {
|
||||
|
||||
@Override
|
||||
public long getTime() {
|
||||
return nanos/1000000;
|
||||
return this.nanos /1000000;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getTick() {
|
||||
return nanos;
|
||||
return this.nanos;
|
||||
}
|
||||
}
|
||||
}

@@ -21,6 +21,7 @@ import io.nosqlbench.api.annotations.Layer;
import io.nosqlbench.api.content.Content;
import io.nosqlbench.api.content.NBIO;
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
import io.nosqlbench.api.engine.metrics.reporters.PromPushReporter;
import io.nosqlbench.api.errors.BasicError;
import io.nosqlbench.api.logging.NBLogLevel;
import io.nosqlbench.api.metadata.SessionNamer;
@@ -31,6 +32,8 @@ import io.nosqlbench.engine.api.activityapi.cyclelog.outputs.cyclelog.CycleLogIm
import io.nosqlbench.engine.api.activityapi.input.InputType;
import io.nosqlbench.engine.api.activityapi.output.OutputType;
import io.nosqlbench.engine.api.activityconfig.rawyaml.RawOpsLoader;
import io.nosqlbench.engine.cli.NBCLIOptions.LoggerConfigData;
import io.nosqlbench.engine.cli.NBCLIOptions.Mode;
import io.nosqlbench.engine.core.annotation.Annotators;
import io.nosqlbench.engine.core.lifecycle.process.NBCLIErrorHandler;
import io.nosqlbench.engine.core.lifecycle.activity.ActivityTypeLoader;
@@ -59,6 +62,7 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.ServiceLoader.Provider;
import java.util.function.Function;
import java.util.stream.Collectors;

@@ -72,12 +76,12 @@ public class NBCLI implements Function<String[], Integer> {

static {
loggerConfig = new LoggerConfig();
LoggerConfig.setConfigurationFactory(loggerConfig);
ConfigurationFactory.setConfigurationFactory(NBCLI.loggerConfig);
}

private final String commandName;

public NBCLI(String commandName) {
public NBCLI(final String commandName) {
this.commandName = commandName;
}

@@ -86,14 +90,15 @@ public class NBCLI implements Function<String[], Integer> {
* invocations are handled functionally by {@link #apply(String[])}, which allows
* for scenario encapsulation and concurrent testing.
*
* @param args Command Line Args
* @param args
* Command Line Args
*/
public static void main(String[] args) {
public static void main(final String[] args) {
try {
NBCLI cli = new NBCLI("nb");
int statusCode = cli.apply(args);
final NBCLI cli = new NBCLI("nb");
final int statusCode = cli.apply(args);
System.exit(statusCode);
} catch (Exception e) {
} catch (final Exception e) {
System.out.println("Not expected issue in main: " + e.getMessage());
}
}
@@ -101,40 +106,36 @@ public class NBCLI implements Function<String[], Integer> {
/**
* return null;
* }
*
* <p>
* public static void main(String[] args) {
*
* @param args
* @return
*/
@Override
public Integer apply(String[] args) {
public Integer apply(final String[] args) {
try {
NBCLI cli = new NBCLI("nb");
int result = cli.applyDirect(args);
final NBCLI cli = new NBCLI("nb");
final int result = cli.applyDirect(args);
return result;
} catch (Exception e) {
} catch (final Exception e) {
boolean showStackTraces = false;
for (String arg : args) {

if (arg.toLowerCase(Locale.ROOT).startsWith("-v") || (arg.toLowerCase(Locale.ROOT).equals("--show-stacktraces"))) {
for (final String arg : args)
if (arg.toLowerCase(Locale.ROOT).startsWith("-v") || "--show-stacktraces".equals(arg.toLowerCase(Locale.ROOT))) {
showStackTraces = true;
break;
}
}

String error = NBCLIErrorHandler.handle(e, showStackTraces);
final String error = NBCLIErrorHandler.handle(e, showStackTraces);
// Commented for now, as the above handler should do everything needed.
if (error != null) {
System.err.println("Scenario stopped due to error. See logs for details.");
}
if (null != error) System.err.println("Scenario stopped due to error. See logs for details.");
System.err.flush();
System.out.flush();
return EXIT_ERROR;
return NBCLI.EXIT_ERROR;
}
}

public Integer applyDirect(String[] args) {
public Integer applyDirect(final String[] args) {

// Initial logging config covers only command line parsing
// We don't want anything to go to console here unless it is a real problem
@@ -145,12 +146,12 @@ public class NBCLI implements Function<String[], Integer> {
// .activate();
// logger = LogManager.getLogger("NBCLI");

loggerConfig.setConsoleLevel(NBLogLevel.ERROR);
NBCLI.loggerConfig.setConsoleLevel(NBLogLevel.ERROR);

NBCLIOptions globalOptions = new NBCLIOptions(args, NBCLIOptions.Mode.ParseGlobalsOnly);
String sessionName = SessionNamer.format(globalOptions.getSessionName());
final NBCLIOptions globalOptions = new NBCLIOptions(args, Mode.ParseGlobalsOnly);
final String sessionName = SessionNamer.format(globalOptions.getSessionName());

loggerConfig
NBCLI.loggerConfig
.setSessionName(sessionName)
.setConsoleLevel(globalOptions.getConsoleLogLevel())
.setConsolePattern(globalOptions.getConsoleLoggingPattern())
@@ -162,63 +163,67 @@ public class NBCLI implements Function<String[], Integer> {
.setAnsiEnabled(globalOptions.isEnableAnsi())
.setDedicatedVerificationLogger(globalOptions.isDedicatedVerificationLogger())
.activate();
ConfigurationFactory.setConfigurationFactory(loggerConfig);
ConfigurationFactory.setConfigurationFactory(NBCLI.loggerConfig);

logger = LogManager.getLogger("NBCLI");
loggerConfig.purgeOldFiles(LogManager.getLogger("SCENARIO"));
if (logger.isInfoEnabled()) {
logger.info(() -> "Configured scenario log at " + loggerConfig.getLogfileLocation());
} else {
System.err.println("Configured scenario log at " + loggerConfig.getLogfileLocation());
}
logger.debug("Scenario log started");
NBCLI.logger = LogManager.getLogger("NBCLI");
NBCLI.loggerConfig.purgeOldFiles(LogManager.getLogger("SCENARIO"));
if (NBCLI.logger.isInfoEnabled())
NBCLI.logger.info(() -> "Configured scenario log at " + NBCLI.loggerConfig.getLogfileLocation());
else System.err.println("Configured scenario log at " + NBCLI.loggerConfig.getLogfileLocation());
NBCLI.logger.debug("Scenario log started");

// Global only processing
if (args.length == 0) {
System.out.println(loadHelpFile("commandline.md"));
return EXIT_OK;
if (0 == args.length) {
System.out.println(this.loadHelpFile("commandline.md"));
return NBCLI.EXIT_OK;
}

logger.info(() -> "Running NoSQLBench Version " + new VersionInfo().getVersion());
logger.info(() -> "command-line: " + Arrays.stream(args).collect(Collectors.joining(" ")));
logger.info(() -> "client-hardware: " + SystemId.getHostSummary());
NBCLI.logger.info(() -> "Running NoSQLBench Version " + new VersionInfo().getVersion());
NBCLI.logger.info(() -> "command-line: " + Arrays.stream(args).collect(Collectors.joining(" ")));
NBCLI.logger.info(() -> "client-hardware: " + SystemId.getHostSummary());


// Invoke any bundled app which matches the name of the first non-option argument, if it exists.
// If it does not, continue with no fanfare. Let it drop through to other command resolution methods.
if (args.length > 0 && args[0].matches("\\w[\\w\\d-_.]+")) {
ServiceSelector<BundledApp> apploader = ServiceSelector.of(args[0], ServiceLoader.load(BundledApp.class));
BundledApp app = apploader.get().orElse(null);
if (app != null) {
String[] appargs = Arrays.copyOfRange(args, 1, args.length);
logger.info(() -> "invoking bundled app '" + args[0] + "' (" + app.getClass().getSimpleName() + ").");
if ((0 < args.length) && args[0].matches("\\w[\\w\\d-_.]+")) {
final ServiceSelector<BundledApp> apploader = ServiceSelector.of(args[0], ServiceLoader.load(BundledApp.class));
final BundledApp app = apploader.get().orElse(null);
if (null != app) {
final String[] appargs = Arrays.copyOfRange(args, 1, args.length);
NBCLI.logger.info(() -> "invoking bundled app '" + args[0] + "' (" + app.getClass().getSimpleName() + ").");
globalOptions.setWantsStackTraces(true);
int result = app.applyAsInt(appargs);
final int result = app.applyAsInt(appargs);
return result;
}
}
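The bundled-app dispatch above resolves the first positional argument against ServiceLoader-provided implementations and falls through if none matches. A standalone sketch of that pattern using only java.util.ServiceLoader; the BundledTool interface, selector(), and run() below are illustrative stand-ins, not the project's ServiceSelector or BundledApp API:

import java.util.Arrays;
import java.util.Optional;
import java.util.ServiceLoader;

interface BundledTool {          // illustrative stand-in for a pluggable sub-command
    String selector();           // the name the tool is invoked by
    int run(String[] args);
}

final class ToolDispatcher {
    // Find the first provider whose selector matches the requested name.
    static Optional<BundledTool> find(String name) {
        return ServiceLoader.load(BundledTool.class).stream()
            .map(ServiceLoader.Provider::get)
            .filter(tool -> tool.selector().equals(name))
            .findFirst();
    }

    static int dispatch(String[] args) {
        if (args.length == 0) return 0;
        return find(args[0])
            .map(tool -> tool.run(Arrays.copyOfRange(args, 1, args.length)))
            .orElse(-1);         // no match: fall through to other command resolution
    }
}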

boolean dockerMetrics = globalOptions.wantsDockerMetrics();
String dockerMetricsAt = globalOptions.wantsDockerMetricsAt();

final boolean dockerMetrics = globalOptions.wantsDockerMetrics();
final String dockerMetricsAt = globalOptions.wantsDockerMetricsAt();
String reportGraphiteTo = globalOptions.wantsReportGraphiteTo();
String annotatorsConfig = globalOptions.getAnnotatorsConfig();
final String reportPromPushTo = globalOptions.wantsReportPromPushTo();

int mOpts = (dockerMetrics ? 1 : 0) + (dockerMetricsAt != null ? 1 : 0) + (reportGraphiteTo != null ? 1 : 0);
if (mOpts > 1 && (reportGraphiteTo == null || annotatorsConfig == null)) {


final int mOpts = (dockerMetrics ? 1 : 0)
+ ((null != dockerMetricsAt) ? 1 : 0)
+ ((null != reportGraphiteTo) ? 1 : 0);

if ((1 < mOpts) && ((null == reportGraphiteTo) || (null == annotatorsConfig)))
throw new BasicError("You have multiple conflicting options which attempt to set\n" +
" the destination for metrics and annotations. Please select only one of\n" +
" --docker-metrics, --docker-metrics-at <addr>, or other options like \n" +
" --report-graphite-to <addr> and --annotators <config>\n" +
" For more details, see run 'nb help docker-metrics'");
}

String metricsAddr = null;
String graphiteMetricsAddress = null;

if (dockerMetrics) {
// Setup docker stack for local docker metrics
logger.info("Docker metrics is enabled. Docker must be installed for this to work");
DockerMetricsManager dmh = new DockerMetricsManager();
Map<String, String> dashboardOptions = Map.of(
NBCLI.logger.info("Docker metrics is enabled. Docker must be installed for this to work");
final DockerMetricsManager dmh = new DockerMetricsManager();
final Map<String, String> dashboardOptions = Map.of(
DockerMetricsManager.GRAFANA_TAG, globalOptions.getDockerGrafanaTag(),
DockerMetricsManager.PROM_TAG, globalOptions.getDockerPromTag(),
DockerMetricsManager.TSDB_RETENTION, String.valueOf(globalOptions.getDockerPromRetentionDays()),
@@ -229,152 +234,143 @@ public class NBCLI implements Function<String[], Integer> {

);
dmh.startMetrics(dashboardOptions);
String warn = "Docker Containers are started, for grafana and prometheus, hit" +
final String warn = "Docker Containers are started, for grafana and prometheus, hit" +
" these urls in your browser: http://<host>:3000 and http://<host>:9090";
logger.warn(warn);
metricsAddr = "localhost";
} else if (dockerMetricsAt != null) {
metricsAddr = dockerMetricsAt;
}
NBCLI.logger.warn(warn);
graphiteMetricsAddress = "localhost";
} else if (null != dockerMetricsAt) graphiteMetricsAddress = dockerMetricsAt;

if (metricsAddr != null) {
reportGraphiteTo = metricsAddr + ":9109";
annotatorsConfig = "[{type:'log',level:'info'},{type:'grafana',baseurl:'http://" + metricsAddr + ":3000" +
if (null != graphiteMetricsAddress) {
reportGraphiteTo = graphiteMetricsAddress + ":9109";
annotatorsConfig = "[{type:'log',level:'info'},{type:'grafana',baseurl:'http://" + graphiteMetricsAddress + ":3000" +
"/'," +
"tags:'appname:nosqlbench',timeoutms:5000,onerror:'warn'}]";
} else {
annotatorsConfig = "[{type:'log',level:'info'}]";
}
} else annotatorsConfig = "[{type:'log',level:'info'}]";

NBCLIOptions options = new NBCLIOptions(args);
logger = LogManager.getLogger("NBCLI");
final NBCLIOptions options = new NBCLIOptions(args);
NBCLI.logger = LogManager.getLogger("NBCLI");

NBIO.addGlobalIncludes(options.wantsIncludes());

ActivityMetrics.setHdrDigits(options.getHdrDigits());

if (options.wantsBasicHelp()) {
System.out.println(loadHelpFile("basic.md"));
return EXIT_OK;
System.out.println(this.loadHelpFile("basic.md"));
return NBCLI.EXIT_OK;
}

if (options.isWantsVersionShort()) {
System.out.println(new VersionInfo().getVersion());
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.wantsVersionCoords()) {
System.out.println(new VersionInfo().getArtifactCoordinates());
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.isWantsListApps()) {
ServiceLoader<BundledApp> loader = ServiceLoader.load(BundledApp.class);
for (ServiceLoader.Provider<BundledApp> provider : loader.stream().toList()) {
Class<? extends BundledApp> appType = provider.type();
String name = appType.getAnnotation(Service.class).selector();
final ServiceLoader<BundledApp> loader = ServiceLoader.load(BundledApp.class);
for (final Provider<BundledApp> provider : loader.stream().toList()) {
final Class<? extends BundledApp> appType = provider.type();
final String name = appType.getAnnotation(Service.class).selector();
System.out.printf("%-40s %s%n", name, appType.getCanonicalName());
}
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.getWantsListCommands()) {
NBCLICommandParser.RESERVED_WORDS.forEach(System.out::println);
return EXIT_OK;
return NBCLI.EXIT_OK;
}
if (options.wantsActivityTypes()) {
new ActivityTypeLoader().getAllSelectors().forEach(System.out::println);
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.wantsWorkloadsList()) {
NBCLIScenarios.printWorkloads(false, options.wantsIncludes());
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.wantsScenariosList()) {
NBCLIScenarios.printWorkloads(true, options.wantsIncludes());
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.wantsListScripts()) {
NBCLIScripts.printScripts(true, options.wantsIncludes());
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.wantsToCopyResource()) {
String resourceToCopy = options.wantsToCopyResourceNamed();
logger.debug(() -> "user requests to copy out " + resourceToCopy);
final String resourceToCopy = options.wantsToCopyResourceNamed();
NBCLI.logger.debug(() -> "user requests to copy out " + resourceToCopy);

Optional<Content<?>> tocopy = NBIO.classpath()
.searchPrefixes("activities")
.searchPrefixes(options.wantsIncludes())
.pathname(resourceToCopy).extensionSet(RawOpsLoader.YAML_EXTENSIONS).first();

if (tocopy.isEmpty()) {
if (tocopy.isEmpty()) tocopy = NBIO.classpath()
.searchPrefixes().searchPrefixes(options.wantsIncludes())
.searchPrefixes(options.wantsIncludes())
.pathname(resourceToCopy).first();

tocopy = NBIO.classpath()
.searchPrefixes().searchPrefixes(options.wantsIncludes())
.searchPrefixes(options.wantsIncludes())
.pathname(resourceToCopy).first();
}

Content<?> data = tocopy.orElseThrow(
final Content<?> data = tocopy.orElseThrow(
() -> new BasicError(
"Unable to find " + resourceToCopy +
" in classpath to copy out")
);

Path writeTo = Path.of(data.asPath().getFileName().toString());
if (Files.exists(writeTo)) {
throw new BasicError("A file named " + writeTo + " exists. Remove it first.");
}
final Path writeTo = Path.of(data.asPath().getFileName().toString());
if (Files.exists(writeTo)) throw new BasicError("A file named " + writeTo + " exists. Remove it first.");
try {
Files.writeString(writeTo, data.getCharBuffer(), StandardCharsets.UTF_8);
} catch (IOException e) {
} catch (final IOException e) {
throw new BasicError("Unable to write to " + writeTo + ": " + e.getMessage());
}
logger.info(() -> "Copied internal resource '" + data.asPath() + "' to '" + writeTo + "'");
return EXIT_OK;
NBCLI.logger.info(() -> "Copied internal resource '" + data.asPath() + "' to '" + writeTo + '\'');
return NBCLI.EXIT_OK;

}

if (options.wantsInputTypes()) {
InputType.FINDER.getAllSelectors().forEach((k, v) -> System.out.println(k + " (" + v.name() + ")"));
return EXIT_OK;
InputType.FINDER.getAllSelectors().forEach((k, v) -> System.out.println(k + " (" + v.name() + ')'));
return NBCLI.EXIT_OK;
}

if (options.wantsMarkerTypes()) {
OutputType.FINDER.getAllSelectors().forEach((k, v) -> System.out.println(k + " (" + v.name() + ")"));
return EXIT_OK;
OutputType.FINDER.getAllSelectors().forEach((k, v) -> System.out.println(k + " (" + v.name() + ')'));
return NBCLI.EXIT_OK;
}

if (options.wantsToDumpCyclelog()) {
CycleLogDumperUtility.main(options.getCycleLogExporterOptions());
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.wantsToImportCycleLog()) {
CycleLogImporterUtility.main(options.getCyclelogImportOptions());
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.wantsTopicalHelp()) {
Optional<String> helpDoc = MarkdownFinder.forHelpTopic(options.wantsTopicalHelpFor());
final Optional<String> helpDoc = MarkdownFinder.forHelpTopic(options.wantsTopicalHelpFor());
System.out.println(helpDoc.orElseThrow(
() -> new RuntimeException("No help could be found for " + options.wantsTopicalHelpFor())
));
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.wantsMetricsForActivity() != null) {
String metricsHelp = getMetricsHelpFor(options.wantsMetricsForActivity());
System.out.println("Available metric names for activity:" + options.wantsMetricsForActivity() + ":");
if (null != options.wantsMetricsForActivity()) {
final String metricsHelp = this.getMetricsHelpFor(options.wantsMetricsForActivity());
System.out.println("Available metric names for activity:" + options.wantsMetricsForActivity() + ':');
System.out.println(metricsHelp);
return EXIT_OK;
return NBCLI.EXIT_OK;
}

logger.debug("initializing annotators with config:'" + annotatorsConfig + "'");
NBCLI.logger.debug("initializing annotators with config:'{}'", annotatorsConfig);
Annotators.init(annotatorsConfig);
Annotators.recordAnnotation(
Annotation.newBuilder()
@@ -385,53 +381,48 @@ public class NBCLI implements Function<String[], Integer> {
.build()
);

if (reportGraphiteTo != null || options.wantsReportCsvTo() != null) {
MetricReporters reporters = MetricReporters.getInstance();
if ((null != reportPromPushTo) || (null != reportGraphiteTo) || (null != options.wantsReportCsvTo())) {
final MetricReporters reporters = MetricReporters.getInstance();
reporters.addRegistry("workloads", ActivityMetrics.getMetricRegistry());

if (reportGraphiteTo != null) {
reporters.addGraphite(reportGraphiteTo, options.wantsMetricsPrefix());
}
if (options.wantsReportCsvTo() != null) {
if (null != reportPromPushTo) reporters.addPromPush(reportPromPushTo, options.wantsMetricsPrefix());
if (null != reportGraphiteTo) reporters.addGraphite(reportGraphiteTo, options.wantsMetricsPrefix());
if (null != options.wantsReportCsvTo())
reporters.addCSVReporter(options.wantsReportCsvTo(), options.wantsMetricsPrefix());
}
reporters.start(10, options.getReportInterval());
}
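The reporter wiring above goes through the project's own MetricReporters facade, but it follows the usual Dropwizard Metrics pattern of building a reporter against a registry and starting it on an interval. A minimal sketch with the stock CsvReporter from metrics-core; the output directory and period are placeholders, not values taken from the code above:

import com.codahale.metrics.CsvReporter;
import com.codahale.metrics.MetricRegistry;

import java.io.File;
import java.util.Locale;
import java.util.concurrent.TimeUnit;

final class CsvReporting {
    static CsvReporter startCsvReporter(MetricRegistry registry, File outputDir) {
        outputDir.mkdirs();                           // CsvReporter writes one file per metric
        CsvReporter reporter = CsvReporter.forRegistry(registry)
            .formatFor(Locale.US)
            .convertRatesTo(TimeUnit.SECONDS)
            .convertDurationsTo(TimeUnit.MICROSECONDS)
            .build(outputDir);
        reporter.start(10, TimeUnit.SECONDS);         // report every 10 seconds
        return reporter;
    }
}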

if (options.wantsEnableChart()) {
logger.info("Charting enabled");
if (options.getHistoLoggerConfigs().size() == 0) {
logger.info("Adding default histologger configs");
String pattern = ".*";
String file = options.getChartHdrFileName();
String interval = "1s";
NBCLI.logger.info("Charting enabled");
if (0 == options.getHistoLoggerConfigs().size()) {
NBCLI.logger.info("Adding default histologger configs");
final String pattern = ".*";
final String file = options.getChartHdrFileName();
final String interval = "1s";
options.setHistoLoggerConfigs(pattern, file, interval);
}
}

for (
NBCLIOptions.LoggerConfigData histoLogger : options.getHistoLoggerConfigs()) {
final LoggerConfigData histoLogger : options.getHistoLoggerConfigs())
ActivityMetrics.addHistoLogger(sessionName, histoLogger.pattern, histoLogger.file, histoLogger.interval);
}
for (
NBCLIOptions.LoggerConfigData statsLogger : options.getStatsLoggerConfigs()) {
final LoggerConfigData statsLogger : options.getStatsLoggerConfigs())
ActivityMetrics.addStatsLogger(sessionName, statsLogger.pattern, statsLogger.file, statsLogger.interval);
}
for (
NBCLIOptions.LoggerConfigData classicConfigs : options.getClassicHistoConfigs()) {
final LoggerConfigData classicConfigs : options.getClassicHistoConfigs())
ActivityMetrics.addClassicHistos(sessionName, classicConfigs.pattern, classicConfigs.file, classicConfigs.interval);
}

// intentionally not shown for warn-only
logger.info(() -> "console logging level is " + options.getConsoleLogLevel());
NBCLI.logger.info(() -> "console logging level is " + options.getConsoleLogLevel());

ScenariosExecutor scenariosExecutor = new ScenariosExecutor("executor-" + sessionName, 1);
final ScenariosExecutor scenariosExecutor = new ScenariosExecutor("executor-" + sessionName, 1);
if (options.getConsoleLogLevel().isGreaterOrEqualTo(NBLogLevel.WARN)) {
options.setWantsStackTraces(true);
logger.debug(() -> "enabling stack traces since log level is " + options.getConsoleLogLevel());
NBCLI.logger.debug(() -> "enabling stack traces since log level is " + options.getConsoleLogLevel());
}

Scenario scenario = new Scenario(
final Scenario scenario = new Scenario(
sessionName,
options.getScriptFile(),
options.getScriptingEngine(),
@@ -443,78 +434,73 @@ public class NBCLI implements Function<String[], Integer> {
options.getLogsDirectory(),
Maturity.Unspecified);

ScriptBuffer buffer = new BasicScriptBuffer()
final ScriptBuffer buffer = new BasicScriptBuffer()
.add(options.getCommands()
.toArray(new Cmd[0]));
String scriptData = buffer.getParsedScript();
final String scriptData = buffer.getParsedScript();

if (options.wantsShowScript()) {
System.out.println("// Rendered Script");
System.out.println(scriptData);
return EXIT_OK;
return NBCLI.EXIT_OK;
}

if (options.wantsEnableChart()) {
logger.info("Charting enabled");
NBCLI.logger.info("Charting enabled");
scenario.enableCharting();
} else {
logger.info("Charting disabled");
}
} else NBCLI.logger.info("Charting disabled");


// Execute Scenario!
if (options.getCommands().size() == 0) {
logger.info("No commands provided. Exiting before scenario.");
return EXIT_OK;
if (0 == options.getCommands().size()) {
NBCLI.logger.info("No commands provided. Exiting before scenario.");
return NBCLI.EXIT_OK;
}

scenario.addScriptText(scriptData);
ScriptParams scriptParams = new ScriptParams();
final ScriptParams scriptParams = new ScriptParams();
scriptParams.putAll(buffer.getCombinedParams());
scenario.addScenarioScriptParams(scriptParams);

scenariosExecutor.execute(scenario);
ScenariosResults scenariosResults = scenariosExecutor.awaitAllResults();
logger.debug(() -> "Total of " + scenariosResults.getSize() + " result object returned from ScenariosExecutor");
final ScenariosResults scenariosResults = scenariosExecutor.awaitAllResults();
NBCLI.logger.debug(() -> "Total of " + scenariosResults.getSize() + " result object returned from ScenariosExecutor");

ActivityMetrics.closeMetrics(options.wantsEnableChart());
scenariosResults.reportToLog();
ShutdownManager.shutdown();

logger.info(scenariosResults.getExecutionSummary());
NBCLI.logger.info(scenariosResults.getExecutionSummary());

if (scenariosResults.hasError()) {
Exception exception = scenariosResults.getOne().getException();
logger.warn(scenariosResults.getExecutionSummary());
final Exception exception = scenariosResults.getOne().getException();
NBCLI.logger.warn(scenariosResults.getExecutionSummary());
NBCLIErrorHandler.handle(exception, options.wantsStackTraces());
System.err.println(exception.getMessage()); // TODO: make this consistent with ConsoleLogging sequencing
return EXIT_ERROR;
} else {
logger.info(scenariosResults.getExecutionSummary());
return EXIT_OK;
return NBCLI.EXIT_ERROR;
}
NBCLI.logger.info(scenariosResults.getExecutionSummary());
return NBCLI.EXIT_OK;

}

private String loadHelpFile(String filename) {
ClassLoader cl = getClass().getClassLoader();
InputStream resourceAsStream = cl.getResourceAsStream(filename);
if (resourceAsStream == null) {
throw new RuntimeException("Unable to find " + filename + " in classpath.");
}
private String loadHelpFile(final String filename) {
final ClassLoader cl = this.getClass().getClassLoader();
final InputStream resourceAsStream = cl.getResourceAsStream(filename);
if (null == resourceAsStream) throw new RuntimeException("Unable to find " + filename + " in classpath.");
String basicHelp;
try (BufferedReader buffer = new BufferedReader(new InputStreamReader(resourceAsStream))) {
try (final BufferedReader buffer = new BufferedReader(new InputStreamReader(resourceAsStream, StandardCharsets.UTF_8))) {
basicHelp = buffer.lines().collect(Collectors.joining("\n"));
} catch (Throwable t) {
} catch (final Throwable t) {
throw new RuntimeException("Unable to buffer " + filename + ": " + t);
}
basicHelp = basicHelp.replaceAll("PROG", commandName);
basicHelp = basicHelp.replaceAll("PROG", this.commandName);
return basicHelp;

}
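loadHelpFile above is the standard read-a-classpath-resource-into-a-String idiom, now with an explicit UTF-8 charset. A self-contained sketch of the same idiom using only the JDK; the class and method names are illustrative:

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.stream.Collectors;

final class ResourceText {
    // Read a classpath resource into a String, or fail loudly if it is missing.
    static String read(String resourceName) {
        InputStream in = ResourceText.class.getClassLoader().getResourceAsStream(resourceName);
        if (in == null) throw new RuntimeException("Unable to find " + resourceName + " in classpath.");
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
            return reader.lines().collect(Collectors.joining("\n"));
        } catch (Exception e) {
            throw new RuntimeException("Unable to read " + resourceName + ": " + e, e);
        }
    }
}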

private String getMetricsHelpFor(String activityType) {
String metrics = MetricsMapper.metricsDetail(activityType);
private String getMetricsHelpFor(final String activityType) {
final String metrics = MetricsMapper.metricsDetail(activityType);
return metrics;
}


File diff suppressed because it is too large
@@ -14,7 +14,8 @@
~ limitations under the License.
-->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>



@@ -1,5 +1,5 @@
/*
* Copyright (c) 2022 nosqlbench
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,8 +17,10 @@
package io.nosqlbench.engine.core.lifecycle;

import com.codahale.metrics.*;
import com.codahale.metrics.ConsoleReporter.Builder;
import io.nosqlbench.api.engine.metrics.ActivityMetrics;
import io.nosqlbench.engine.core.logging.Log4JMetricsReporter;
import io.nosqlbench.api.engine.metrics.reporters.Log4JMetricsReporter;
import io.nosqlbench.api.engine.metrics.reporters.Log4JMetricsReporter.LoggingLevel;
import io.nosqlbench.engine.core.metrics.NBMetricsSummary;

import java.io.ByteArrayOutputStream;
@@ -48,74 +50,66 @@ public class ExecutionMetricsResult extends ExecutionResult {
MetricAttribute.M15_RATE
);

public ExecutionMetricsResult(long startedAt, long endedAt, String iolog, Exception error) {
public ExecutionMetricsResult(final long startedAt, final long endedAt, final String iolog, final Exception error) {
super(startedAt, endedAt, iolog, error);
}

public String getMetricsSummary() {
ByteArrayOutputStream os = new ByteArrayOutputStream();
try (PrintStream ps = new PrintStream(os)) {
ConsoleReporter.Builder builder = ConsoleReporter.forRegistry(ActivityMetrics.getMetricRegistry())
final ByteArrayOutputStream os = new ByteArrayOutputStream();
try (final PrintStream ps = new PrintStream(os)) {
final Builder builder = ConsoleReporter.forRegistry(ActivityMetrics.getMetricRegistry())
.convertDurationsTo(TimeUnit.MICROSECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.filter(MetricFilter.ALL)
.outputTo(ps);
Set<MetricAttribute> disabled = new HashSet<>(INTERVAL_ONLY_METRICS);
if (this.getElapsedMillis()<60000) {
disabled.addAll(OVER_ONE_MINUTE_METRICS);
}
final Set<MetricAttribute> disabled = new HashSet<>(ExecutionMetricsResult.INTERVAL_ONLY_METRICS);
if (60000 > this.getElapsedMillis()) disabled.addAll(ExecutionMetricsResult.OVER_ONE_MINUTE_METRICS);
builder.disabledMetricAttributes(disabled);
ConsoleReporter consoleReporter = builder.build();
final ConsoleReporter consoleReporter = builder.build();
consoleReporter.report();
consoleReporter.close();
}
String result = os.toString(StandardCharsets.UTF_8);
final String result = os.toString(StandardCharsets.UTF_8);
return result;
}
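getMetricsSummary above captures a one-shot ConsoleReporter run into a String by pointing the reporter at an in-memory stream. A minimal version of that capture, using only Dropwizard Metrics core and the JDK; the class and method names are illustrative:

import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.TimeUnit;

final class MetricsSummary {
    static String summarize(MetricRegistry registry) {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (PrintStream ps = new PrintStream(buffer, true, StandardCharsets.UTF_8)) {
            ConsoleReporter reporter = ConsoleReporter.forRegistry(registry)
                .convertDurationsTo(TimeUnit.MICROSECONDS)
                .convertRatesTo(TimeUnit.SECONDS)
                .filter(MetricFilter.ALL)
                .outputTo(ps)
                .build();
            reporter.report();   // one-shot report into the buffer
            reporter.close();
        }
        return buffer.toString(StandardCharsets.UTF_8);
    }
}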

public void reportToConsole() {
String summaryReport = getMetricsSummary();
final String summaryReport = this.getMetricsSummary();
System.out.println(summaryReport);
}


public void reportMetricsSummaryTo(PrintStream out) {
out.println(getMetricsSummary());
public void reportMetricsSummaryTo(final PrintStream out) {
out.println(this.getMetricsSummary());
}

public void reportMetricsSummaryToLog() {
logger.debug("-- WARNING: Metrics which are taken per-interval (like histograms) will not have --");
logger.debug("-- active data on this last report. (The workload has already stopped.) Record --");
logger.debug("-- metrics to an external format to see values for each reporting interval. --");
logger.debug("-- BEGIN METRICS DETAIL --");
Log4JMetricsReporter reporter = Log4JMetricsReporter.forRegistry(ActivityMetrics.getMetricRegistry())
.withLoggingLevel(Log4JMetricsReporter.LoggingLevel.DEBUG)
ExecutionResult.logger.debug("-- WARNING: Metrics which are taken per-interval (like histograms) will not have --");
ExecutionResult.logger.debug("-- active data on this last report. (The workload has already stopped.) Record --");
ExecutionResult.logger.debug("-- metrics to an external format to see values for each reporting interval. --");
ExecutionResult.logger.debug("-- BEGIN METRICS DETAIL --");
final Log4JMetricsReporter reporter = Log4JMetricsReporter.forRegistry(ActivityMetrics.getMetricRegistry())
.withLoggingLevel(LoggingLevel.DEBUG)
.convertDurationsTo(TimeUnit.MICROSECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.filter(MetricFilter.ALL)
.outputTo(logger)
.outputTo(ExecutionResult.logger)
.build();
reporter.report();
reporter.close();
logger.debug("-- END METRICS DETAIL --");
ExecutionResult.logger.debug("-- END METRICS DETAIL --");
}

public void reportMetricsCountsTo(PrintStream printStream) {
StringBuilder sb = new StringBuilder();
public void reportMetricsCountsTo(final PrintStream printStream) {
final StringBuilder sb = new StringBuilder();

ActivityMetrics.getMetricRegistry().getMetrics().forEach((k, v) -> {
if (v instanceof Counting counting) {
long count = counting.getCount();
if (count > 0) {
NBMetricsSummary.summarize(sb, k, v);
}
final long count = counting.getCount();
if (0 < count) NBMetricsSummary.summarize(sb, k, v);
} else if (v instanceof Gauge<?> gauge) {
Object value = gauge.getValue();
if (value instanceof Number n) {
if (n.doubleValue() != 0) {
NBMetricsSummary.summarize(sb, k, v);
}
}
final Object value = gauge.getValue();
if (value instanceof Number n) if (0 != n.doubleValue()) NBMetricsSummary.summarize(sb, k, v);
}
});


@@ -16,6 +16,7 @@

package io.nosqlbench.engine.core.lifecycle.activity;

import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.activityapi.core.Activity;
import io.nosqlbench.engine.api.activityimpl.uniform.StandardActivityType;
@@ -32,23 +33,23 @@ import java.util.concurrent.ConcurrentHashMap;
* see each other by name.
*/
public class ActivityLoader {
private final static Logger logger = LogManager.getLogger("ACTIVITIES");
private static final Logger logger = LogManager.getLogger("ACTIVITIES");
private final Map<String, Activity> activityMap = new ConcurrentHashMap<>();
private final Scenario scenario;

public ActivityLoader(Scenario scenario) {
public ActivityLoader(final Scenario scenario) {
this.scenario = scenario;
}

public synchronized Activity loadActivity(ActivityDef activityDef) {
public synchronized Activity loadActivity(ActivityDef activityDef, final NBLabeledElement labels) {
activityDef= activityDef.deprecate("yaml","workload").deprecate("type","driver");
Activity activity = new StandardActivityType(activityDef).getAssembledActivity(activityDef, activityMap);
activityMap.put(activity.getAlias(),activity);
logger.debug("Resolved activity for alias '" + activityDef.getAlias() + "'");
final Activity activity = new StandardActivityType(activityDef, labels).getAssembledActivity(activityDef, this.activityMap, labels);
this.activityMap.put(activity.getAlias(),activity);
ActivityLoader.logger.debug("Resolved activity for alias '{}'", activityDef.getAlias());
return activity;
}

public void purgeActivity(String activityAlias) {
this.activityMap.remove(activityAlias);
public void purgeActivity(final String activityAlias) {
activityMap.remove(activityAlias);
}
}

@@ -16,6 +16,7 @@

package io.nosqlbench.engine.core.lifecycle.activity;

import io.nosqlbench.api.config.NBLabeledElement;
import io.nosqlbench.engine.api.activityapi.core.ActivityType;
import io.nosqlbench.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
@@ -44,76 +45,71 @@ public class ActivityTypeLoader {
private final SimpleServiceLoader<DriverAdapter> DRIVERADAPTER_SPI_FINDER = new SimpleServiceLoader<>(DriverAdapter.class, Maturity.Any);
private final Set<URL> jarUrls = new HashSet<>();

public ActivityTypeLoader setMaturity(Maturity maturity) {
ACTIVITYTYPE_SPI_FINDER.setMaturity(maturity);
public ActivityTypeLoader setMaturity(final Maturity maturity) {
this.ACTIVITYTYPE_SPI_FINDER.setMaturity(maturity);
return this;
}

public ActivityTypeLoader() {

List<String> libpaths = NBEnvironment.INSTANCE.interpolateEach(":", "$" + NBEnvironment.NBLIBS);
final List<String> libpaths = NBEnvironment.INSTANCE.interpolateEach(":", '$' + NBEnvironment.NBLIBS);
Set<URL> urlsToAdd = new HashSet<>();

for (String libpaths_entry : libpaths) {
Path libpath = Path.of(libpaths_entry);
if (Files.isDirectory(libpath)) {
urlsToAdd = addLibDir(urlsToAdd, libpath);
} else if (Files.isRegularFile(libpath) && libpath.toString().toLowerCase().endsWith(".zip")) {
urlsToAdd = addZipDir(urlsToAdd, libpath);
} else if (Files.isRegularFile(libpath) && libpath.toString().toLowerCase().endsWith(".jar")) {
urlsToAdd = addJarFile(urlsToAdd, libpath);
}
for (final String libpaths_entry : libpaths) {
final Path libpath = Path.of(libpaths_entry);
if (Files.isDirectory(libpath)) urlsToAdd = this.addLibDir(urlsToAdd, libpath);
else if (Files.isRegularFile(libpath) && libpath.toString().toLowerCase().endsWith(".zip"))
urlsToAdd = this.addZipDir(urlsToAdd, libpath);
else if (Files.isRegularFile(libpath) && libpath.toString().toLowerCase().endsWith(".jar"))
urlsToAdd = this.addJarFile(urlsToAdd, libpath);
}
extendClassLoader(urlsToAdd);
this.extendClassLoader(urlsToAdd);
}

private synchronized void extendClassLoader(String... paths) {
Set<URL> urls = new HashSet<>();
for (String path : paths) {
private synchronized void extendClassLoader(final String... paths) {
final Set<URL> urls = new HashSet<>();
for (final String path : paths) {
URL url = null;
try {
url = new URL(path);
} catch (MalformedURLException e) {
} catch (final MalformedURLException e) {
throw new RuntimeException(e);
}
urls.add(url);
}
extendClassLoader(urls);
this.extendClassLoader(urls);
}

private synchronized void extendClassLoader(Set<URL> urls) {
Set<URL> newUrls = new HashSet<>();
if (!jarUrls.containsAll(urls)) {
for (URL url : urls) {
if (!jarUrls.contains(url)) {
private synchronized void extendClassLoader(final Set<URL> urls) {
final Set<URL> newUrls = new HashSet<>();
if (!this.jarUrls.containsAll(urls)) {
for (final URL url : urls)
if (!this.jarUrls.contains(url)) {
newUrls.add(url);
jarUrls.add(url);
this.jarUrls.add(url);
}
}
URL[] newUrlAry = newUrls.toArray(new URL[]{});
URLClassLoader ucl = URLClassLoader.newInstance(newUrlAry, Thread.currentThread().getContextClassLoader());
final URL[] newUrlAry = newUrls.toArray(new URL[]{});
final URLClassLoader ucl = URLClassLoader.newInstance(newUrlAry, Thread.currentThread().getContextClassLoader());
Thread.currentThread().setContextClassLoader(ucl);
logger.debug("Extended class loader layering with " + newUrls);
} else {
logger.debug("All URLs specified were already in a class loader.");
}
ActivityTypeLoader.logger.debug("Extended class loader layering with {}", newUrls);
} else ActivityTypeLoader.logger.debug("All URLs specified were already in a class loader.");
}
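extendClassLoader above layers a new URLClassLoader over the current context class loader so later SPI lookups can see drivers from extra jars. A standalone sketch of that layering using only the JDK; the class and method names are illustrative:

import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Path;
import java.util.LinkedHashSet;
import java.util.Set;

final class ClassLoaderLayering {
    // Wrap the context class loader with one that can also see the given jar files.
    static void addJarsToContext(Set<Path> jars) throws Exception {
        Set<URL> urls = new LinkedHashSet<>();
        for (Path jar : jars) {
            urls.add(jar.toUri().toURL());
        }
        URLClassLoader layered = URLClassLoader.newInstance(
            urls.toArray(new URL[0]),
            Thread.currentThread().getContextClassLoader());
        Thread.currentThread().setContextClassLoader(layered);
    }
}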

private Set<URL> addJarFile(Set<URL> urls, Path libpath) {
private Set<URL> addJarFile(final Set<URL> urls, final Path libpath) {
try {
urls.add(libpath.toUri().toURL());
} catch (MalformedURLException e) {
} catch (final MalformedURLException e) {
throw new RuntimeException(e);
}
return urls;
}

private Set<URL> addZipDir(Set<URL> urlsToAdd, Path libpath) {
private Set<URL> addZipDir(final Set<URL> urlsToAdd, final Path libpath) {
return urlsToAdd;
}

private Set<URL> addLibDir(Set<URL> urlsToAdd, Path libpath) {
Set<URL> urls = NBIO.local()
private Set<URL> addLibDir(final Set<URL> urlsToAdd, final Path libpath) {
final Set<URL> urls = NBIO.local()
.searchPrefixes(libpath.toString())
.extensionSet(".jar")
.list().stream().map(Content::getURL)
@@ -122,16 +118,16 @@ public class ActivityTypeLoader {
return urlsToAdd;
}

public Optional<ActivityType> load(ActivityDef activityDef) {
public Optional<ActivityType> load(final ActivityDef activityDef, final NBLabeledElement labels) {

final String driverName = activityDef.getParams()
String driverName = activityDef.getParams()
.getOptionalString("driver", "type")
.orElseThrow(() -> new BasicError("The parameter 'driver=' is required."));

activityDef.getParams()
.getOptionalString("jar")
.map(jar -> {
Set<URL> urls = NBIO.local().search(jar)
final Set<URL> urls = NBIO.local().search(jar)
.list()
.stream().map(Content::getURL)
.collect(Collectors.toSet());
@@ -139,28 +135,27 @@ public class ActivityTypeLoader {
})
.ifPresent(this::extendClassLoader);

return this.getDriverAdapter(driverName,activityDef)
.or(() -> ACTIVITYTYPE_SPI_FINDER.getOptionally(driverName));
return getDriverAdapter(driverName,activityDef,labels)
.or(() -> this.ACTIVITYTYPE_SPI_FINDER.getOptionally(driverName));

}

private Optional<ActivityType> getDriverAdapter(String activityTypeName, ActivityDef activityDef) {
Optional<DriverAdapter> oda = DRIVERADAPTER_SPI_FINDER.getOptionally(activityTypeName);
private Optional<ActivityType> getDriverAdapter(final String activityTypeName, final ActivityDef activityDef, final NBLabeledElement labels) {
final Optional<DriverAdapter> oda = this.DRIVERADAPTER_SPI_FINDER.getOptionally(activityTypeName);

if (oda.isPresent()) {
DriverAdapter<?, ?> driverAdapter = oda.get();
final DriverAdapter<?, ?> driverAdapter = oda.get();

ActivityType activityType = new StandardActivityType<>(driverAdapter, activityDef);
final ActivityType activityType = new StandardActivityType<>(driverAdapter, activityDef, labels);
return Optional.of(activityType);
} else {
return Optional.empty();
}
return Optional.empty();
}

public Set<String> getAllSelectors() {
Map<String, Maturity> allSelectors = ACTIVITYTYPE_SPI_FINDER.getAllSelectors();
Map<String, Maturity> addAdapters = DRIVERADAPTER_SPI_FINDER.getAllSelectors();
Set<String> all = new LinkedHashSet<>();
final Map<String, Maturity> allSelectors = this.ACTIVITYTYPE_SPI_FINDER.getAllSelectors();
final Map<String, Maturity> addAdapters = this.DRIVERADAPTER_SPI_FINDER.getAllSelectors();
final Set<String> all = new LinkedHashSet<>();
all.addAll(allSelectors.keySet());
all.addAll(addAdapters.keySet());
return all;

Some files were not shown because too many files have changed in this diff.