Merge a18311b4ee into 8cc5bd77da
This commit is contained in:
commit 2012a15e31
@@ -19,6 +19,7 @@ package io.nosqlbench.adapter.amqp;
import io.nosqlbench.adapter.amqp.dispensers.AmqpMsgRecvOpDispenser;
import io.nosqlbench.adapter.amqp.dispensers.AmqpMsgSendOpDispenser;
import io.nosqlbench.adapter.amqp.ops.AmqpTimeTrackOp;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;

@@ -43,7 +44,7 @@ public class AmqpOpMapper implements OpMapper<AmqpTimeTrackOp,AmqpSpace> {
}

@Override
public OpDispenser<AmqpTimeTrackOp> apply(NBComponent adapterC, ParsedOp op, LongFunction spaceF) {
public OpDispenser<AmqpTimeTrackOp> apply(NBComponent adapterC, ParsedOp op, LongFunction<AmqpSpace> spaceF) {
//public OpDispenser<AmqpTimeTrackOp> apply(ParsedOp op, LongFunction<AmqpTimeTrackOp> spaceInitF) {
int spaceName = op.getStaticConfigOr("space", 0);

@@ -66,6 +67,4 @@ public class AmqpOpMapper implements OpMapper<AmqpTimeTrackOp,AmqpSpace> {
};
}
}

}
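Throughout this commit, OpMapper.apply signatures are tightened from a raw LongFunction to a typed space function such as LongFunction<AmqpSpace>. A minimal illustrative sketch of what the typed parameter buys inside a mapper or dispenser; spaceF and cycle are assumed names for illustration, while getSpaceFunc(op) appears elsewhere in this diff:

// Illustration only: with a typed function, per-cycle space access needs no cast.
LongFunction<AmqpSpace> spaceF = l -> adapter.getSpaceFunc(op).apply(l);
AmqpSpace space = spaceF.apply(cycle);   // typed per-space client state for this cycle
// With a raw LongFunction, the same access would require an explicit (AmqpSpace) cast.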
@@ -16,6 +16,7 @@

package io.nosqlbench.adapter.azureaisearch;

import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.nb.api.components.core.NBComponent;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -18,8 +18,6 @@ package io.nosqlbench.adapter.cqld4;

import io.nosqlbench.adapter.cqld4.opmappers.Cqld4CoreOpMapper;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4BaseOp;
import io.nosqlbench.adapters.api.activityimpl.uniform.Validator;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
@@ -18,12 +18,26 @@ package io.nosqlbench.adapter.cqld4.opdispensers;
*/

import com.datastax.oss.driver.api.core.cql.ColumnDefinition;
import com.datastax.oss.driver.api.core.cql.ColumnDefinitions;
import com.datastax.oss.driver.api.core.cql.Row;
import io.nosqlbench.adapter.cqld4.Cqld4DriverAdapter;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4BaseOp;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.virtdata.core.templates.CapturePoint;
import io.nosqlbench.virtdata.core.templates.CapturePointException;
import io.nosqlbench.virtdata.core.templates.CapturePoints;
import io.nosqlbench.virtdata.core.templates.UniformVariableCapture;

public abstract class Cqld4CqlBaseOpDispenser<T extends Cqld4CqlOp> extends Cqld4BaseOpDispenser<T> {
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public abstract class Cqld4CqlBaseOpDispenser<T extends Cqld4CqlOp> extends Cqld4BaseOpDispenser<T>
implements UniformVariableCapture<List<Row>>
{

public Cqld4CqlBaseOpDispenser(Cqld4DriverAdapter adapter, ParsedOp op) {
super(adapter, op);
@@ -31,4 +45,35 @@ public abstract class Cqld4CqlBaseOpDispenser<T extends Cqld4CqlOp> extends Cqld

@Override
public abstract T getOp(long cycle);

@Override
public Function<List<Row>, Map<String, ?>> captureF(CapturePoints<List<Row>> points) {
Function<List<Row>, Map<String, ?>> f = (List<Row> result) -> {
if (result.size() != 1) {
throw new CapturePointException(
"result contained " + result.size() + " rows, required exactly 1");
}
Row row = result.get(0);
ColumnDefinitions coldefs = row.getColumnDefinitions();
Map<String, Object> values = new HashMap<>(coldefs.size());

if (points.isGlob()) {
for (ColumnDefinition coldef : coldefs) {
String colname = coldef.getName().toString();
values.put(colname, row.getObject(colname));
}
} else {
for (CapturePoint<List<Row>> point : points) {
String sourceName = point.getSourceName();
Object value = row.getObject(point.getSourceName());
Object recast = point.getAsCast().cast(value);
values.put(point.getAsName(), recast);
}
}

return values;
};
return f;
}

}
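The captureF method above converts a set of capture points into a function that extracts named values from a single-row CQL result, either globbing all columns or casting selected ones. A minimal usage sketch, assuming a dispenser instance, a CapturePoints definition named points, and a one-row List<Row> named rows (all illustrative names, not part of this diff):

// Illustration only; the op execution harness normally drives this path.
Function<List<Row>, Map<String, ?>> capture = dispenser.captureF(points);
Map<String, ?> vars = capture.apply(rows);   // throws CapturePointException unless rows holds exactly one row
vars.forEach((name, value) -> logger.info(name + " = " + value));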
@@ -29,7 +29,7 @@ import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.Optional;
import java.util.function.LongFunction;

public class Cqld4GremlinOpDispenser extends BaseOpDispenser<Cqld4BaseOp<?>, Cqld4Space> {
public class Cqld4GremlinOpDispenser extends BaseOpDispenser<Cqld4ScriptGraphOp, Cqld4Space> {

private final LongFunction<? extends ScriptGraphStatement> stmtFunc;
private final LongFunction<CqlSession> sessionFunc;
@@ -17,23 +17,27 @@
package io.nosqlbench.adapter.cqld4.opdispensers;

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.BoundStatement;
import com.datastax.oss.driver.api.core.cql.PreparedStatement;
import com.datastax.oss.driver.api.core.cql.Statement;
import com.datastax.oss.driver.api.core.cql.*;
import io.nosqlbench.adapter.cqld4.Cqld4DriverAdapter;
import io.nosqlbench.adapter.cqld4.Cqld4Space;
import io.nosqlbench.adapter.cqld4.RSProcessors;
import io.nosqlbench.adapter.cqld4.diagnostics.CQLD4PreparedStmtDiagnostics;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlPreparedStatement;
import io.nosqlbench.adapters.api.activityimpl.uniform.FieldBindingsMetadata;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.components.core.NBNamedElement;
import io.nosqlbench.nb.api.errors.OpConfigError;
import io.nosqlbench.virtdata.core.templates.BindPoint;
import io.nosqlbench.virtdata.core.templates.ParsedTemplateString;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.LongFunction;

public class Cqld4PreparedStmtDispenser extends Cqld4BaseOpDispenser<Cqld4CqlPreparedStatement> {
public class Cqld4PreparedStmtDispenser extends Cqld4BaseOpDispenser<Cqld4CqlPreparedStatement> implements FieldBindingsMetadata {
private final static Logger logger = LogManager.getLogger(Cqld4PreparedStmtDispenser.class);

private final RSProcessors processors;
@@ -41,6 +45,7 @@ public class Cqld4PreparedStmtDispenser extends Cqld4BaseOpDispenser<Cqld4CqlPre
private final ParsedTemplateString stmtTpl;
private final LongFunction<Object[]> fieldsF;
private final LongFunction<Cqld4Space> spaceInitF;
private final LongFunction<PreparedStatement> cachedStatementF;
private PreparedStatement preparedStmt;
// This is a stable enum for the op template from the workload, bounded by cardinality of all op templates
private int refkey;
@@ -57,7 +62,8 @@ public class Cqld4PreparedStmtDispenser extends Cqld4BaseOpDispenser<Cqld4CqlPre
this.stmtTpl = stmtTpl;
this.fieldsF = getFieldsFunction(op);
this.spaceInitF = spaceInitF;
stmtFunc = createStmtFunc(fieldsF, op);
this.cachedStatementF = getCachedStatementF(fieldsF, op);
stmtFunc = createStmtFunc(fieldsF, cachedStatementF, op);
}

private LongFunction<Object[]> getFieldsFunction(ParsedOp op) {
@@ -68,8 +74,7 @@ public class Cqld4PreparedStmtDispenser extends Cqld4BaseOpDispenser<Cqld4CqlPre
}

protected LongFunction<Statement> createStmtFunc(LongFunction<Object[]> fieldsF, ParsedOp op) {

protected LongFunction<PreparedStatement> getCachedStatementF(LongFunction<Object[]> fieldsF, ParsedOp op) {
try {
String preparedQueryString = stmtTpl.getPositionalStatement(s -> "?");

@@ -80,9 +85,21 @@ public class Cqld4PreparedStmtDispenser extends Cqld4BaseOpDispenser<Cqld4CqlPre
(long l) -> spaceInitF.apply(l);

int refKey = op.getRefKey();
LongFunction<PreparedStatement> cachedStatementF =
(long l) -> lookupSpaceF.apply(l).getOrCreatePreparedStatement(refKey,prepareStatementF);
LongFunction<PreparedStatement> cStmtF = (long l) -> lookupSpaceF.apply(
l).getOrCreatePreparedStatement(refKey, prepareStatementF);

return cStmtF;
} catch (Exception e) {
throw new OpConfigError(e + "( for statement '" + stmtTpl + "')");
}

}

protected LongFunction<Statement> createStmtFunc(LongFunction<Object[]> fieldsF,
LongFunction<PreparedStatement> cachedStatementF,
ParsedOp op) {

try {
LongFunction<Statement> boundStatementF =
(long l) -> cachedStatementF.apply(l).bind(fieldsF.apply(l));

@@ -94,6 +111,26 @@ public class Cqld4PreparedStmtDispenser extends Cqld4BaseOpDispenser<Cqld4CqlPre

}

@Override
public Map<String,BindPoint> getFieldBindingsMap() {
PreparedStatement ps = this.cachedStatementF.apply(0);

ColumnDefinitions cdefs = ps.getVariableDefinitions();
List<BindPoint> bdefs = stmtTpl.getBindPoints();

if (cdefs.size()!=bdefs.size()){
throw new OpConfigError("The number of column defs does not match the number of " +
"bindings specified for " + this.getOpName());
}

Map<String,BindPoint> fbmap = new LinkedHashMap<>(cdefs.size());
for (int i = 0; i < cdefs.size(); i++) {
ColumnDefinition cdef = cdefs.get(i);
fbmap.put(cdefs.get(i).getName().asCql(true),bdefs.get(i));
}
return fbmap;
}

@Override
public Cqld4CqlPreparedStatement getOp(long cycle) {
BoundStatement stmt = (BoundStatement) stmtFunc.apply(cycle);
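The refactor above separates statement preparation (getCachedStatementF) from binding (createStmtFunc) and resolves the PreparedStatement through the space via getOrCreatePreparedStatement, keyed by the op template's refKey. A minimal sketch of what such a space-side cache could look like; the Cqld4Space implementation is not part of this diff, and the field name and map type below are assumptions:

// Hypothetical Cqld4Space fragment, for illustration only.
private final Map<Integer, PreparedStatement> preparedStmts = new ConcurrentHashMap<>();

public PreparedStatement getOrCreatePreparedStatement(
    int refKey, LongFunction<PreparedStatement> prepareF) {
    // Prepare once per op template (refKey); every cycle in this space reuses the same statement.
    return preparedStmts.computeIfAbsent(refKey, k -> prepareF.apply(refKey));
}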
@@ -23,6 +23,7 @@ import io.nosqlbench.adapter.cqld4.opdispensers.CqlD4BatchStmtDispenser;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlBatchStatement;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlOp;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.engine.api.templating.TypeAndTarget;
import io.nosqlbench.nb.api.components.core.NBComponent;

@@ -21,6 +21,7 @@ import io.nosqlbench.adapter.cqld4.Cqld4Space;
import io.nosqlbench.adapter.cqld4.opdispensers.Cqld4SimpleCqlStmtDispenser;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlSimpleStatement;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.components.core.NBComponent;

@@ -21,6 +21,7 @@ import io.nosqlbench.adapter.cqld4.opdispensers.Cqld4PreparedStmtDispenser;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlPreparedStatement;
import io.nosqlbench.adapter.cqld4.processors.CqlFieldCaptureProcessor;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.engine.api.templating.TypeAndTarget;
import io.nosqlbench.nb.api.components.core.NBComponent;

@@ -18,9 +18,11 @@ package io.nosqlbench.adapter.cqld4.opmappers;

import com.datastax.oss.driver.api.core.CqlSession;
import io.nosqlbench.adapter.cqld4.Cqld4DriverAdapter;
import io.nosqlbench.adapter.cqld4.Cqld4Space;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4RainbowTableOp;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.components.core.NBComponent;

@@ -41,7 +43,7 @@ public class CqlD4RainbowTableMapper<CO extends Cqld4RainbowTableOp> extends Cql
}

@Override
public OpDispenser<Cqld4RainbowTableOp> apply(NBComponent adapterC, ParsedOp op, LongFunction spaceF) {
public OpDispenser<Cqld4RainbowTableOp> apply(NBComponent adapterC, ParsedOp op, LongFunction<Cqld4Space> spaceF) {
return null;
// return new CqlD4RainbowTableDispenser(adapter, sessionFunc,targetFunction, op);
}

@@ -21,6 +21,7 @@ import io.nosqlbench.adapter.cqld4.Cqld4Space;
import io.nosqlbench.adapter.cqld4.opdispensers.Cqld4RawStmtDispenser;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlSimpleStatement;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.components.core.NBComponent;

@@ -21,6 +21,7 @@ import io.nosqlbench.adapter.cqld4.Cqld4Space;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4BaseOp;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.components.core.NBComponent;
import org.apache.logging.log4j.LogManager;

@@ -19,6 +19,7 @@ package io.nosqlbench.adapter.cqld4.opmappers;
import io.nosqlbench.adapter.cqld4.Cqld4DriverAdapter;
import io.nosqlbench.adapter.cqld4.Cqld4Space;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4BaseOp;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;

@@ -22,6 +22,7 @@ import io.nosqlbench.adapter.cqld4.Cqld4DriverAdapter;
import io.nosqlbench.adapter.cqld4.Cqld4Space;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlOp;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.components.core.NBComponent;

@@ -21,6 +21,7 @@ import io.nosqlbench.adapter.cqld4.Cqld4Space;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlOp;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlSimpleStatement;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.engine.api.templating.TypeAndTarget;
import io.nosqlbench.nb.api.components.core.NBComponent;

@@ -25,6 +25,7 @@ import io.nosqlbench.adapter.cqld4.Cqld4Space;
import io.nosqlbench.adapter.cqld4.opdispensers.Cqld4FluentGraphOpDispenser;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4FluentGraphOp;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.engine.api.templating.TypeAndTarget;
import io.nosqlbench.nb.api.components.core.NBComponent;
@@ -17,8 +17,10 @@
package io.nosqlbench.adapter.cqld4.opmappers;

import io.nosqlbench.adapter.cqld4.Cqld4DriverAdapter;
import io.nosqlbench.adapter.cqld4.Cqld4Space;
import io.nosqlbench.adapter.cqld4.opdispensers.Cqld4GremlinOpDispenser;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4ScriptGraphOp;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.components.core.NBComponent;

@@ -33,7 +35,7 @@ public class Cqld4GremlinOpMapper<CO extends Cqld4ScriptGraphOp> extends Cqld4Ba
}

@Override
public Cqld4GremlinOpDispenser apply(NBComponent adapterC, ParsedOp op, LongFunction spaceF) {
public OpDispenser<Cqld4ScriptGraphOp> apply(NBComponent adapterC, ParsedOp op, LongFunction<Cqld4Space> spaceF) {
return new Cqld4GremlinOpDispenser(
adapter,
l -> adapter.getSpaceFunc(op).apply(l).getSession(), targetFunction, op);
@@ -50,7 +50,7 @@ import java.util.function.Function;

public abstract class Cqld4CqlOp
implements Cqld4BaseOp<List<Row>>, UniformVariableCapture<List<Row>>, OpGenerator, OpResultSize {
implements Cqld4BaseOp<List<Row>>, OpGenerator, OpResultSize {

private final static Logger logger = LogManager.getLogger(Cqld4CqlOp.class);

@@ -168,34 +168,6 @@ public abstract class Cqld4CqlOp
return next;
}

@Override
public Function<List<Row>, Map<String, ?>> initCaptureF(CapturePoints<List<Row>> points) {
Function<List<Row>,Map<String,?>> f = (List<Row> result) -> {
if (result.size()!=1) {
throw new CapturePointException("result contained " + result.size() + " rows, required exactly 1");
}
Row row = result.get(0);
ColumnDefinitions coldefs = row.getColumnDefinitions();
Map<String,Object> values = new HashMap<>(coldefs.size());

if (points.isGlob()) {
for (ColumnDefinition coldef : coldefs) {
String colname = coldef.getName().toString();
values.put(colname,row.getObject(colname));
}
} else {
for (CapturePoint<List<Row>> point : points) {
String sourceName = point.getSourceName();
Object value = row.getObject(point.getSourceName());
Object recast = point.getAsCast().cast(value);
values.put(point.getAsName(), recast);
}
}

return values;
};
return f;
}

public abstract Statement<?> getStmt();
@@ -246,6 +246,7 @@ public class CGWorkloadExporter implements BundledApp {
case "select_seq" -> genSelectOpTemplates(model, blockname);
case "scan_10_seq" -> genScanOpTemplates(model, blockname);
case "update_seq" -> genUpdateOpTemplates(model, blockname);
case "verify_rampup" -> genVerifyRampupTemplates(model,blockname,"main_insert");
default -> throw new RuntimeException("Unable to create block entries for " + component + ".");
};
block.putAll(additions);

@@ -379,29 +380,74 @@ public class CGWorkloadExporter implements BundledApp {
}

private Map<String, Object> genSelectOpTemplates(CqlModel model, String blockname) {
Map<String, Object> blockdata = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
blockdata.put("ops", ops);
for (CqlTable table : model.getTableDefs()) {
ops.put(
namer.nameFor(table, "optype", "select", "blockname", blockname),
Map.of(
"prepared", genSelectSyntax(table),
"timeout", timeouts.get("select"),
"ratio", readRatioFor(table)
)
namer.nameFor(table, "optype", "select", "blockname", blockname),
Map.of(
"prepared", genSelectSyntax(table,false),
"timeout", timeouts.get("select"),
"ratio", readRatioFor(table)
)
);
}
return blockdata;
}
private Map<String, Object> genVerifyRampupTemplates(CqlModel model, String blockname,
String refBlockName) {

private String genSelectSyntax(CqlTable table) {
Map<String, Object> blockdata = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();

// select
blockdata.put("ops", ops);
for (CqlTable table : model.getTableDefs()) {
String opName = namer.nameFor(table, "optype", "verify", "blockname", blockname);
// String refOpName = namer.nameFor(table,"optype",);

ops.put(
opName,
Map.of(
"prepared", genSelectSyntax(table,true),
"timeout", timeouts.get("select"),
"ratio", readRatioFor(table),
"verify", "op(block:"+refBlockName+",op:"+")"
)
);
}

// insert
blockdata.put("ops", ops);
for (CqlTable table : model.getTableDefs()) {
if (!isCounterTable(table)) {
ops.put(
namer.nameFor(table, "optype", "insert", "blockname", blockname),
Map.of(
"prepared", genInsertSyntax(table),
"timeout", timeouts.get("insert"),
"ratio", writeRatioFor(table)
)
);
}
}

return blockdata;

}

private String genSelectSyntax(CqlTable table, boolean withCapture) {
return """
select * from KEYSPACE.TABLE
select FIELDS from KEYSPACE.TABLE
where PREDICATE
LIMIT;
"""
.replace("FIELDS",withCapture ? "[*]" : "*")
.replace("KEYSPACE", table.getKeyspace().getName())
.replace("TABLE", table.getName())
.replace("PREDICATE", genPredicateTemplate(table, 0))
@ -21,6 +21,7 @@ import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
|
||||
import io.nosqlbench.adapter.dataapi.ops.DataApiOpType;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.engine.api.templating.TypeAndTarget;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
|
@ -20,6 +20,7 @@ package io.nosqlbench.adapter.diag;
|
||||
import io.nosqlbench.adapters.api.activityconfig.OpsLoader;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplate;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplateFormat;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplates;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpsDocList;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.BaseDriverAdapter;
|
||||
@ -70,13 +71,13 @@ public class DiagDriverAdapter extends BaseDriverAdapter<DiagOp, DiagSpace> impl
|
||||
@Override
|
||||
public NBConfigModel getConfigModel() {
|
||||
NBConfigModel model = super.getConfigModel();
|
||||
model.add(DiagSpace.getConfigModel());
|
||||
model.add(DiagSpace.getStaticConfigModel());
|
||||
return model;
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBConfigModel getReconfigModel() {
|
||||
NBConfigModel model = super.getReconfigModel();
|
||||
NBConfigModel model = getReconfigModel();
|
||||
NBConfigModel mapperModel = NBReconfigurable.collectModels(DiagDriverAdapter.class, List.of(mapper));
|
||||
return model.add(mapperModel);
|
||||
}
|
||||
@ -107,9 +108,12 @@ public class DiagDriverAdapter extends BaseDriverAdapter<DiagOp, DiagSpace> impl
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<OpTemplate> getSyntheticOpTemplates(OpsDocList opsDocList, Map<String, Object> params) {
|
||||
return OpsLoader.loadString("noop: noop", OpTemplateFormat.inline, params,null).getOps(true);
|
||||
// return OpsLoader.loadString("log:level=INFO", OpTemplateFormat.inline, params,null).getOps();
|
||||
public OpTemplates getSyntheticOpTemplates(
|
||||
OpTemplates opTemplates,
|
||||
Map<String, Object> params) {
|
||||
OpTemplates matching = OpsLoader.loadString(
|
||||
"noop: noop", OpTemplateFormat.inline, params, null).getOps().matching("", true);
|
||||
return matching;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -20,6 +20,7 @@ import io.nosqlbench.adapter.diag.optasks.DiagTask;
|
||||
import io.nosqlbench.adapters.api.activityimpl.BaseOpDispenser;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfigurable;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
import io.nosqlbench.nb.api.config.standard.NBReconfigurable;
|
||||
import io.nosqlbench.nb.api.components.core.NBParentComponentInjection;
|
||||
@ -102,6 +103,16 @@ public class DiagOpDispenser extends BaseOpDispenser<DiagOp,DiagSpace> implement
|
||||
return opFunc.getReconfigModel();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyConfig(NBConfiguration cfg) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBConfigModel getConfigModel() {
|
||||
return null;
|
||||
}
|
||||
|
||||
private final static class OpFunc implements LongFunction<DiagOp>, NBReconfigurable {
|
||||
private final List<DiagTask> tasks;
|
||||
private final LongFunction<DiagSpace> spaceF;
|
||||
@ -126,6 +137,17 @@ public class DiagOpDispenser extends BaseOpDispenser<DiagOp,DiagSpace> implement
|
||||
public NBConfigModel getReconfigModel() {
|
||||
return NBReconfigurable.collectModels(DiagTask.class, tasks);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyConfig(NBConfiguration cfg) {
|
||||
NBConfigurable.applyMatching(cfg, tasks);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBConfigModel getConfigModel() {
|
||||
return NBConfigurable.collectModels(DiagTask.class, tasks);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -20,9 +20,7 @@ import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
import io.nosqlbench.nb.api.config.standard.NBReconfigurable;
|
||||
import io.nosqlbench.nb.api.config.standard.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
@ -32,6 +30,7 @@ import java.util.function.LongFunction;
|
||||
public class DiagOpMapper implements OpMapper<DiagOp,DiagSpace>, NBReconfigurable {
|
||||
private final Map<String,DiagOpDispenser> dispensers = new LinkedHashMap<>();
|
||||
private final DiagDriverAdapter adapter;
|
||||
private NBConfiguration config;
|
||||
|
||||
public DiagOpMapper(DiagDriverAdapter adapter) {
|
||||
this.adapter = adapter;
|
||||
@ -45,6 +44,16 @@ public class DiagOpMapper implements OpMapper<DiagOp,DiagSpace>, NBReconfigurabl
|
||||
return dispenser;
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBConfigModel getConfigModel() {
|
||||
return ConfigModel.of(DiagOpMapper.class).asReadOnly();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyConfig(NBConfiguration cfg) {
|
||||
this.config = cfg;
|
||||
NBConfigurable.applyMatching(cfg, dispensers.values());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyReconfig(NBConfiguration recfg) {
|
||||
|
@ -17,17 +17,12 @@
|
||||
package io.nosqlbench.adapter.diag;
|
||||
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.BaseSpace;
|
||||
import io.nosqlbench.engine.api.activityapi.core.ActivityDefObserver;
|
||||
import io.nosqlbench.engine.api.activityapi.simrate.RateLimiter;
|
||||
import io.nosqlbench.nb.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.nb.api.config.standard.ConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
import io.nosqlbench.nb.api.config.standard.Param;
|
||||
import io.nosqlbench.nb.api.config.standard.*;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
public class DiagSpace extends BaseSpace<DiagSpace> implements ActivityDefObserver {
|
||||
public class DiagSpace extends BaseSpace<DiagSpace> implements NBConfigurable {
|
||||
private final Logger logger = LogManager.getLogger(DiagSpace.class);
|
||||
|
||||
private final NBConfiguration cfg;
|
||||
@ -47,7 +42,10 @@ public class DiagSpace extends BaseSpace<DiagSpace> implements ActivityDefObserv
|
||||
this.errorOnClose = cfg.get("erroronclose",boolean.class);
|
||||
}
|
||||
|
||||
public static NBConfigModel getConfigModel() {
|
||||
public NBConfigModel getConfigModel() {
|
||||
return getStaticConfigModel();
|
||||
}
|
||||
public static NBConfigModel getStaticConfigModel() {
|
||||
return ConfigModel.of(DiagSpace.class)
|
||||
.add(Param.defaultTo("interval",1000))
|
||||
.add(Param.defaultTo("erroronclose", false))
|
||||
@ -60,11 +58,6 @@ public class DiagSpace extends BaseSpace<DiagSpace> implements ActivityDefObserv
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onActivityDefUpdate(ActivityDef activityDef) {
|
||||
NBConfiguration cfg = getConfigModel().apply(activityDef.getParams().getStringStringMap());
|
||||
this.applyConfig(cfg);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
@ -73,4 +66,5 @@ public class DiagSpace extends BaseSpace<DiagSpace> implements ActivityDefObserv
|
||||
throw new RuntimeException("diag space was configured to throw this error when it was configured.");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -137,6 +137,7 @@ public class DiagTask_gauge extends BaseDiagTask implements Gauge<Double>, NBPar
|
||||
}
|
||||
|
||||
logger.info("Registering gauge for diag task with labels:" + getParentLabels().getLabels() + " label:" + label);
|
||||
this.sampleValue=this.function.applyAsDouble(0L);
|
||||
this.gauge=parent.create().gauge(
|
||||
label,
|
||||
() -> this.sampleValue,
|
||||
|
@ -22,6 +22,7 @@ import io.nosqlbench.adapter.dynamodb.optypes.DynamoDBOp;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.engine.api.templating.TypeAndTarget;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
|
@ -23,6 +23,7 @@ import io.nosqlbench.adapter.prototype.ops.ExampleOpType1;
|
||||
import io.nosqlbench.adapter.prototype.ops.ExampleOpTypes;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.engine.api.templating.TypeAndTarget;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
|
@ -2,13 +2,13 @@ package io.nosqlbench.adapter.prototype.dispensers;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@ -31,7 +31,7 @@ public class ExampleOpDispenserType1 extends BaseOpDispenser<ExampleOpType1, Exa
|
||||
public ExampleOpDispenserType1(
|
||||
NBComponent adapter,
|
||||
ParsedOp pop,
|
||||
LongFunction<ExampleSpace> spaceInitF
|
||||
LongFunction<? extends ExampleSpace> spaceInitF
|
||||
) {
|
||||
super(adapter, pop, spaceInitF);
|
||||
}
|
||||
|
@ -21,6 +21,7 @@ import io.nosqlbench.adapter.gcpspanner.ops.GCPSpannerBaseOp;
|
||||
import io.nosqlbench.adapter.gcpspanner.types.GCPSpannerOpType;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.engine.api.templating.TypeAndTarget;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
|
@ -20,6 +20,7 @@ import io.nosqlbench.adapter.http.HttpDriverAdapter;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
|
@ -54,8 +54,9 @@ public class HttpOpMapperTest {
|
||||
|
||||
private static ParsedOp parsedOpFor(final String yaml) {
|
||||
final OpsDocList docs = OpsLoader.loadString(yaml, OpTemplateFormat.yaml, Map.of(), null);
|
||||
final OpTemplate opTemplate = docs.getOps(true).get(0);
|
||||
final ParsedOp parsedOp = new ParsedOp(opTemplate, HttpOpMapperTest.cfg, List.of(HttpOpMapperTest.adapter.getPreprocessor()), new TestComponent("parent","parent"));
|
||||
final OpTemplate opTemplate = docs.getOps().get(0);
|
||||
final ParsedOp parsedOp = new ParsedOp(opTemplate, HttpOpMapperTest.cfg.getMap(),
|
||||
List.of(HttpOpMapperTest.adapter.getPreprocessor()), new TestComponent("parent","parent"));
|
||||
return parsedOp;
|
||||
}
|
||||
|
||||
|
@ -19,6 +19,7 @@ package io.nosqlbench.adapter.kafka;
|
||||
import io.nosqlbench.adapter.kafka.dispensers.MessageConsumerOpDispenser;
|
||||
import io.nosqlbench.adapter.kafka.dispensers.MessageProducerOpDispenser;
|
||||
import io.nosqlbench.adapter.kafka.ops.KafkaOp;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
|
@ -18,6 +18,7 @@ package io.nosqlbench.adapter.mongodb.core;
|
||||
|
||||
import io.nosqlbench.adapter.mongodb.dispensers.MongoCommandOpDispenser;
|
||||
import io.nosqlbench.adapter.mongodb.ops.MongoDirectCommandOp;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
import io.nosqlbench.nb.api.errors.BasicError;
|
||||
|
@ -30,7 +30,7 @@ import io.nosqlbench.nb.annotations.Service;
|
||||
import java.util.function.LongFunction;
|
||||
|
||||
/**
|
||||
* Special thanks to Justin Chu who authored the original NoSQLBench MongoDB ActivityType.
|
||||
* Special thanks to Justin Chu who authored the original NoSQLBench MongoDB StandardActivityType.
|
||||
*/
|
||||
@Service(value = DriverAdapter.class, selector = "mongodb")
|
||||
public class MongodbDriverAdapter extends BaseDriverAdapter<MongoOp<?>, MongoSpace> {
|
||||
|
@ -21,6 +21,7 @@ import io.nosqlbench.adapter.neo4j.ops.Neo4JBaseOp;
|
||||
import io.nosqlbench.adapter.neo4j.types.Neo4JOpType;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.engine.api.templating.TypeAndTarget;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
|
@ -18,6 +18,7 @@ package io.nosqlbench.adapter.pulsar;
|
||||
|
||||
import io.nosqlbench.adapter.pulsar.dispensers.*;
|
||||
import io.nosqlbench.adapter.pulsar.ops.PulsarOp;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
|
@ -21,6 +21,7 @@ import io.nosqlbench.adapter.qdrant.ops.QdrantBaseOp;
|
||||
import io.nosqlbench.adapter.qdrant.types.QdrantOpType;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.engine.api.templating.TypeAndTarget;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
|
@ -19,6 +19,7 @@ package io.nosqlbench.adapter.s4j;
|
||||
import io.nosqlbench.adapter.s4j.dispensers.MessageConsumerOpDispenser;
|
||||
import io.nosqlbench.adapter.s4j.dispensers.MessageProducerOpDispenser;
|
||||
import io.nosqlbench.adapter.s4j.ops.S4JOp;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
|
@ -18,6 +18,7 @@ package io.nosqlbench.adapter.stdout;
|
||||
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpData;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplate;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplates;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpsDocList;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.BaseDriverAdapter;
|
||||
@ -48,7 +49,7 @@ public class StdoutDriverAdapter extends BaseDriverAdapter<StdoutOp, StdoutSpace
|
||||
}
|
||||
|
||||
@Override
|
||||
public OpMapper<StdoutOp,StdoutSpace> getOpMapper() {
|
||||
public OpMapper<StdoutOp, StdoutSpace> getOpMapper() {
|
||||
return new StdoutOpMapper(this);
|
||||
}
|
||||
|
||||
@ -59,57 +60,57 @@ public class StdoutDriverAdapter extends BaseDriverAdapter<StdoutOp, StdoutSpace
|
||||
|
||||
@Override
|
||||
public NBConfigModel getConfigModel() {
|
||||
return ConfigModel.of(this.getClass())
|
||||
.add(super.getConfigModel())
|
||||
.add(StdoutSpace.getConfigModel());
|
||||
return ConfigModel.of(this.getClass()).add(super.getConfigModel()).add(
|
||||
StdoutSpace.getConfigModel());
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<OpTemplate> getSyntheticOpTemplates(OpsDocList opsDocList, Map<String, Object> cfg) {
|
||||
Set<String> activeBindingNames = new LinkedHashSet<>(opsDocList.getDocBindings().keySet());
|
||||
public OpTemplates getSyntheticOpTemplates(OpTemplates opTempl, Map<String, Object> cfg) {
|
||||
Set<String> activeBindingNames = new LinkedHashSet<>(opTempl.getDocBindings().keySet());
|
||||
|
||||
if (activeBindingNames.isEmpty()) {
|
||||
logger.warn("Unable to synthesize op for driver=" + this.getAdapterName() + " with zero bindings.");
|
||||
return List.of();
|
||||
logger.warn(
|
||||
"Unable to synthesize op for driver=" + this.getAdapterName() + " with zero bindings.");
|
||||
return new OpTemplates(List.of(),OpsDocList.none());
|
||||
}
|
||||
|
||||
String bindings = Optional.ofNullable(cfg.get("bindings")).map(Object::toString).orElse("doc");
|
||||
Pattern bindingsFilter = Pattern.compile(bindings.equalsIgnoreCase("doc") ? ".*" : bindings);
|
||||
String bindings = Optional.ofNullable(cfg.get("bindings")).map(Object::toString).orElse(
|
||||
"doc");
|
||||
Pattern bindingsFilter = Pattern.compile(
|
||||
bindings.equalsIgnoreCase("doc") ? ".*" : bindings);
|
||||
|
||||
Set<String> filteredBindingNames = activeBindingNames
|
||||
.stream()
|
||||
.filter(n -> {
|
||||
if (bindingsFilter.matcher(n).matches()) {
|
||||
logger.trace(() -> "bindings filter kept binding '" + n + "'");
|
||||
return true;
|
||||
} else {
|
||||
logger.trace(() -> "bindings filter removed binding '" + n + "'");
|
||||
return false;
|
||||
}
|
||||
})
|
||||
.collect(Collectors.toSet());
|
||||
Set<String> filteredBindingNames = activeBindingNames.stream().filter(n -> {
|
||||
if (bindingsFilter.matcher(n).matches()) {
|
||||
logger.trace(() -> "bindings filter kept binding '" + n + "'");
|
||||
return true;
|
||||
} else {
|
||||
logger.trace(() -> "bindings filter removed binding '" + n + "'");
|
||||
return false;
|
||||
}
|
||||
}).collect(Collectors.toSet());
|
||||
|
||||
if (filteredBindingNames.isEmpty()) {
|
||||
logger.warn("Unable to synthesize op for driver="+getAdapterName()+" when " + activeBindingNames.size()+"/"+activeBindingNames.size() + " bindings were filtered out with bindings=" + bindings);
|
||||
return List.of();
|
||||
logger.warn(
|
||||
"Unable to synthesize op for driver=" + getAdapterName() + " when " + activeBindingNames.size() + "/" + activeBindingNames.size() + " bindings were filtered out with bindings=" + bindings);
|
||||
return new OpTemplates(List.of(),OpsDocList.none());
|
||||
|
||||
}
|
||||
|
||||
OpData op = new OpData("synthetic", "synthetic", Map.of(), opsDocList.getDocBindings(), cfg,
|
||||
Map.of("stmt", genStatementTemplate(filteredBindingNames, cfg)),200);
|
||||
OpData op = new OpData(
|
||||
"synthetic", "synthetic", Map.of(), opTempl.getDocBindings(), cfg,
|
||||
Map.of("stmt", genStatementTemplate(filteredBindingNames, cfg)), 200
|
||||
);
|
||||
|
||||
return List.of(op);
|
||||
return new OpTemplates(List.of(op),OpsDocList.none());
|
||||
}
|
||||
|
||||
private String genStatementTemplate(Set<String> keySet, Map<String, Object> cfg) {
|
||||
TemplateFormat format = Optional.ofNullable(cfg.get("format"))
|
||||
.map(Object::toString)
|
||||
.map(TemplateFormat::valueOf)
|
||||
.orElse(TemplateFormat.assignments);
|
||||
TemplateFormat format = Optional.ofNullable(cfg.get("format")).map(Object::toString).map(
|
||||
TemplateFormat::valueOf).orElse(TemplateFormat.assignments);
|
||||
|
||||
boolean ensureNewline = Optional.ofNullable(cfg.get("newline"))
|
||||
.map(Object::toString)
|
||||
.map(Boolean::valueOf)
|
||||
.orElse(true);
|
||||
boolean ensureNewline = Optional.ofNullable(cfg.get("newline")).map(Object::toString).map(
|
||||
Boolean::valueOf).orElse(true);
|
||||
|
||||
String stmtTemplate = format.format(ensureNewline, new ArrayList<>(keySet));
|
||||
return stmtTemplate;
|
||||
|
@ -19,6 +19,7 @@ package io.nosqlbench.adapter.stdout;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
|
||||
|
@ -17,6 +17,7 @@
|
||||
package io.nosqlbench.adapter.tcpclient;
|
||||
|
||||
import io.nosqlbench.adapter.stdout.StdoutDriverAdapter;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplates;
|
||||
import io.nosqlbench.nb.api.config.standard.ConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
@ -67,7 +68,9 @@ public class TcpClientDriverAdapter extends BaseDriverAdapter<TcpClientOp, TcpCl
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<OpTemplate> getSyntheticOpTemplates(OpsDocList opsDocList, Map<String,Object> cfg) {
|
||||
public OpTemplates getSyntheticOpTemplates(
|
||||
OpTemplates opsDocList,
|
||||
Map<String,Object> cfg) {
|
||||
return adap.getSyntheticOpTemplates(opsDocList, cfg);
|
||||
}
|
||||
|
||||
|
@ -18,6 +18,7 @@ package io.nosqlbench.adapter.tcpclient;
|
||||
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
|
||||
|
@ -16,6 +16,7 @@
|
||||
|
||||
package io.nosqlbench.adapter.tcpserver;
|
||||
import io.nosqlbench.adapter.stdout.StdoutDriverAdapter;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplates;
|
||||
import io.nosqlbench.nb.api.config.standard.ConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
@ -65,7 +66,9 @@ public class TcpServerDriverAdapter extends BaseDriverAdapter<TcpServerOp, TcpSe
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<OpTemplate> getSyntheticOpTemplates(OpsDocList opsDocList, Map<String,Object> cfg) {
|
||||
public OpTemplates getSyntheticOpTemplates(
|
||||
OpTemplates opsDocList,
|
||||
Map<String,Object> cfg) {
|
||||
return adap.getSyntheticOpTemplates(opsDocList, cfg);
|
||||
}
|
||||
|
||||
|
@ -18,6 +18,7 @@ package io.nosqlbench.adapter.tcpserver;
|
||||
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
|
||||
|
@ -22,7 +22,6 @@ import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
import io.nosqlbench.adapter.weaviate.ops.WeaviateBaseOp;
|
||||
import io.nosqlbench.adapter.weaviate.opsdispensers.WeaviateBaseOpDispenser;
|
||||
import io.nosqlbench.adapter.weaviate.opsdispensers.WeaviateCreateCollectionOpDispenser;
|
||||
import io.nosqlbench.adapter.weaviate.opsdispensers.WeaviateCreateObjectsOpDispenser;
|
||||
import io.nosqlbench.adapter.weaviate.opsdispensers.WeaviateDeleteCollectionOpDispenser;
|
||||
@ -33,7 +32,6 @@ import io.nosqlbench.adapters.api.activityimpl.OpMapper;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.engine.api.templating.TypeAndTarget;
|
||||
|
||||
import java.util.function.IntFunction;
|
||||
import java.util.function.LongFunction;
|
||||
|
||||
public class WeaviateOpMapper implements OpMapper<WeaviateBaseOp<?,?>,WeaviateSpace> {
|
||||
@ -53,7 +51,7 @@ public class WeaviateOpMapper implements OpMapper<WeaviateBaseOp<?,?>,WeaviateSp
|
||||
public OpDispenser<? extends WeaviateBaseOp<?,?>> apply(
|
||||
NBComponent adapterC,
|
||||
ParsedOp pop,
|
||||
LongFunction spaceF
|
||||
LongFunction<WeaviateSpace> spaceF
|
||||
) {
|
||||
TypeAndTarget<WeaviateOpType, String> typeAndTarget = pop.getTypeAndTarget(
|
||||
WeaviateOpType.class,
|
||||
|
@ -61,14 +61,21 @@ public class OpsLoader {
|
||||
public static OpsDocList loadPath(String path, Map<String, ?> params, String... searchPaths) {
|
||||
String[] extensions = path.indexOf('.') > -1 ? new String[]{} : YAML_EXTENSIONS;
|
||||
ResolverChain chain = new ResolverChain(path);
|
||||
Content<?> foundPath = NBIO.chain(chain.getChain()).searchPrefixes(searchPaths).pathname(chain.getPath()).extensionSet(extensions).first()
|
||||
.orElseThrow(() -> new RuntimeException("Unable to load path '" + path + "'"));
|
||||
Content<?> foundPath =
|
||||
NBIO.chain(chain.getChain()).searchPrefixes(searchPaths).pathname(chain.getPath())
|
||||
.extensionSet(extensions).first()
|
||||
.orElseThrow(() -> new RuntimeException("Unable to load path '" + path + "'"));
|
||||
OpTemplateFormat fmt = OpTemplateFormat.valueOfURI(foundPath.getURI());
|
||||
return loadString(foundPath.asString(), fmt, params, foundPath.getURI());
|
||||
}
|
||||
|
||||
public static OpsDocList loadString(
|
||||
final String sourceData, OpTemplateFormat fmt, Map<String, ?> params, URI srcuri) {
|
||||
final String sourceData,
|
||||
OpTemplateFormat fmt,
|
||||
Map<String, ?> params,
|
||||
URI srcuri
|
||||
)
|
||||
{
|
||||
|
||||
if (srcuri != null) {
|
||||
logger.info("workload URI: '" + srcuri + "'");
|
||||
@ -113,9 +120,15 @@ public class OpsLoader {
|
||||
}
|
||||
|
||||
int resultStatus = SjsonnetMain.main0(
|
||||
injected.toArray(new String[0]), new DefaultParseCache(), inputStream, stdoutStream,
|
||||
stderrStream, new os.Path(Path.of(System.getProperty("user.dir"))), Option.empty(),
|
||||
Option.empty(), null
|
||||
injected.toArray(new String[0]),
|
||||
new DefaultParseCache(),
|
||||
inputStream,
|
||||
stdoutStream,
|
||||
stderrStream,
|
||||
new os.Path(Path.of(System.getProperty("user.dir"))),
|
||||
Option.empty(),
|
||||
Option.empty(),
|
||||
null
|
||||
);
|
||||
|
||||
String stdoutOutput = stdoutBuffer.toString(StandardCharsets.UTF_8);
|
||||
@ -133,16 +146,17 @@ public class OpsLoader {
|
||||
}
|
||||
}
|
||||
if (!stderrOutput.isEmpty()) {
|
||||
BasicError error = new BasicError(
|
||||
"stderr output from jsonnet preprocessing: " + stderrOutput);
|
||||
BasicError error =
|
||||
new BasicError("stderr output from jsonnet preprocessing: " + stderrOutput);
|
||||
if (resultStatus != 0) {
|
||||
throw error;
|
||||
} else {
|
||||
logger.warn(error.toString(), error);
|
||||
}
|
||||
}
|
||||
logger.info("jsonnet processing read '" + uri + "', rendered " + stdoutOutput.split(
|
||||
"\n").length + " lines.");
|
||||
logger.info(
|
||||
"jsonnet processing read '" + uri + "', rendered " + stdoutOutput.split("\n").length
|
||||
+ " lines.");
|
||||
logger.trace("jsonnet result:\n" + stdoutOutput);
|
||||
|
||||
return stdoutOutput;
|
||||
@ -152,8 +166,11 @@ public class OpsLoader {
|
||||
// into the parsers in a non-exception way
|
||||
public static boolean isJson(String workload) {
|
||||
try {
|
||||
new GsonBuilder().setPrettyPrinting().create().fromJson(workload, Map.class);
|
||||
return true;
|
||||
if (workload.matches("^\\s*\\{.+")) {
|
||||
new GsonBuilder().setPrettyPrinting().create().fromJson(workload, Map.class);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
} catch (Exception e) {
|
||||
return false;
|
||||
}
|
||||
@ -163,8 +180,11 @@ public class OpsLoader {
|
||||
// into the parsers in a non-exception way
|
||||
public static boolean isYaml(String workload) {
|
||||
try {
|
||||
Object result = new Load(LoadSettings.builder().build()).loadFromString(workload);
|
||||
return (result instanceof Map);
|
||||
if (workload.indexOf('\n')>=0) {
|
||||
Object result = new Load(LoadSettings.builder().build()).loadFromString(workload);
|
||||
return (result instanceof Map);
|
||||
}
|
||||
return false;
|
||||
} catch (Exception e) {
|
||||
return false;
|
||||
}
|
||||
|
@ -107,7 +107,7 @@ public class OpsOwner extends RawOpFields {
|
||||
}
|
||||
setOpsFieldByType(itemizedMaps);
|
||||
} else if (object instanceof String) {
|
||||
setOpsFieldByType(Map.of("stmt1", (String) object));
|
||||
setOpsFieldByType(Map.of("stmt", (String) object));
|
||||
} else {
|
||||
throw new RuntimeException("Unknown object type: " + object.getClass());
|
||||
}
|
||||
|
@ -16,8 +16,10 @@
|
||||
|
||||
package io.nosqlbench.adapters.api.activityconfig.rawyaml;
|
||||
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpsDocList;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
@ -39,6 +41,13 @@ public class RawOpsDocList implements Iterable<RawOpsDoc> {
|
||||
return new RawOpsDocList(List.of());
|
||||
}
|
||||
|
||||
public static RawOpsDocList combine(RawOpsDocList l1, RawOpsDocList l2) {
|
||||
List<RawOpsDoc> rawOpsDocs = new ArrayList<>();
|
||||
rawOpsDocs.addAll(l1.getOpsDocs());
|
||||
rawOpsDocs.addAll(l2.getOpsDocs());
|
||||
return new RawOpsDocList(rawOpsDocs);
|
||||
}
|
||||
|
||||
public List<RawOpsDoc> getOpsDocs() {
|
||||
return rawOpsDocList;
|
||||
}
|
||||
|
@ -102,10 +102,13 @@ public class OpDef extends OpTemplate {
|
||||
return tags;
|
||||
}
|
||||
|
||||
/// Op template definitions are auto-tagged according to their placement within the workload
|
||||
/// template. The block name and op name are both added as individual labels.
|
||||
/// No other label should be added as before with auto-concatenation, since this breaks the
|
||||
/// definitive behavior of tag filters over label combinations.
|
||||
private LinkedHashMap<String, String> composeTags() {
|
||||
LinkedHashMap<String, String> tagsWithName = new LinkedHashMap<>(new MultiMapLookup<>(rawOpDef.getTags(), block.getTags()));
|
||||
tagsWithName.put("block",block.getName());
|
||||
tagsWithName.put("name",this.rawOpDef.getName());
|
||||
tagsWithName.put("op",this.rawOpDef.getName());
|
||||
return tagsWithName;
|
||||
}
|
||||
|
@ -0,0 +1,124 @@
|
||||
package io.nosqlbench.adapters.api.activityconfig.yaml;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
import io.nosqlbench.adapters.api.activityconfig.rawyaml.RawOpsDocList;
|
||||
import io.nosqlbench.nb.api.tagging.TagFilter;
|
||||
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/// [OpTemplates] is a list of selected op templates and their backing data.
|
||||
///
|
||||
/// It is a value type which makes it easy to get matching subsets of op templates according to
|
||||
/// tag filters, to combine them, etc.
|
||||
///
|
||||
/// When a user selects an op template, they are expected to use the [TagFilter] mechanism.
|
||||
/// Any such lookup methods should be implemented on this class.
|
||||
public class OpTemplates implements Iterable<OpTemplate> {
|
||||
private final ArrayList<OpTemplate> templates = new ArrayList<>();
|
||||
private final static Logger logger = LogManager.getLogger(OpTemplates.class);
|
||||
private final OpsDocList opsDocList;
|
||||
|
||||
public OpTemplates(OpsDocList opsDocList) {
|
||||
opsDocList.getStmtDocs().stream().flatMap(d -> d.getOpTemplates().stream())
|
||||
.forEach(templates::add);
|
||||
this.opsDocList = opsDocList;
|
||||
}
|
||||
|
||||
public OpTemplates(List<OpTemplate> matchingOpTemplates, OpsDocList opsDocList) {
|
||||
this.opsDocList = opsDocList;
|
||||
templates.addAll(matchingOpTemplates);
|
||||
}
|
||||
|
||||
public OpTemplates() {
|
||||
this.opsDocList = new OpsDocList(new RawOpsDocList(List.of()));
|
||||
}
|
||||
|
||||
public OpTemplates and(OpTemplates other) {
|
||||
this.opsDocList.and(opsDocList);
|
||||
return new OpTemplates();
|
||||
}
|
||||
|
||||
/**
|
||||
@param tagFilterSpec
|
||||
a comma-separated tag filter spec
|
||||
@return The list of all included op templates for all included blocks of in this document,
|
||||
including the inherited and overridden values from this doc and the parent block.
|
||||
*/
|
||||
public OpTemplates matching(String tagFilterSpec, boolean logit) {
|
||||
return matching(new TagFilter(tagFilterSpec), logit);
|
||||
}
|
||||
|
||||
public OpTemplates matching(TagFilter tagFilter, boolean logit) {
|
||||
List<OpTemplate> matchingOpTemplates = new ArrayList<>();
|
||||
|
||||
List<String> matchlog = new ArrayList<>();
|
||||
templates.stream().map(tagFilter::matchesTaggedResult).peek(r -> matchlog.add(r.getLog()))
|
||||
.filter(TagFilter.Result::matched).map(TagFilter.Result::getElement)
|
||||
.forEach(matchingOpTemplates::add);
|
||||
|
||||
if (logit) {
|
||||
for (String s : matchlog) {
|
||||
logger.info(s);
|
||||
}
|
||||
}
|
||||
|
||||
return new OpTemplates(matchingOpTemplates, opsDocList);
|
||||
|
||||
}
|
||||
|
||||
public Map<String, String> getDocBindings() {
|
||||
return opsDocList.getDocBindings();
|
||||
}
|
||||
|
||||
@Override
|
||||
public @NotNull Iterator<OpTemplate> iterator() {
|
||||
return templates.iterator();
|
||||
}
|
||||
|
||||
public Stream<OpTemplate> stream() {
|
||||
return templates.stream();
|
||||
}
|
||||
|
||||
public int size() {
|
||||
return templates.size();
|
||||
}
|
||||
|
||||
public OpTemplate get(int idx) {
|
||||
return templates.get(idx);
|
||||
}
|
||||
|
||||
public boolean isEmpty() {
|
||||
return this.templates.isEmpty();
|
||||
}
|
||||
|
||||
public OpTemplates transform(Function<OpTemplate, OpTemplate> transformF) {
|
||||
List<OpTemplate> transformed = this.templates.stream().map(t -> transformF.apply(t))
|
||||
.toList();
|
||||
return new OpTemplates(transformed, opsDocList);
|
||||
}
|
||||
|
||||
}
|
@ -43,6 +43,11 @@ public class OpsDocList implements Iterable<OpsDoc> {
|
||||
// this.applyModifier(new enumerator());
|
||||
}
|
||||
|
||||
private OpsDocList(RawOpsDocList rawOpsDocList, Map<String, String> templateVariables) {
|
||||
this.rawOpsDocList = rawOpsDocList;
|
||||
this.templateVariables.putAll(templateVariables);
|
||||
}
|
||||
|
||||
public static OpsDocList none() {
|
||||
return new OpsDocList(RawOpsDocList.none());
|
||||
}
|
||||
@ -60,41 +65,10 @@ public class OpsDocList implements Iterable<OpsDoc> {
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public List<OpTemplate> getOps(boolean logit) {
|
||||
return getOps("", logit);
|
||||
public OpTemplates getOps() {
|
||||
return new OpTemplates(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param tagFilterSpec a comma-separated tag filter spec
|
||||
* @return The list of all included op templates for all included blocks of in this document,
|
||||
* including the inherited and overridden values from this doc and the parent block.
|
||||
*/
|
||||
public List<OpTemplate> getOps(String tagFilterSpec, boolean logit) {
|
||||
TagFilter ts = new TagFilter(tagFilterSpec);
|
||||
List<OpTemplate> opTemplates = new ArrayList<>();
|
||||
|
||||
List<OpTemplate> rawtemplates = getStmtDocs().stream()
|
||||
.flatMap(d -> d.getOpTemplates().stream()).toList();
|
||||
|
||||
List<String> matchlog = new ArrayList<>();
|
||||
rawtemplates.stream()
|
||||
.map(ts::matchesTaggedResult)
|
||||
.peek(r -> matchlog.add(r.getLog()))
|
||||
.filter(TagFilter.Result::matched)
|
||||
.map(TagFilter.Result::getElement)
|
||||
.forEach(opTemplates::add);
|
||||
|
||||
if (logit) {
|
||||
for (String s : matchlog) {
|
||||
logger.info(s);
|
||||
}
|
||||
}
|
||||
|
||||
return opTemplates;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public Iterator<OpsDoc> iterator() {
|
||||
return getStmtDocs().iterator();
|
||||
@ -196,4 +170,13 @@ public class OpsDocList implements Iterable<OpsDoc> {
|
||||
return count;
|
||||
}
|
||||
|
||||
public OpsDocList and(OpsDocList other) {
|
||||
return new OpsDocList(
|
||||
RawOpsDocList.combine(this.rawOpsDocList,other.rawOpsDocList),
|
||||
new LinkedHashMap<>() {{
|
||||
putAll(templateVariables);
|
||||
putAll(other.templateVariables);
|
||||
}}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -19,6 +19,8 @@ package io.nosqlbench.adapters.api.activityimpl;
|
||||
import com.codahale.metrics.Timer;
|
||||
import groovy.lang.Binding;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Verifier;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.ValidatorSource;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp;
|
||||
import io.nosqlbench.adapters.api.evalctx.*;
|
||||
import io.nosqlbench.adapters.api.metrics.ThreadLocalNamedTimers;
|
||||
@ -48,7 +50,9 @@ import java.util.function.LongFunction;
|
||||
* The type of operation
|
||||
*/
|
||||
public abstract class BaseOpDispenser<OP extends CycleOp<?>, SPACE extends Space>
|
||||
extends NBBaseComponent implements OpDispenser<OP> {
|
||||
extends NBBaseComponent
|
||||
implements OpDispenser<OP>, ValidatorSource
|
||||
{
|
||||
protected final static Logger logger = LogManager.getLogger(BaseOpDispenser.class);
|
||||
public static final String VERIFIER = "verifier";
|
||||
public static final String VERIFIER_INIT = "verifier-init";
|
||||
@ -79,11 +83,13 @@ public abstract class BaseOpDispenser<OP extends CycleOp<?>, SPACE extends Space
|
||||
*/
|
||||
private final CycleFunction<Boolean> _verifier;
|
||||
private final ThreadLocal<CycleFunction<Boolean>> tlVerifier;
|
||||
private final long ratio;
|
||||
|
||||
protected BaseOpDispenser(final NBComponent parentC, final ParsedOp op, LongFunction<? extends SPACE> spaceF) {
|
||||
super(parentC);
|
||||
opName = op.getName();
|
||||
labels = op.getLabels();
|
||||
this.ratio = op.takeOptionalStaticValue("ratio", Long.class).orElse(1L);
|
||||
|
||||
this.timerStarts = op.takeOptionalStaticValue(START_TIMERS, String.class)
|
||||
.map(s -> s.split(", *"))
|
||||
@ -230,4 +236,14 @@ public abstract class BaseOpDispenser<OP extends CycleOp<?>, SPACE extends Space
|
||||
OP op = getOp(value);
|
||||
return op;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Verifier> getValidator(NBComponent parent, ParsedOp pop, OpLookup lookup) {
|
||||
return CoreOpValidators.getValidator(this, pop, lookup);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getRatio() {
|
||||
return this.ratio;
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,51 @@
|
||||
package io.nosqlbench.adapters.api.activityimpl;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Verifier;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.engine.api.templating.TypeAndTarget;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
public class CoreOpValidators {
|
||||
private static final Logger logger = LogManager.getLogger(CoreOpValidators.class);
|
||||
|
||||
public static List<Verifier> getValidator(NBComponent parent, ParsedOp pop, OpLookup lookup) {
|
||||
List<Verifier> validators = new ArrayList<>();
|
||||
Optional<TypeAndTarget<CoreValidators, Object>> optionalValidator = pop.getOptionalTypeAndTargetEnum(
|
||||
CoreValidators.class, Object.class);
|
||||
|
||||
if (optionalValidator.isPresent()) {
|
||||
TypeAndTarget<CoreValidators, Object> validator = optionalValidator.get();
|
||||
logger.debug("found validator '" + validator.enumId.name() + "' for op '" + pop.getName() + "'");
|
||||
switch (validator.enumId) {
|
||||
case verify_fields:
|
||||
validators.add(new FieldVerifier(parent, pop, lookup));
|
||||
}
|
||||
}
|
||||
|
||||
return validators;
|
||||
}
|
||||
}
|
@ -0,0 +1,30 @@
|
||||
package io.nosqlbench.adapters.api.activityimpl;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Verifier;
|
||||
|
||||
public enum CoreValidators {
|
||||
verify_fields(FieldVerifier.class);
|
||||
private final Class<? extends Verifier> validatorImpl;
|
||||
|
||||
CoreValidators(Class<? extends Verifier> validatorClass) {
|
||||
this.validatorImpl = validatorClass;
|
||||
}
|
||||
}
|
@ -0,0 +1,260 @@
|
||||
package io.nosqlbench.adapters.api.activityimpl;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Verifier;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers.DiffType;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
import io.nosqlbench.nb.api.engine.metrics.instruments.MetricCategory;
|
||||
import io.nosqlbench.nb.api.engine.metrics.instruments.NBMetricCounter;
|
||||
import io.nosqlbench.nb.api.errors.OpConfigError;
|
||||
import io.nosqlbench.virtdata.core.templates.BindPoint;
|
||||
import io.nosqlbench.virtdata.core.templates.CapturePoints;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.LongFunction;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/// The field verifier is a type of result checker which can assert
|
||||
/// that fields in an op result match the values expected, or in other
|
||||
/// words, that the data is _correct_.
|
||||
///
|
||||
/// The field verifier works with another optional capability, called
|
||||
/// _field capture_ which is able to extract the results of an operation
|
||||
/// into a specific set of named fields, asserting their value types, and
|
||||
/// renaming the results if needed.
|
||||
///
|
||||
/// Together field capture and field verify behaviors allow an adapter-agnostic
|
||||
/// way of asserting correctness of results. Further details on how this
|
||||
/// works will be provided in a separate doc, with examples.
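///
/// As a conceptual sketch (field names and values are illustrative only), a values-level check
/// compares the captured result map against the map produced by the bindings for the same cycle:
/// ```java
/// Map<String, Object> expected = Map.of("id", 42L, "name", "forty_two"); // from bindings
/// Map<String, Object> actual   = Map.of("id", 42L, "name", "forty-two"); // from the op result
/// // "id" matches, "name" differs, so this cycle is counted and reported as a verification error
/// ```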
|
||||
public class FieldVerifier implements Verifier<Object> {
|
||||
|
||||
private final LongFunction<Map<String, Object>> expectedValuesF;
|
||||
private final DiffType diffType;
|
||||
private final NBMetricCounter resultsVerifiedError;
|
||||
private final NBMetricCounter resultsOkCounter;
|
||||
private final NBMetricCounter verifiedFieldsCounter;
|
||||
private final String[] fieldNames;
|
||||
private final String[] bindingNames;
|
||||
|
||||
public FieldVerifier(NBComponent parent, ParsedOp pop, OpLookup lookup) {
|
||||
this.resultsVerifiedError = parent.create().counter(
|
||||
"results_verified_error", MetricCategory.Verification,
|
||||
"The number of results which have been verified with no error"
|
||||
);
|
||||
this.resultsOkCounter = parent.create().counter(
|
||||
"results_verified_ok",
|
||||
MetricCategory.Verification,
|
||||
"The number of results which had " + "a verification error"
|
||||
);
|
||||
this.verifiedFieldsCounter = parent.create().counter(
|
||||
"field_verified_ok", MetricCategory.Verification,
|
||||
"the number of fields in results which have been verified with no error"
|
||||
);
|
||||
|
||||
this.diffType = pop.takeEnumFromFieldOr(DiffType.class, DiffType.all, "compare");
|
||||
|
||||
List<String> fields = new ArrayList<>();
|
||||
List<String> bindings = new ArrayList<>();
|
||||
CapturePoints captures = pop.getCaptures();
|
||||
|
||||
ParsedOp config = pop.takeAsSubConfig("verify_fields");
|
||||
|
||||
Optional<Object> vspec = config.takeOptionalStaticValue("verify_fields", Object.class);
|
||||
if (vspec.isPresent()) {
|
||||
Object vspeco = vspec.get();
|
||||
if (vspeco instanceof Map verifyers) {
|
||||
verifyers.forEach((k, v) -> {
|
||||
if (k instanceof CharSequence keyName && v instanceof CharSequence keyValue) {
|
||||
fields.add(keyName.toString());
|
||||
bindings.add(keyValue.toString());
|
||||
|
||||
} else {
|
||||
throw new RuntimeException(
|
||||
"Strings must be used in map form of " + "verify_field");
|
||||
}
|
||||
|
||||
});
|
||||
} else if (vspeco instanceof String verifyBindingSpec) {
|
||||
parseFieldSpec(verifyBindingSpec, lookup, fields, bindings, captures, pop);
|
||||
} else {
|
||||
throw new OpConfigError("Unrecognized type for verify_fields value:" + vspeco.getClass().getSimpleName());
|
||||
}
|
||||
} else {
|
||||
config.getDefinedNames().forEach(name -> {
|
||||
fields.add(name);
|
||||
bindings.add(config.getStaticValue(name));
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
List<BindPoint> bindPoints = pop.getBindPoints();
|
||||
|
||||
// Optional<String> vb = config.getOptionalStaticValue("verify_bindings", String.class);
|
||||
// if (vb.isPresent()) {
|
||||
// String verifyBindingSpec = vb.get();
|
||||
// if (verifyBindingSpec.startsWith("op(") && verifyBindingSpec.endsWith(")")) {
|
||||
// String toLookup = verifyBindingSpec.substring(2, verifyBindingSpec.lastIndexOf(-1));
|
||||
// ParsedOp referenced = lookup.lookup(toLookup).orElseThrow();
|
||||
// }
|
||||
// }
|
||||
|
||||
this.fieldNames = fields.toArray(new String[fields.size()]);
|
||||
this.bindingNames = bindings.toArray(new String[bindings.size()]);
|
||||
this.expectedValuesF = pop.newOrderedMapBinder(bindingNames);
|
||||
}
|
||||
|
||||
/// This is not stable yet, and may change again soon
|
||||
///
|
||||
/// There are two ways a field verifier is expected to be configured:
|
||||
/// 1. By reference to another op template which is presumed to have written
|
||||
/// the data to be verified.
|
||||
/// 2. Directly, by telling the field verifier the names, value bindings, and level
|
||||
/// of verification.
|
||||
private void parseFieldSpec(
|
||||
String fieldSpec, OpLookup lookup, List<String> fields,
|
||||
List<String> bindings, CapturePoints captures, ParsedOp pop
|
||||
) {
|
||||
// This is an indirect way of configuring a verifier. A user specifies
|
||||
// a tag filter which is meant to locate another op template within the
|
||||
// current workload template (it can be an active or inactive op template)
|
||||
// and that referenced template is used to derive the field names, bindings,
|
||||
// etc to verify against results of this op templates result values.
|
||||
if (fieldSpec.startsWith("op(") && fieldSpec.endsWith(")")) {
|
||||
String toLookup = fieldSpec.substring("op(".length(), fieldSpec.length() - 1);
|
||||
Optional<ParsedOp> referenced = lookup.lookup(toLookup);
|
||||
if (referenced.isPresent()) {
|
||||
List<String> vars = referenced.get().getBindPoints().stream().map(
|
||||
bp -> bp.getAnchor()).toList();
|
||||
fields.addAll(vars);
|
||||
bindings.addAll(vars);
|
||||
} else {
|
||||
throw new OpConfigError(
|
||||
"no op found for verify setting '" + fieldSpec + "' " + "for op " + "template" + " '" + pop.getName() + "'");
|
||||
}
|
||||
}
|
||||
// This is the direct way of configuring a verifier.
|
||||
// See verify.md for details on the supported format. As this is experimental, this doc
|
||||
// should be updated when this code is stable.
|
||||
else {
|
||||
String[] vfields = fieldSpec.split("\\s*,\\s*");
|
||||
for (String vfield : vfields) {
|
||||
// if (vfield.equals("*")) {
|
||||
// fields.addAll(captures.getAsNames());
|
||||
// fields.addAll(bindPoints.stream().map(bp -> bp.getAnchor()).toList());
|
||||
// } else
|
||||
if (vfield.startsWith("+")) {
|
||||
fields.add(vfield.substring(1));
|
||||
} else if (vfield.startsWith("-")) {
|
||||
fields.remove(vfield.substring(1));
|
||||
} else if (vfield.matches("\\w+(->[\\w-]+)?")) {
|
||||
String[] parts = vfield.split("->", 2);
|
||||
fields.add(parts[0]);
|
||||
bindings.add(parts.length == 2 ? parts[1] : parts[0]); // the field name doubles as the binding name when no '->' is given
|
||||
} else {
|
||||
throw new RuntimeException("unknown verify_fields format: '" + vfield + "'");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/// Compare the values of the row with the values generated.
|
||||
///
|
||||
/// Specifically,
|
||||
/// - Ensure the same number of fields.
|
||||
/// - Ensure the same pair-wise field names.
|
||||
/// - Ensure that each pair of same-named fields has the same data type.
|
||||
/// - Ensure that the value of each pair of fields is equal according to the equals
|
||||
/// operator for the respective type.
|
||||
/// Any differences are counted and reported, and cause a [RuntimeException] for the cycle.
|
||||
@Override
|
||||
public void verify(long cycle, Object data) {
|
||||
if (data instanceof Map<?, ?> r) {
|
||||
Map<String, ?> result = (Map<String, ?>) r;
|
||||
Map<String, Object> referenceMap = this.expectedValuesF.apply(cycle);
|
||||
|
||||
int diff = 0;
|
||||
StringBuilder logbuffer = new StringBuilder(); // make this a TL
|
||||
logbuffer.setLength(0);
|
||||
|
||||
if (diffType.is(DiffType.reffields)) {
|
||||
|
||||
List<String> missingRowFields = Arrays.stream(this.fieldNames).filter(
|
||||
gk -> !result.containsKey(gk)).collect(Collectors.toList());
|
||||
if (missingRowFields.size() > 0) {
|
||||
diff += missingRowFields.size();
|
||||
|
||||
logbuffer.append("\nexpected fields '");
|
||||
logbuffer.append(String.join("','", missingRowFields));
|
||||
logbuffer.append("' not in row.");
|
||||
}
|
||||
}
|
||||
|
||||
// if (diffType.is(DiffType.rowfields)) {
|
||||
// List<String> missingRefFields = result.keySet().stream().filter(
|
||||
// k -> !referenceMap.containsKey(k)).collect(Collectors.toList());
|
||||
// if (missingRefFields.size() > 0) {
|
||||
// diff += missingRefFields.size();
|
||||
//
|
||||
// logbuffer.append("\nexpected fields '");
|
||||
// logbuffer.append(String.join("','", missingRefFields));
|
||||
// logbuffer.append("' not in reference data: " + referenceMap);
|
||||
// }
|
||||
// }
|
||||
|
||||
if (diffType.is(DiffType.values)) {
|
||||
for (int fidx = 0; fidx < fieldNames.length; fidx++) {
|
||||
String fname = fieldNames[fidx];
|
||||
|
||||
String rname = bindingNames[fidx];
|
||||
if (referenceMap.containsKey(rname)) {
|
||||
if (referenceMap.get(rname).equals(result.get(fname))) {
|
||||
verifiedFieldsCounter.inc();
|
||||
} else {
|
||||
logbuffer.append("\nvalue differs for '").append(fname).append("' ");
|
||||
logbuffer.append("expected:'").append(
|
||||
referenceMap.get(rname).toString()).append("'");
|
||||
logbuffer.append(" actual:'").append(result.get(rname)).append("'");
|
||||
diff++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (diff == 0) {
|
||||
resultsOkCounter.inc();
|
||||
} else {
|
||||
resultsVerifiedError.inc();
|
||||
throw new RuntimeException("in cycle " + cycle + ", " + logbuffer.toString());
|
||||
}
|
||||
|
||||
} else {
|
||||
throw new OpConfigError("Can only validate fields of type Map");
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "verify_fields";
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -109,4 +109,6 @@ public interface OpDispenser<OPTYPE extends CycleOp<?>> extends LongFunction<OPT
|
||||
CycleFunction<Boolean> getVerifier();
|
||||
|
||||
String getOpName();
|
||||
|
||||
long getRatio();
|
||||
}
|
||||
|
@ -0,0 +1,27 @@
|
||||
package io.nosqlbench.adapters.api.activityimpl;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
public interface OpLookup {
|
||||
Optional<ParsedOp> lookup(String opName);
|
||||
}
|
@ -144,55 +144,6 @@ public abstract class BaseDriverAdapter<RESULT
|
||||
return cfg;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyConfig(NBConfiguration cfg) {
|
||||
this.cfg = cfg;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyReconfig(NBConfiguration reconf) {
|
||||
this.cfg = getReconfigModel().apply(reconf.getMap());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* In order to be provided with config information, it is required
|
||||
* that the driver adapter specify the valid configuration options,
|
||||
* their types, and so on.
|
||||
*/
|
||||
@Override
|
||||
public NBConfigModel getConfigModel() {
|
||||
return ConfigModel.of(BaseDriverAdapter.class)
|
||||
.add(Param.optional("alias"))
|
||||
.add(Param.optional("labels", String.class, "Labels which will apply to metrics and annotations for this activity only"))
|
||||
.add(Param.defaultTo("strict", true, "strict op field mode, which requires that provided op fields are recognized and used"))
|
||||
.add(Param.optional(List.of("op", "stmt", "statement"), String.class, "op template in statement form"))
|
||||
.add(Param.optional("tags", String.class, "tags to be used to filter operations"))
|
||||
.add(Param.defaultTo("errors", "stop", "error handler configuration"))
|
||||
.add(Param.optional("threads").setRegex("\\d+|\\d+x|auto").setDescription("number of concurrent operations, controlled by threadpool"))
|
||||
.add(Param.optional("stride").setRegex("\\d+"))
|
||||
.add(Param.optional("striderate", String.class, "rate limit for strides per second"))
|
||||
.add(Param.optional("cycles").setRegex("\\d+[KMBGTPE]?|\\d+[KMBGTPE]?\\.\\.\\d+[KMBGTPE]?").setDescription("cycle interval to use"))
|
||||
.add(Param.optional("recycles").setDescription("allow cycles to be re-used this many times"))
|
||||
.add(Param.optional(List.of("cyclerate", "targetrate", "rate"), String.class, "rate limit for cycles per second"))
|
||||
.add(Param.optional("seq", String.class, "sequencing algorithm"))
|
||||
.add(Param.optional("instrument", Boolean.class))
|
||||
.add(Param.optional(List.of("workload", "yaml"), String.class, "location of workload yaml file"))
|
||||
.add(Param.optional("driver", String.class))
|
||||
.add(Param.defaultTo("dryrun", "none").setRegex("(op|jsonnet|emit|none)"))
|
||||
.add(Param.optional("maxtries", Integer.class))
|
||||
.asReadOnly();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBConfigModel getReconfigModel() {
|
||||
return ConfigModel.of(BaseDriverAdapter.class)
|
||||
.add(Param.optional("threads").setRegex("\\d+|\\d+x|auto").setDescription("number of concurrent operations, controlled by threadpool"))
|
||||
.add(Param.optional("striderate", String.class, "rate limit for strides per second"))
|
||||
.add(Param.optional(List.of("cyclerate", "targetrate", "rate"), String.class, "rate limit for cycles per second"))
|
||||
.asReadOnly();
|
||||
}
|
||||
|
||||
@Override
|
||||
public LongFunction<SPACE> getSpaceFunc(ParsedOp pop) {
|
||||
@ -233,4 +184,30 @@ public abstract class BaseDriverAdapter<RESULT
|
||||
}
|
||||
super.beforeDetach();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyConfig(NBConfiguration cfg) {
|
||||
this.cfg = cfg;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyReconfig(NBConfiguration reconf) {
|
||||
this.cfg = getReconfigModel().apply(reconf.getMap());
|
||||
}
|
||||
|
||||
/// These are implemented here as _unit_ values, meaning, you shouldn't be asking
|
||||
/// "Does this element have a configuration model", but instead you should be asking
|
||||
/// "What is the (possibly empty?) configuration model of this element?"
|
||||
@Override
|
||||
public NBConfigModel getConfigModel() {
|
||||
return ConfigModel.of(BaseDriverAdapter.class).asReadOnly();
|
||||
}
|
||||
|
||||
/// These are implemented here as _unit_ values, meaning, you shouldn't be asking
|
||||
/// "Does this element have a reconfiguration model", but instead you should be asking
|
||||
/// "What is the (possibly empty?) reconfiguration model of this element?"
|
||||
@Override
|
||||
public NBConfigModel getReconfigModel() {
|
||||
return ConfigModel.of(BaseDriverAdapter.class).asReadOnly();
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,38 @@
|
||||
package io.nosqlbench.adapters.api.activityimpl.uniform;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
import java.util.Map;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.nb.api.components.core.NBNamedElement;
|
||||
import io.nosqlbench.virtdata.core.templates.BindPoint;
|
||||
|
||||
/// This optional type allows for [OpDispenser] (or other) implementations to
|
||||
/// map native field names to their associated binding names. Often, the
|
||||
/// adapter-native logic is the only place this association can be derived, although
|
||||
/// it is sometimes needed in core adapter-agnostic logic.
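///
/// A consumer-side sketch (the `dispenser` variable is hypothetical):
/// ```java
/// Map<String, BindPoint> fieldBindings =
///     (dispenser instanceof FieldBindingsMetadata<?> fbm) ? fbm.getFieldBindingsMap() : Map.of();
/// ```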
|
||||
public interface FieldBindingsMetadata<FIELDTYPE> {
|
||||
|
||||
/// Get the map of native fields to bind points.
|
||||
/// The bind points don't need to be the same actual object which is used, but both the
|
||||
/// field names and the binding points should be equivalent as in [Object#equals].
|
||||
/// @return an ordered map of native driver/client fields to their associated bindpoints.
|
||||
Map<String, BindPoint> getFieldBindingsMap();
|
||||
|
||||
}
|
@ -2,13 +2,13 @@ package io.nosqlbench.adapters.api.activityimpl.uniform;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@ -18,12 +18,14 @@ package io.nosqlbench.adapters.api.activityimpl.uniform;
|
||||
*/
|
||||
|
||||
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpLookup;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
|
||||
import java.util.Optional;
|
||||
import java.util.List;
|
||||
|
||||
/// A [DriverAdapter] may implement this interface to provide adapter-specific
|
||||
/// validators.
|
||||
public interface ValidatorSource {
|
||||
Optional<Validator> getValidator(String name, ParsedOp pop);
|
||||
List<Verifier> getValidator(NBComponent parent, ParsedOp pop, OpLookup lookup);
|
||||
}
|
||||
|
@ -0,0 +1,24 @@
|
||||
package io.nosqlbench.adapters.api.activityimpl.uniform;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import io.nosqlbench.nb.api.components.core.NBNamedElement;
|
||||
|
||||
public interface Verifier<RESULT> extends NBNamedElement {
|
||||
void verify(long cycle, RESULT result);
|
||||
}
|
@ -17,6 +17,7 @@
|
||||
package io.nosqlbench.adapters.api.activityimpl.uniform.decorators;
|
||||
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplate;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplates;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpsDocList;
|
||||
import io.nosqlbench.adapters.api.templating.DriverAdapterDecorators;
|
||||
|
||||
@ -41,10 +42,10 @@ public interface SyntheticOpTemplateProvider extends DriverAdapterDecorators {
|
||||
* If a driver adapter supports creating example op templates from bindings,
|
||||
* it must implement this method to do so.
|
||||
*
|
||||
* @param opsDocList
|
||||
* @param opTemplates
|
||||
* The existing doc structure, which should contain no fully defined op templates, but may contain other
|
||||
* elements like bindings
|
||||
* @return A list of op templates, size zero or more
|
||||
*/
|
||||
List<OpTemplate> getSyntheticOpTemplates(OpsDocList opsDocList, Map<String, Object> params);
|
||||
OpTemplates getSyntheticOpTemplates(OpTemplates opTemplates, Map<String, Object> params);
|
||||
}
|
||||
|
@ -16,15 +16,15 @@
|
||||
|
||||
package io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers;
|
||||
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Validator;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Verifier;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp;
|
||||
|
||||
public class AssertingOp<T> implements CycleOp<T> {
|
||||
|
||||
private final CycleOp<T> op;
|
||||
private final Validator<T> validator;
|
||||
private final Verifier<T> validator;
|
||||
|
||||
public AssertingOp(CycleOp<T> op, Validator<T> validator) {
|
||||
public AssertingOp(CycleOp<T> op, Verifier<T> validator) {
|
||||
this.op = op;
|
||||
this.validator = validator;
|
||||
}
|
||||
@ -32,7 +32,7 @@ public class AssertingOp<T> implements CycleOp<T> {
|
||||
@Override
|
||||
public T apply(long value) {
|
||||
T result = op.apply(value);
|
||||
validator.validate(result);
|
||||
validator.verify(value, result);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
@ -20,20 +20,20 @@ import io.nosqlbench.adapters.api.activityimpl.BaseOpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Space;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Validator;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Verifier;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp;
|
||||
import io.nosqlbench.adapters.api.templating.ParsedOp;
|
||||
|
||||
public class AssertingOpDispenser<S extends Space, RESULT> extends BaseOpDispenser<CycleOp<RESULT>, S> {
|
||||
|
||||
private final OpDispenser<CycleOp<RESULT>> realDispenser;
|
||||
private final Validator<RESULT> validator;
|
||||
private final Verifier<RESULT> validator;
|
||||
|
||||
public AssertingOpDispenser(
|
||||
DriverAdapter<CycleOp<RESULT>, S> adapter,
|
||||
ParsedOp pop,
|
||||
OpDispenser<CycleOp<RESULT>> realDispenser,
|
||||
Validator<RESULT> validator
|
||||
Verifier<RESULT> validator
|
||||
) {
|
||||
super(adapter, pop, adapter.getSpaceFunc(pop));
|
||||
this.realDispenser = realDispenser;
|
||||
|
@ -16,7 +16,6 @@
|
||||
|
||||
package io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers;
|
||||
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.Validator;
|
||||
import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp;
|
||||
|
||||
import java.util.Map;
|
||||
|
@ -0,0 +1,55 @@
|
||||
package io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
public enum DiffType {
|
||||
|
||||
|
||||
/// Verify nothing for this statement
|
||||
none(0),
|
||||
|
||||
/// Verify that fields named in the row are present in the reference map.
|
||||
resultfields(0x1),
|
||||
|
||||
/// Verify that fields in the reference map are present in the row data.
|
||||
reffields(0x1 << 1),
|
||||
|
||||
/// Verify that all fields present in either the row or the reference data
|
||||
/// are also present in the other.
|
||||
fields(0x1 | 0x1 << 1),
|
||||
|
||||
/// Verify that all values of the same named field are equal, according to
|
||||
/// {@link Object#equals(Object)}}.
|
||||
values(0x1<<2),
|
||||
|
||||
/// Cross-verify all fields and field values between the reference data and
|
||||
/// the actual data.
|
||||
all(0x1|0x1<<1|0x1<<2);
|
||||
|
||||
public int bitmask;
|
||||
|
||||
DiffType(int bit) {
|
||||
this.bitmask = bit;
|
||||
}
|
||||
|
||||
public boolean is(DiffType option) {
|
||||
return (bitmask & option.bitmask) > 0;
|
||||
}
|
||||
|
||||
}
|
@ -30,6 +30,7 @@ import io.nosqlbench.nb.api.config.fieldreaders.DynamicFieldReader;
|
||||
import io.nosqlbench.nb.api.config.fieldreaders.StaticFieldReader;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfigError;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
import io.nosqlbench.nb.api.engine.util.Tagged;
|
||||
import io.nosqlbench.nb.api.errors.OpConfigError;
|
||||
import io.nosqlbench.nb.api.labels.NBLabelSpec;
|
||||
import io.nosqlbench.nb.api.labels.NBLabels;
|
||||
@ -224,7 +225,7 @@ opfield1: value1
|
||||
</UL>
|
||||
</P>
|
||||
|
||||
<H3>Enabling Activity Params</H3>
|
||||
<H3>Enabling StandardActivity Params</H3>
|
||||
<P>If a user wants to allow an activity param as a default for op fields, they must publish the op
|
||||
field
|
||||
name in the configuration model for the activity. Otherwise it is an error to specify the value at
|
||||
@ -352,7 +353,7 @@ prepared: false
|
||||
document level,
|
||||
down to each block and then down to each statement.
|
||||
|
||||
<H3>Activity Params</H3>
|
||||
<H3>StandardActivity Params</H3>
|
||||
<PRE>{@code
|
||||
./nb run driver=... workload=... cl=LOCAL_QUORUM
|
||||
}</PRE>
|
||||
@ -380,7 +381,7 @@ prepared: false
|
||||
field within the set of possible fields. More than one will throw an error.</LI>
|
||||
</UL>
|
||||
</P> */
|
||||
public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String, ?>>, NBComponent, StaticFieldReader, DynamicFieldReader {
|
||||
public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String, ?>>, NBComponent, StaticFieldReader, DynamicFieldReader, Tagged {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger(ParsedOp.class);
|
||||
|
||||
@ -393,34 +394,33 @@ public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String
|
||||
private final List<CapturePoint> captures = new ArrayList<>();
|
||||
|
||||
private final OpTemplate _opTemplate;
|
||||
private final NBConfiguration activityCfg;
|
||||
private final Map<String,Object> activityCfg;
|
||||
private final ParsedTemplateMap tmap;
|
||||
private final NBLabels labels;
|
||||
private final List<Function<Map<String, Object>, Map<String, Object>>> preprocessors;
|
||||
|
||||
/**
|
||||
Create a parsed command from an Op template. This version is exactly like
|
||||
except that it allows
|
||||
preprocessors. Preprocessors are all applied to the the op template before
|
||||
it is applied to the parsed command fields, allowing you to combine or destructure
|
||||
fields from more tha one representation into a single canonical representation
|
||||
for processing.
|
||||
@param opTemplate
|
||||
The OpTemplate as provided by a user via YAML, JSON, or API (data structure)
|
||||
@param activityCfg
|
||||
The activity configuration, used to resolve nested config parameters
|
||||
@param preprocessors
|
||||
Map->Map transformers.
|
||||
*/
|
||||
|
||||
public ParsedOp(ParsedOp pop, NBConfiguration config) {
|
||||
this(pop._opTemplate,new LinkedHashMap<>(pop.activityCfg) {{ this.putAll(config.getMap());}},List.of(),pop.parent);
|
||||
}
|
||||
/// Create a parsed command from an Op template. Preprocessors are all applied to the op template before
|
||||
/// it is applied to the parsed command fields, allowing you to combine or destructure
|
||||
/// fields from more than one representation into a single canonical representation
|
||||
/// for processing.
|
||||
/// @param opTemplate The OpTemplate as provided by a user via YAML, JSON, or API (data structure)
|
||||
/// @param activityCfg The activity configuration, used to resolve nested config parameters
|
||||
/// @param preprocessors Map->Map transformers.
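///
/// A sketch of one preprocessor (the field names are illustrative only):
/// ```java
/// Function<Map<String, Object>, Map<String, Object>> foldLegacyField = fields -> {
///     Map<String, Object> out = new LinkedHashMap<>(fields);
///     if (out.containsKey("statement")) {
///         out.put("stmt", out.remove("statement")); // fold a legacy field name into the canonical one
///     }
///     return out;
/// };
/// ```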
|
||||
public ParsedOp(
|
||||
OpTemplate opTemplate, NBConfiguration activityCfg,
|
||||
OpTemplate opTemplate,
|
||||
Map<String,Object> activityCfg,
|
||||
List<Function<Map<String, Object>, Map<String, Object>>> preprocessors,
|
||||
NBComponent parent
|
||||
) {
|
||||
// TODO: the block and op name below should be populated more robustly
|
||||
// They should not be strictly required, but a way of taking "what is provided" in the
|
||||
// name should be used
|
||||
super(
|
||||
parent,
|
||||
NBLabels.forKV(((parent instanceof ParsedOp) ? "subop" : "op"), opTemplate.getName())
|
||||
NBLabels.forMap(opTemplate.getTags())
|
||||
);
|
||||
this._opTemplate = opTemplate;
|
||||
this.activityCfg = activityCfg;
|
||||
@ -435,7 +435,7 @@ public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String
|
||||
|
||||
this.tmap = new ParsedTemplateMap(
|
||||
getName(), map, opTemplate.getBindings(),
|
||||
List.of(opTemplate.getParams(), activityCfg.getMap())
|
||||
List.of(opTemplate.getParams(), activityCfg)
|
||||
);
|
||||
|
||||
NBLabels opLabels = parent.getLabels().and(
|
||||
@ -894,6 +894,7 @@ public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String
|
||||
return tmap.getOptionalTargetEnum(enumclass, valueClass);
|
||||
}
|
||||
|
||||
|
||||
public <E extends Enum<E>, V> Optional<TypeAndTarget<E, V>> getOptionalTypeAndTargetEnum(
|
||||
Class<E> enumclass, Class<V> valueClass) {
|
||||
return tmap.getOptionalTargetEnum(enumclass, valueClass);
|
||||
@ -1025,6 +1026,11 @@ public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String
|
||||
return this._opTemplate.getRefKey();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getTags() {
|
||||
return this._opTemplate.getTags();
|
||||
}
|
||||
|
||||
|
||||
public static enum SubOpNaming {
|
||||
SubKey, ParentAndSubKey
|
||||
@ -1046,9 +1052,9 @@ public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String
|
||||
return new ParsedOp(
|
||||
new OpData(
|
||||
"sub-op of '" + this.getName() + "' field '" + fromOpField + "', element '" + elemName + "' name '" + subopName + "'",
|
||||
subopName, new LinkedHashMap<String, String>(_opTemplate.getTags()) {{
|
||||
put("subop", subopName);
|
||||
}}, _opTemplate.getBindings(), _opTemplate.getParams(), opfields, 100
|
||||
subopName,
|
||||
new LinkedHashMap<String, String>(Map.of("subop", subopName)),
|
||||
_opTemplate.getBindings(), _opTemplate.getParams(), opfields, 100
|
||||
), this.activityCfg, List.of(), this
|
||||
);
|
||||
}
|
||||
@ -1096,10 +1102,23 @@ public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String
|
||||
return subOpMap;
|
||||
}
|
||||
|
||||
public ParsedOp takeAsSubConfig(String s) {
|
||||
Object subtree = tmap.takeStaticValue(s, Object.class);
|
||||
if (subtree instanceof Map map) {
|
||||
return makeSubOp(s, s, map, SubOpNaming.SubKey);
|
||||
} else if (subtree instanceof String seq) {
|
||||
return makeSubOp(s, s, Map.of(s, seq), SubOpNaming.SubKey);
|
||||
} else {
|
||||
throw new RuntimeException(
|
||||
"unable to make sub config from key '" + s + "', because " + "it is a " + subtree.getClass().getCanonicalName());
|
||||
}
|
||||
}
|
||||
|
||||
public ParsedOp getAsSubOp(String name, SubOpNaming naming) {
|
||||
Object o = _opTemplate.getOp().map(raw -> raw.get(name)).orElseThrow(
|
||||
() -> new OpConfigError(
|
||||
"Could not find op field '" + name + "' for subop on parent op '" + name + "'"));
|
||||
|
||||
if (o instanceof Map map) {
|
||||
return makeSubOp(this.getName(), name, map, naming);
|
||||
} else {
|
||||
@ -1230,8 +1249,8 @@ public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String
|
||||
return tmap.getCaptures();
|
||||
}
|
||||
|
||||
public Map<String, String> getBindPoints() {
|
||||
return null;
|
||||
public List<BindPoint> getBindPoints() {
|
||||
return tmap.getBindPoints();
|
||||
}
|
||||
|
||||
public boolean isDefinedExactly(String... fields) {
|
||||
|
@ -20,7 +20,7 @@ import com.google.gson.Gson;
|
||||
import io.nosqlbench.nb.api.advisor.NBAdvisorBuilder;
|
||||
import io.nosqlbench.nb.api.advisor.NBAdvisorPoint;
|
||||
import io.nosqlbench.nb.api.advisor.conditions.Conditions;
|
||||
import io.nosqlbench.nb.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
import io.nosqlbench.nb.api.errors.OpConfigError;
|
||||
import io.nosqlbench.nb.api.nbio.Content;
|
||||
import io.nosqlbench.nb.api.nbio.NBIO;
|
||||
@ -53,9 +53,9 @@ public class StrInterpolator implements Function<String, String> {
|
||||
private final Pattern COMMENT = Pattern.compile("^\\s*#.*");
|
||||
private final Pattern INSERT = Pattern.compile("^(\\s*)INSERT:\\s+(.+)$");
|
||||
|
||||
public StrInterpolator(ActivityDef... activityDefs) {
|
||||
public StrInterpolator(NBConfiguration... activityDefs) {
|
||||
Arrays.stream(activityDefs)
|
||||
.map(ad -> ad.getParams().getStringStringMap())
|
||||
.map(ad -> ad.getMap())
|
||||
.forEach(multimap::add);
|
||||
}
|
||||
|
||||
|
@ -22,6 +22,7 @@ import io.nosqlbench.adapters.api.activityconfig.OpsLoader;
|
||||
import io.nosqlbench.adapters.api.activityconfig.rawyaml.RawOpsLoader;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplate;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplateFormat;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplates;
|
||||
import io.nosqlbench.adapters.api.activityconfig.yaml.OpsDocList;
|
||||
import io.nosqlbench.nb.spectest.api.STAssemblyValidator;
|
||||
import io.nosqlbench.nb.spectest.core.STNodeAssembly;
|
||||
@ -110,7 +111,7 @@ public class YamlSpecValidator implements STAssemblyValidator {
|
||||
List<Map<String, Object>> expectedList = gson.fromJson(json, type);
|
||||
|
||||
OpsDocList stmtsDocs = OpsLoader.loadString(yaml, OpTemplateFormat.yaml, new HashMap<>(), null);
|
||||
List<OpTemplate> stmts = stmtsDocs.getOps(false);
|
||||
OpTemplates stmts = stmtsDocs.getOps();
|
||||
List<Map<String, Object>> stmt_objs = stmts.stream().map(OpTemplate::asData).collect(Collectors.toList());
|
||||
|
||||
try {
|
||||
|
@ -130,9 +130,9 @@ public class OpsDocListTest {
|
||||
|
||||
@Test
|
||||
public void testFilteredStmts() {
|
||||
List<OpTemplate> stmts = doclist.getOps("",true);
|
||||
OpTemplates stmts = doclist.getOps().matching("", true);
|
||||
Assertions.assertThat(stmts).hasSize(6);
|
||||
stmts = doclist.getOps("root1:value23",true);
|
||||
stmts = doclist.getOps().matching("root1:value23",true);
|
||||
Assertions.assertThat(stmts).hasSize(2);
|
||||
}
|
||||
|
||||
|
@ -37,39 +37,27 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class ParsedOpTest {
|
||||
|
||||
private NBComponent getParent() {
|
||||
return new TestComponent("opparent","opparent");
|
||||
return new TestComponent("opparent", "opparent");
|
||||
}
|
||||
|
||||
private ParsedOp getOp() {
|
||||
|
||||
ParsedOp pc = new ParsedOp(
|
||||
new OpData().applyFields(
|
||||
Map.of(
|
||||
"op", Map.of(
|
||||
"stmt", "test",
|
||||
"dyna1", "{dyna1}",
|
||||
"dyna2", "{{NumberNameToString()}}",
|
||||
"identity", "{{Identity()}}"
|
||||
),
|
||||
"bindings", Map.of(
|
||||
"dyna1", "NumberNameToString()"
|
||||
)
|
||||
)
|
||||
),
|
||||
ConfigModel.of(ParsedOpTest.class)
|
||||
.add(Param.defaultTo("testcfg", "testval"))
|
||||
.asReadOnly()
|
||||
.apply(Map.of()),
|
||||
List.of(),
|
||||
getParent()
|
||||
);
|
||||
OpData opTemplate = new OpData().applyFields(Map.of(
|
||||
"op", Map.of(
|
||||
"stmt", "test", "dyna1", "{dyna1}", "dyna2", "{{NumberNameToString()}}", "identity",
|
||||
"{{Identity()}}"), "bindings", Map.of("dyna1", "NumberNameToString()")));
|
||||
|
||||
NBConfiguration nbcfg = ConfigModel.of(ParsedOpTest.class)
|
||||
.add(Param.defaultTo("testcfg", "testval")).asReadOnly().apply(Map.of());
|
||||
|
||||
ParsedOp pc = new ParsedOp(opTemplate, nbcfg.getMap(), List.of(), getParent());
|
||||
return pc;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFieldDelegationFromDynamicToStaticToConfig() {
|
||||
final NBConfiguration cfg = ConfigModel.of(ParsedOpTest.class)
|
||||
.add(Param.defaultTo("puppy", "dog"))
|
||||
.add(Param.required("surname", String.class))
|
||||
.add(Param.defaultTo("puppy", "dog")).add(Param.required("surname", String.class))
|
||||
.asReadOnly().apply(Map.of("surname", "yes"));
|
||||
|
||||
final String opt = """
|
||||
@ -80,10 +68,11 @@ public class ParsedOpTest {
|
||||
params:
|
||||
ps1: "param-one"
|
||||
""";
|
||||
final OpsDocList stmtsDocs = OpsLoader.loadString(opt, OpTemplateFormat.yaml, cfg.getMap(), null);
|
||||
assertThat(stmtsDocs.getOps(true).size()).isEqualTo(1);
|
||||
final OpTemplate opTemplate = stmtsDocs.getOps(true).get(0);
|
||||
final ParsedOp parsedOp = new ParsedOp(opTemplate, cfg, List.of(), getParent());
|
||||
final OpsDocList stmtsDocs = OpsLoader.loadString(
|
||||
opt, OpTemplateFormat.yaml, cfg.getMap(), null);
|
||||
assertThat(stmtsDocs.getOps().matching("", true).size()).isEqualTo(1);
|
||||
final OpTemplate opTemplate = stmtsDocs.getOps().matching("", true).get(0);
|
||||
final ParsedOp parsedOp = new ParsedOp(opTemplate, cfg.getMap(), List.of(), getParent());
|
||||
|
||||
assertThat(parsedOp.getAsFunctionOr("d1", "invalid").apply(1L)).isEqualTo("one");
|
||||
assertThat(parsedOp.getAsFunctionOr("s1", "invalid").apply(1L)).isEqualTo("static-one");
|
||||
@ -104,31 +93,20 @@ public class ParsedOpTest {
|
||||
final ParsedOp parsedOp = new ParsedOp(
|
||||
new OpData().applyFields(Map.of(
|
||||
"op", Map.of(
|
||||
"field1-literal", "literalvalue1",
|
||||
"field2-object", "{{NumberNameToString()}}",
|
||||
"field3-template", "pre-{dyna1}-post",
|
||||
"field4-map-template", Map.of(
|
||||
"subfield1-object", "{{Identity(); ToString()}}"
|
||||
), "field5-map-literal", Map.of(
|
||||
"subfield2-literal", "LiteralValue"
|
||||
)
|
||||
),
|
||||
"bindings", Map.of(
|
||||
"dyna1", "NumberNameToString()"
|
||||
))
|
||||
),
|
||||
ConfigModel.of(ParsedOpTest.class)
|
||||
.add(Param.defaultTo("testcfg", "testval"))
|
||||
.asReadOnly()
|
||||
.apply(Map.of()),
|
||||
List.of(),
|
||||
getParent()
|
||||
);
|
||||
"field1-literal", "literalvalue1", "field2-object", "{{NumberNameToString()}}",
|
||||
"field3-template", "pre-{dyna1}-post", "field4-map-template",
|
||||
Map.of("subfield1-object", "{{Identity(); ToString()}}"), "field5-map-literal",
|
||||
Map.of("subfield2-literal", "LiteralValue")), "bindings",
|
||||
Map.of("dyna1", "NumberNameToString()"))),
|
||||
ConfigModel.of(ParsedOpTest.class).add(Param.defaultTo("testcfg", "testval"))
|
||||
.asReadOnly().apply(Map.of()).getMap(), List.of(), getParent());
|
||||
final LongFunction<? extends String> f1 = parsedOp.getAsRequiredFunction("field1-literal");
|
||||
final LongFunction<? extends String> f2 = parsedOp.getAsRequiredFunction("field2-object");
|
||||
final LongFunction<? extends String> f3 = parsedOp.getAsRequiredFunction("field3-template");
|
||||
final LongFunction<? extends Map> f4 = parsedOp.getAsRequiredFunction("field4-map-template", Map.class);
|
||||
final LongFunction<? extends Map> f5 = parsedOp.getAsRequiredFunction("field5-map-literal", Map.class);
|
||||
final LongFunction<? extends Map> f4 = parsedOp.getAsRequiredFunction(
|
||||
"field4-map-template", Map.class);
|
||||
final LongFunction<? extends Map> f5 = parsedOp.getAsRequiredFunction(
|
||||
"field5-map-literal", Map.class);
|
||||
assertThat(f1.apply(1)).isNotNull();
|
||||
assertThat(f2.apply(2)).isNotNull();
|
||||
assertThat(f3.apply(3)).isNotNull();
|
||||
@ -148,21 +126,25 @@ public class ParsedOpTest {
|
||||
|
||||
@Test
|
||||
public void testNewListBinder() {
|
||||
final LongFunction<List<Object>> lb = getOp().newListBinder("dyna1", "identity", "dyna2", "identity");
|
||||
final LongFunction<List<Object>> lb = getOp().newListBinder(
|
||||
"dyna1", "identity", "dyna2", "identity");
|
||||
final List<Object> objects = lb.apply(1);
|
||||
assertThat(objects).isEqualTo(List.of("one", 1L, "one", 1L));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNewMapBinder() {
|
||||
final LongFunction<Map<String, Object>> mb = getOp().newOrderedMapBinder("dyna1", "identity", "dyna2");
|
||||
final LongFunction<Map<String, Object>> mb = getOp().newOrderedMapBinder(
|
||||
"dyna1", "identity", "dyna2");
|
||||
final Map<String, Object> objects = mb.apply(2);
|
||||
assertThat(objects).isEqualTo(Map.<String, Object>of("dyna1", "two", "identity", 2L, "dyna2", "two"));
|
||||
assertThat(objects).isEqualTo(
|
||||
Map.<String, Object>of("dyna1", "two", "identity", 2L, "dyna2", "two"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNewAryBinder() {
|
||||
final LongFunction<Object[]> ab = getOp().newArrayBinder("dyna1", "dyna1", "identity", "identity");
|
||||
final LongFunction<Object[]> ab = getOp().newArrayBinder(
|
||||
"dyna1", "dyna1", "identity", "identity");
|
||||
final Object[] objects = ab.apply(3);
|
||||
assertThat(objects).isEqualTo(new Object[]{"three", "three", 3L, 3L});
|
||||
}
|
||||
@ -170,45 +152,21 @@ public class ParsedOpTest {
|
||||
@Test
|
||||
public void testLayeredListBinder() {
|
||||
ParsedOp pc = new ParsedOp(
|
||||
new OpData().applyFields(
|
||||
Map.of(
|
||||
"op", Map.of(
|
||||
"alist", List.of(
|
||||
List.of(
|
||||
"item1",
|
||||
"item2-{dyna1}"
|
||||
),
|
||||
Map.of(
|
||||
"akey", "avalue",
|
||||
"akey2", "a {dyna1} value2"
|
||||
)
|
||||
)
|
||||
),
|
||||
"bindings", Map.of(
|
||||
"dyna1", "NumberNameToString()"
|
||||
)
|
||||
)
|
||||
),
|
||||
ConfigModel.of(ParsedOpTest.class)
|
||||
.add(Param.defaultTo("testcfg", "testval"))
|
||||
.asReadOnly()
|
||||
.apply(Map.of()),
|
||||
List.of(),
|
||||
getParent()
|
||||
);
|
||||
new OpData().applyFields(Map.of(
|
||||
"op", Map.of(
|
||||
"alist",
|
||||
List.of(
|
||||
List.of("item1", "item2-{dyna1}"),
|
||||
Map.of("akey", "avalue", "akey2", "a {dyna1} value2"))), "bindings",
|
||||
Map.of("dyna1", "NumberNameToString()"))),
|
||||
ConfigModel.of(ParsedOpTest.class).add(Param.defaultTo("testcfg", "testval"))
|
||||
.asReadOnly().apply(Map.of()).getMap(), List.of(), getParent());
|
||||
|
||||
Map<String, Object> result = pc.getTemplateMap().apply(1);
|
||||
assertThat(result).isEqualTo(
|
||||
Map.of(
|
||||
"alist", List.of(
|
||||
List.of("item1", "item2-one"),
|
||||
Map.of(
|
||||
"akey", "avalue",
|
||||
"akey2", "a one value2"
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
assertThat(result).isEqualTo(Map.of(
|
||||
"alist", List.of(
|
||||
List.of("item1", "item2-one"),
|
||||
Map.of("akey", "avalue", "akey2", "a one value2"))));
|
||||
|
||||
}
|
||||
|
||||
|
@ -37,7 +37,9 @@ import java.util.Optional;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.function.Function;
|
||||
|
||||
public class NBBaseComponent extends NBBaseComponentMetrics implements NBComponent, NBTokenWords, NBComponentTimeline {
|
||||
public class NBBaseComponent extends NBBaseComponentMetrics
|
||||
implements NBComponent, NBTokenWords, NBComponentTimeline
|
||||
{
|
||||
private final static Logger logger = LogManager.getLogger("RUNTIME");
|
||||
protected final NBComponent parent;
|
||||
protected final NBLabels labels;
|
||||
@ -66,8 +68,7 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone
|
||||
labelsAdvisor.validateAll(componentSpecificLabelsOnly.asMap().keySet());
|
||||
labelsAdvisor.validateAll(componentSpecificLabelsOnly.asMap().values());
|
||||
|
||||
labelsAdvisor.setName("Labels", "Check label names and values")
|
||||
.logName();
|
||||
labelsAdvisor.setName("Labels", "Check label names and values").logName();
|
||||
NBAdvisorResults advisorResults = getAdvisorResults();
|
||||
advisorResults.evaluate();
|
||||
|
||||
@ -83,7 +84,12 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone
|
||||
state = (state == NBInvokableState.ERRORED) ? state : NBInvokableState.RUNNING;
|
||||
}
|
||||
|
||||
public NBBaseComponent(NBComponent parentComponent, NBLabels componentSpecificLabelsOnly, Map<String, String> props) {
|
||||
public NBBaseComponent(
|
||||
NBComponent parentComponent,
|
||||
NBLabels componentSpecificLabelsOnly,
|
||||
Map<String, String> props
|
||||
)
|
||||
{
|
||||
this(parentComponent, componentSpecificLabelsOnly);
|
||||
props.forEach(this::setComponentProp);
|
||||
}
|
||||
@ -96,24 +102,42 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone
@Override
public synchronized NBComponent attachChild(NBComponent... children) {

for (NBComponent child : children) {
logger.debug(() -> "attaching " + child.description() + " to parent " + this.description());
for (NBComponent adding : children) {
logger.debug(() -> "attaching " + adding.description() + " to parent "
+ this.description());
for (NBComponent extant : this.children) {
NBLabels eachLabels = extant.getComponentOnlyLabels();
NBLabels newLabels = child.getComponentOnlyLabels();
NBLabels newLabels = adding.getComponentOnlyLabels();

if (eachLabels != null && newLabels != null && !eachLabels.isEmpty() && !newLabels.isEmpty() && child.getComponentOnlyLabels().equals(extant.getComponentOnlyLabels())) {
throw new RuntimeException("Adding second child under already-defined labels is not allowed:\n" + " extant: (" + extant.getClass().getSimpleName() + ") " + extant.description() + "\n" + " adding: (" + child.getClass().getSimpleName() + ") " + child.description());
if (eachLabels != null && newLabels != null && !eachLabels.isEmpty()
&& !newLabels.isEmpty() && adding.getComponentOnlyLabels()
.equals(extant.getComponentOnlyLabels()))
{
throw new RuntimeException("""
Adding second child under already-defined labels is not allowed:
parent: (PARENTCLASS) PARENTNAME
extant: (EXTANTCLASS) EXTANTNAME
adding: (ADDINGCLASS) ADDINGNAME
""".replaceAll("PARENTCLASS", this.getClass().getSimpleName())
.replaceAll("PARENTNAME", this.description())
.replaceAll("EXTANTCLASS", extant.getClass().getSimpleName())
.replaceAll("EXTANTNAME", extant.description())
.replaceAll("ADDINGCLASS", adding.getClass().getSimpleName())
.replaceAll("ADDINGNAME", adding.description()));
}
}

this.children.add(child);
this.children.add(adding);
}
return this;
}

@Override
public NBComponent detachChild(NBComponent... children) {
for (NBComponent child : children) {
logger.debug(() -> "notifying before detaching " + child.description() + " from "
+ this.description());
child.beforeDetach();
}
for (NBComponent child : children) {
logger.debug(() -> "detaching " + child.description() + " from " + this.description());
this.children.remove(child);
@ -130,7 +154,8 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone
@Override
public NBLabels getLabels() {
NBLabels effectiveLabels = (this.parent == null ? NBLabels.forKV() : parent.getLabels());
effectiveLabels = (this.labels == null) ? effectiveLabels : effectiveLabels.and(this.labels);
effectiveLabels = (this.labels == null) ? effectiveLabels :
effectiveLabels.and(this.labels);
return effectiveLabels;
}

@ -140,11 +165,28 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone
}

/// Override this method when you need to do some action within the active
/// component tree after the parent decides to detach your component, but before
/// your component loses access to the live component tree.
@Override
public void beforeDetach() {
logger.debug("before detach " + description());
}

/// The [java.io.Closeable] and [AutoCloseable] behaviors of components are
/// explicitly managed within the core [NBComponent] implementation. Thus, components can not
/// override this method, to ensure that subtype behaviors are not orphaned. The way you can
/// add a _close_ behavior is to implement [#teardown()].
///
/// During component tree unwinding, each component does the following in order:
/// 1. Changes state to [NBInvokableState#CLOSING]
/// 2. calls [#close()] on every child.
/// 3. calls [#beforeDetach()] on every child
/// 4. detaches every child.
/// 5. calls [#teardown()] on itself
///
/// This happens recursively, and is mediated by the [#close()] method itself.

@Override
public final void close() throws RuntimeException {
state = (state == NBInvokableState.ERRORED) ? state : NBInvokableState.CLOSING;
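For orientation, the unwinding order documented above is hooked from a subtype by overriding teardown() and beforeDetach() rather than close(), which is final. The following is an illustrative sketch only; the DemoResourceComponent class and its label values are hypothetical and not part of this commit.

// Hypothetical subtype illustrating the documented lifecycle hooks.
public class DemoResourceComponent extends NBBaseComponent {
    public DemoResourceComponent(NBComponent parent) {
        super(parent, NBLabels.forKV().and("demo", "resource"));
    }

    @Override
    public void beforeDetach() {
        // still attached to the live component tree at this point
        super.beforeDetach();
    }

    @Override
    protected void teardown() {
        // release subtype-owned resources; invoked by the final close() during unwinding
        super.teardown();
    }
}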
@ -171,15 +213,16 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone
}

public void onError(Exception e) {
RuntimeException wrapped = new RuntimeException("While in state " + this.state + ", an error occured: " + e, e);
RuntimeException wrapped = new RuntimeException(
"While in state " + this.state + ", an error occured: " + e, e);
logger.error(wrapped);
this.error = wrapped;
state = NBInvokableState.ERRORED;
}

/**
* Override this method in your component implementations when you need to do something
* to close out your component.
Override this method in your component implementations when you need to do something
to close out your component.
*/
protected void teardown() {
logger.debug("tearing down " + description());
@ -216,7 +259,8 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone
logger.debug(() -> description() + " handling event " + event.toString());
switch (event) {
case UpEvent ue -> {
if (parent != null) parent.onEvent(ue);
if (parent != null)
parent.onEvent(ue);
}
case DownEvent de -> {
for (NBComponent child : children) {
@ -256,11 +300,12 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone
}

/**
* This method is called by the engine to report a component going out of scope. The metrics for that component
* will bubble up through the component layers and can be buffered for reporting at multiple levels.
*
* @param m
* The metric to report
This method is called by the engine to report a component going out of scope. The metrics for
that component
will bubble up through the component layers and can be buffered for reporting at multiple
levels.
@param m
The metric to report
*/
@Override
public void reportExecutionMetric(NBMetric m) {
@ -26,10 +26,13 @@ public class NBComponentExecutionScope implements AutoCloseable {
public NBComponentExecutionScope(NBComponent... components) {
this.components = components;
}

@Override
public void close() throws RuntimeException {
for (NBComponent component : components) {
component.beforeDetach();
// This is now handled inline with [NBComponent#detachChild], which puts it after the
// out of scope notification -- this might need testing adjustments or clarification
// component.beforeDetach();
component.onEvent(new ComponentOutOfScope(component));
NBComponent parent = component.getParent();
if (parent!=null) {
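Because NBComponentExecutionScope is AutoCloseable, the natural call pattern is try-with-resources. A brief sketch, under the assumption that some component instance is already in hand:

// Sketch: close() sends the out-of-scope event and detaches from the parent, per the hunk above.
try (NBComponentExecutionScope scope = new NBComponentExecutionScope(component)) {
    // run work that uses the component here
}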
@ -18,6 +18,7 @@ package io.nosqlbench.nb.api.config.standard;

import io.nosqlbench.nb.api.advisor.NBAdvisorOutput;
import io.nosqlbench.nb.api.errors.BasicError;
import io.nosqlbench.nb.api.errors.OpConfigError;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -63,10 +64,10 @@ public class ConfigModel implements NBConfigModel {
}

/**
* Add a param that, when present in a runtime configuration, will cause the config
* model to be expanded dynamically. This is for scenarios in which you have external
* configurable resources or templates which contain their own models that can
* only be known at runtime.
Add a param that, when present in a runtime configuration, will cause the config
model to be expanded dynamically. This is for scenarios in which you have external
configurable resources or templates which contain their own models that can
only be known at runtime.
*/

public NBConfigModel asReadOnly() {
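The expander documentation above is easier to follow next to the basic ConfigModel flow already visible in the test near the top of this diff. A minimal usage sketch follows, with placeholder parameter names and the SomeAdapter owner class that are not part of this commit:

// Sketch: declare a model, freeze it, and apply user-provided values.
NBConfigModel model = ConfigModel.of(SomeAdapter.class) // SomeAdapter is hypothetical
    .add(Param.defaultTo("threads", 1).setDescription("worker threads"))
    .add(Param.optional("labels", String.class).setDescription("extra metric labels"))
    .asReadOnly();
NBConfiguration cfg = model.apply(Map.of("threads", 4)); // unknown keys are flagged by the model
int threads = cfg.get("threads", Integer.class);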
@ -88,11 +89,39 @@ public class ConfigModel implements NBConfigModel {
|
||||
return ofType;
|
||||
}
|
||||
|
||||
public static <T> T convertValueTo(String configName, String paramName, Object value, Class<T> type) {
|
||||
public static <T> T convertValueTo(
|
||||
String configName,
|
||||
String paramName,
|
||||
Object value,
|
||||
Class<T> type
|
||||
)
|
||||
{
|
||||
try {
|
||||
if (type.isAssignableFrom(value.getClass())) {
|
||||
return type.cast(value);
|
||||
|
||||
} else if (Enum.class.isAssignableFrom(type)) {
|
||||
if (value instanceof CharSequence cseq) {
|
||||
String lowerval = cseq.toString().toLowerCase();
|
||||
EnumSet<?> values = EnumSet.allOf((Class<? extends Enum>) type);
|
||||
List<T> matched = new ArrayList<>();
|
||||
for (Enum<?> _enum : values) {
|
||||
String lowerenum = _enum.name().toLowerCase(Locale.ROOT).replaceAll("\\W","");
|
||||
if (lowerval.equals(lowerenum)) {
|
||||
matched.add((T)_enum);
|
||||
}
|
||||
}
|
||||
if (matched.size() == 1) {
|
||||
return matched.get(0);
|
||||
}
|
||||
if (matched.size() > 1) {
|
||||
throw new OpConfigError(
|
||||
"Multiple matches were found for config param [" + paramName + "] to "
|
||||
+ "possible enums: [" + values + "]");
|
||||
}
|
||||
} else {
|
||||
throw new RuntimeException("Cannot convert " + value + " to " + type + " for "
|
||||
+ "config param [" + paramName + "] (must be "
|
||||
+ "string)");}
|
||||
} else if (Number.class.isAssignableFrom(value.getClass())) { // A numeric value, and do we have a compatible target type?
|
||||
Number number = (Number) value;
|
||||
// This series of double fake-outs is heinous, but it works to get around design
|
||||
@ -111,8 +140,9 @@ public class ConfigModel implements NBConfigModel {
|
||||
} else if (type.equals(Short.class) || type == short.class) {
|
||||
return (T) (Short) number.shortValue();
|
||||
} else {
|
||||
throw new RuntimeException("Number type " + type.getSimpleName() + " could " +
|
||||
" not be converted from " + value.getClass().getSimpleName());
|
||||
throw new RuntimeException("Number type " + type.getSimpleName() + " could "
|
||||
+ " not be converted from " + value.getClass()
|
||||
.getSimpleName());
|
||||
}
|
||||
} else if (value instanceof CharSequence) { // A stringy type, and do we have a compatible target type?
|
||||
String string = ((CharSequence) value).toString();
|
||||
@ -132,15 +162,17 @@ public class ConfigModel implements NBConfigModel {
|
||||
} else if (type == boolean.class || type == Boolean.class) {
|
||||
return (T) Boolean.valueOf(Boolean.parseBoolean(string));
|
||||
} else {
|
||||
throw new RuntimeException("CharSequence type " + type.getSimpleName() + " could " +
|
||||
" not be converted from " + value.getClass().getSimpleName());
|
||||
throw new RuntimeException(
|
||||
"CharSequence type " + type.getSimpleName() + " could "
|
||||
+ " not be converted from " + value.getClass().getSimpleName());
|
||||
}
|
||||
} else if (value instanceof Boolean bool) {
|
||||
if (type == boolean.class) {
|
||||
return (T) bool;
|
||||
} else {
|
||||
throw new RuntimeException("Boolean type " + type.getSimpleName() + " could " +
|
||||
" not be converted from " + value.getClass().getSimpleName());
|
||||
throw new RuntimeException("Boolean type " + type.getSimpleName() + " could "
|
||||
+ " not be converted from " + value.getClass()
|
||||
.getSimpleName());
|
||||
|
||||
}
|
||||
}
|
||||
@ -150,10 +182,8 @@ public class ConfigModel implements NBConfigModel {
|
||||
}
|
||||
|
||||
throw new RuntimeException(
|
||||
"While configuring " + paramName + " for " + configName + ", " +
|
||||
"Unable to convert " + value.getClass() + " to " +
|
||||
type.getCanonicalName()
|
||||
);
|
||||
"While configuring " + paramName + " for " + configName + ", " + "Unable to convert "
|
||||
+ value.getClass() + " to " + type.getCanonicalName());
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -210,7 +240,8 @@ public class ConfigModel implements NBConfigModel {
|
||||
}
|
||||
if (cval == null && param.isRequired()) {
|
||||
activename = param.getNames().get(0);
|
||||
cval = param.getDefaultValue(); // We know this will be valid. It was validated, correct?
|
||||
cval
|
||||
= param.getDefaultValue(); // We know this will be valid. It was validated, correct?
|
||||
}
|
||||
if (cval != null) {
|
||||
cval = convertValueTo(ofType.getSimpleName(), activename, cval, type);
|
||||
@ -225,7 +256,8 @@ public class ConfigModel implements NBConfigModel {
|
||||
for (String configkey : config.keySet()) {
|
||||
Param<?> element = this.paramsByName.get(configkey);
|
||||
if (element != null) {
|
||||
String warning = "Config parameter '" + configkey + "' is also a " + type + ". Check for possible conflicts.\n";
|
||||
String warning = "Config parameter '" + configkey + "' is also a " + type
|
||||
+ ". Check for possible conflicts.\n";
|
||||
NBAdvisorOutput.output(Level.WARN, warning);
|
||||
}
|
||||
}
|
||||
@ -241,8 +273,8 @@ public class ConfigModel implements NBConfigModel {
|
||||
}
|
||||
|
||||
private ConfigModel expand(ConfigModel configModel, Map<String, ?> config) {
|
||||
List<Param<?>> expanders = configModel.params.stream()
|
||||
.filter(p -> p.getExpander() != null).toList();
|
||||
List<Param<?>> expanders = configModel.params.stream().filter(p -> p.getExpander() != null)
|
||||
.toList();
|
||||
for (Param<?> expandingParameter : expanders) {
|
||||
for (String name : expandingParameter.getNames()) {
|
||||
if (config.containsKey(name)) {
|
||||
@ -285,9 +317,10 @@ public class ConfigModel implements NBConfigModel {
|
||||
}
|
||||
}
|
||||
if (!provided) {
|
||||
throw new RuntimeException("A required config element named '" + param.getNames() +
|
||||
"' and type '" + param.getType().getSimpleName() + "' was not found\n" +
|
||||
"for configuring a " + getOf().getSimpleName());
|
||||
throw new RuntimeException(
|
||||
"A required config element named '" + param.getNames() + "' and type '"
|
||||
+ param.getType().getSimpleName() + "' was not found\n"
|
||||
+ "for configuring a " + getOf().getSimpleName());
|
||||
}
|
||||
|
||||
}
|
||||
@ -298,8 +331,10 @@ public class ConfigModel implements NBConfigModel {
|
||||
// For each provided configuration element ...
|
||||
for (String configkey : config.keySet()) {
|
||||
Param<?> element = this.paramsByName.get(configkey);
|
||||
String warning = "Unknown config parameter '" + configkey + "' in config model while configuring " + getOf().getSimpleName()
|
||||
+ ", possible parameter names are " + this.paramsByName.keySet() + ".\n";
|
||||
String warning = "Unknown config parameter '" + configkey
|
||||
+ "' in config model while configuring " + getOf().getSimpleName()
|
||||
+ ", possible parameter names are " + this.paramsByName.keySet()
|
||||
+ ".\n";
|
||||
if (element == null) {
|
||||
String warnonly = System.getenv("NB_CONFIG_WARNINGS_ONLY");
|
||||
logger.warn("WARNING: " + warning);
|
||||
@ -324,7 +359,8 @@ public class ConfigModel implements NBConfigModel {
|
||||
}
|
||||
}
|
||||
if (names.size() > 1) {
|
||||
throw new NBConfigError("Multiple names for the same parameter were provided: " + names);
|
||||
throw new NBConfigError(
|
||||
"Multiple names for the same parameter were provided: " + names);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -341,15 +377,15 @@ public class ConfigModel implements NBConfigModel {
|
||||
|
||||
@Override
|
||||
public void log() {
|
||||
logger.debug(() -> "ConfigModel: "+ofType);
|
||||
for (Param<?> param : getParams()) logger.debug(() -> "ConfigModel: " + param);
|
||||
logger.debug(() -> "ConfigModel: " + ofType);
|
||||
for (Param<?> param : getParams())
|
||||
logger.debug(() -> "ConfigModel: " + param);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
String sb = "[" +
|
||||
params.stream().map(p -> p.getNames().get(0)).collect(Collectors.joining(",")) +
|
||||
"]";
|
||||
String sb = "[" + params.stream().map(p -> p.getNames().get(0))
|
||||
.collect(Collectors.joining(",")) + "]";
|
||||
return sb;
|
||||
}
|
||||
}
|
||||
|
@ -18,6 +18,7 @@ package io.nosqlbench.nb.api.config.standard;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
* All implementation types which wish to have a type-marshalled configuration
@ -65,6 +66,15 @@ public interface NBConfigurable extends NBCanConfigure, NBConfigModelProvider {
}
}

static void applyMatchingCollection(NBConfiguration cfg, Collection<?> configurables) {
for (Object configurable : configurables) {
if (configurable instanceof NBConfigurable c) {
NBConfiguration partial = c.getConfigModel().matchConfig(cfg);
c.applyConfig(partial);
}
}
}

static NBConfigModel collectModels(Class<?> of, Collection<?> configurables) {
ConfigModel model = ConfigModel.of(of);
for (Object configurable : configurables) {
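applyMatchingCollection() above only configures collection members that actually implement NBConfigurable. A rough sketch of a caller, where the Harness, MetricsSink, and PlainHelper types are hypothetical stand-ins:

// Sketch: only NBConfigurable members receive their matched slice of the configuration.
NBConfiguration cfg = ConfigModel.of(Harness.class) // Harness is hypothetical
    .add(Param.defaultTo("interval", 1000))
    .asReadOnly()
    .apply(Map.of());
List<Object> elements = List.of(new MetricsSink(), new PlainHelper()); // hypothetical types
NBConfigurable.applyMatchingCollection(cfg, elements); // PlainHelper is simply skipped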
@ -18,25 +18,22 @@ package io.nosqlbench.nb.api.config.standard;
|
||||
|
||||
import io.nosqlbench.nb.api.system.NBEnvironment;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.*;
|
||||
|
||||
public class NBConfiguration {
|
||||
|
||||
private final LinkedHashMap<String, Object> data;
|
||||
private final NBConfigModel model;
|
||||
private final List<NBReconfigurable> listeners = new ArrayList<>();
|
||||
|
||||
/**
|
||||
* Create a NBConfigReader from a known valid configuration and a config model.
|
||||
* This method is restricted to encourage construction of readers only by passing
|
||||
* through the friendly {@link NBConfigModel#apply(Map)} method.
|
||||
*
|
||||
* @param model
|
||||
* A configuration model, describing what is allowed to be configured by name and type.
|
||||
* @param validConfig
|
||||
* A valid config reader.
|
||||
Create a NBConfigReader from a known valid configuration and a config model.
|
||||
This method is restricted to encourage construction of readers only by passing
|
||||
through the friendly {@link NBConfigModel#apply(Map)} method.
|
||||
@param model
|
||||
A configuration model, describing what is allowed to be configured by name and type.
|
||||
@param validConfig
|
||||
A valid config reader.
|
||||
*/
|
||||
protected NBConfiguration(NBConfigModel model, LinkedHashMap<String, Object> validConfig) {
|
||||
this.data = validConfig;
|
||||
@ -48,16 +45,18 @@ public class NBConfiguration {
|
||||
}
|
||||
|
||||
public static NBConfiguration empty() {
|
||||
return new NBConfiguration(ConfigModel.of(Object.class).asReadOnly(), new LinkedHashMap<>());
|
||||
return new NBConfiguration(
|
||||
ConfigModel.of(Object.class).asReadOnly(),
|
||||
new LinkedHashMap<>()
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value of the named parameter as {@link #getOptional(String)}, so long
|
||||
* as no env vars were reference OR all env var references were found.
|
||||
*
|
||||
* @param name
|
||||
* The name of the variable to look up
|
||||
* @return An optional value, if present and (optionally) interpolated correctly from the environment
|
||||
Returns the value of the named parameter as {@link #getOptional(String)}, so long
|
||||
as no env vars were reference OR all env var references were found.
|
||||
@param name
|
||||
The name of the variable to look up
|
||||
@return An optional value, if present and (optionally) interpolated correctly from the environment
|
||||
*/
|
||||
public Optional<String> getEnvOptional(String name) {
|
||||
Optional<String> optionalValue = getOptional(name);
|
||||
@ -78,50 +77,55 @@ public class NBConfiguration {
|
||||
|
||||
public <T> T getWithEnv(String name, Class<? extends T> vclass) {
|
||||
T value = get(name, vclass);
|
||||
if (value == null) {
|
||||
|
||||
}
|
||||
if (value instanceof String) {
|
||||
Optional<String> interpolated = NBEnvironment.INSTANCE.interpolate(value.toString());
|
||||
if (interpolated.isEmpty()) {
|
||||
throw new NBConfigError("Unable to interpolate env and sys props in '" + value + "'");
|
||||
throw new NBConfigError(
|
||||
"Unable to interpolate env and sys props in '" + value + "'");
|
||||
}
|
||||
String result = interpolated.get();
|
||||
return ConfigModel.convertValueTo(this.getClass().getSimpleName(), name, result, vclass);
|
||||
return ConfigModel.convertValueTo(
|
||||
this.getClass().getSimpleName(),
|
||||
name,
|
||||
result,
|
||||
vclass
|
||||
);
|
||||
} else {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a config value or object by name. This uses type inference (as a generic method)
|
||||
* in addition to the internal model for type checking and ergonomic use. If you do not
|
||||
* call this within an assignment or context where the Java compiler knows what type you
|
||||
* are expecting, then use {@link #get(String, Class)} instead.
|
||||
*
|
||||
* @param name
|
||||
* The name of the configuration parameter
|
||||
* @param <T>
|
||||
* The (inferred) generic type of the configuration value
|
||||
* @return The value of type T, matching the config model type for the provided field name
|
||||
Get a config value or object by name. This uses type inference (as a generic method)
|
||||
in addition to the internal model for type checking and ergonomic use. If you do not
|
||||
call this within an assignment or context where the Java compiler knows what type you
|
||||
are expecting, then use {@link #get(String, Class)} instead.
|
||||
@param name
|
||||
The name of the configuration parameter
|
||||
@param <T>
|
||||
The (inferred) generic type of the configuration value
|
||||
@return The value of type T, matching the config model type for the provided field name
|
||||
*/
|
||||
public <T> T get(String name) {
|
||||
Param<T> param = (Param<T>) model.getNamedParams().get(name);
|
||||
if (param == null) {
|
||||
throw new NBConfigError("Attempted to get parameter for name '" + name + "' but this parameter has no " +
|
||||
"model defined for " + this.getModel().getOf());
|
||||
throw new NBConfigError(
|
||||
"Attempted to get parameter for name '" + name + "' but this parameter has no "
|
||||
+ "model defined for " + this.getModel().getOf());
|
||||
}
|
||||
// if (param.isRequired() && (param.getDefaultValue()==null) && )
|
||||
// if (param.isRequired() && (param.getDefaultValue()==null) && )
|
||||
Object object = this.data.get(name);
|
||||
object = object != null ? object : param.getDefaultValue();
|
||||
if (object == null && param.isRequired()) {
|
||||
throw new NBConfigError("An object by name '" + name + "' was requested as required, and no value was" +
|
||||
" defined for it. This user provided value must be set or otherwise marked optional or given a" +
|
||||
" default value in the parameter model.");
|
||||
throw new NBConfigError(
|
||||
"An object by name '" + name + "' was requested as required, and no value was"
|
||||
+ " defined for it. This user provided value must be set or otherwise marked optional or given a"
|
||||
+ " default value in the parameter model.");
|
||||
} else if (object == null && !param.isRequired()) {
|
||||
throw new NBConfigError("An object by name '" + name + "' was requested as given by the config layer," +
|
||||
" but no value was present, and no default was found in the config model. This is an ambiguous " +
|
||||
"scenario. Either access the object as optional, or give it a default value. (code change)");
|
||||
throw new NBConfigError(
|
||||
"An object by name '" + name + "' was requested as given by the config layer,"
|
||||
+ " but no value was present, and no default was found in the config model. This is an ambiguous "
|
||||
+ "scenario. Either access the object as optional, or give it a default value. (code change)");
|
||||
}
|
||||
if (param.type.isInstance(object)) {
|
||||
return (T) object;
|
||||
@ -130,7 +134,11 @@ public class NBConfiguration {
|
||||
} else if (NBTypeConverter.canConvert(object, param.type)) {
|
||||
return NBTypeConverter.convert(object, param.type);
|
||||
} else {
|
||||
throw new NBConfigError("Unable to assign config value for field '" + name + "' of type '" + object.getClass().getCanonicalName() + "' to the required return type '" + param.type.getCanonicalName() + "' as specified in the config model for '" + model.getOf().getCanonicalName());
|
||||
throw new NBConfigError(
|
||||
"Unable to assign config value for field '" + name + "' of type '"
|
||||
+ object.getClass().getCanonicalName() + "' to the required return type '"
|
||||
+ param.type.getCanonicalName() + "' as specified in the config model for '"
|
||||
+ model.getOf().getCanonicalName());
|
||||
}
|
||||
}
|
||||
|
||||
@ -138,12 +146,16 @@ public class NBConfiguration {
|
||||
|
||||
Param<T> param = model.getParam(name);
|
||||
if (param == null) {
|
||||
throw new NBConfigError("Parameter named '" + name + "' is not valid for " + model.getOf().getSimpleName() + ".");
|
||||
throw new NBConfigError(
|
||||
"Parameter named '" + name + "' is not valid for " + model.getOf().getSimpleName()
|
||||
+ ".");
|
||||
}
|
||||
|
||||
if ((!param.isRequired()) && param.getDefaultValue() == null) {
|
||||
throw new RuntimeException("Non-optional get on optional parameter " + name + "' which has no default value while configuring " + model.getOf() + "." +
|
||||
"\nTo avoid user impact, ensure that ConfigModel and NBConfigurable usage are aligned.");
|
||||
throw new RuntimeException("""
|
||||
Non-optional get on optional parameter 'PNAME' which has no default value while configuring OF.
|
||||
To avoid user impact, ensure that ConfigModel and NBConfigurable usage are aligned.
|
||||
""".replaceAll("PNAME", name).replaceAll("OF", model.getOf().getSimpleName()));
|
||||
}
|
||||
|
||||
Object o = data.get(name);
|
||||
@ -178,7 +190,8 @@ public class NBConfiguration {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new NBConfigError("Parameter definition was not found for " + Arrays.toString(names) + ".");
|
||||
throw new NBConfigError(
|
||||
"Parameter definition was not found for " + Arrays.toString(names) + ".");
|
||||
}
|
||||
}
|
||||
if (o == null) {
|
||||
@ -195,7 +208,9 @@ public class NBConfiguration {
|
||||
} else if (NBTypeConverter.canConvert(o, type)) {
|
||||
return Optional.of((T) NBTypeConverter.convert(o, type));
|
||||
} else {
|
||||
throw new NBConfigError("config param " + Arrays.toString(names) + " was not assignable to class '" + type.getCanonicalName() + "'");
|
||||
throw new NBConfigError(
|
||||
"config param " + Arrays.toString(names) + " was not assignable to class '"
|
||||
+ type.getCanonicalName() + "'");
|
||||
}
|
||||
|
||||
}
|
||||
@ -208,7 +223,9 @@ public class NBConfiguration {
|
||||
if (defaultValue.getClass().isAssignableFrom(o.getClass())) {
|
||||
return (T) o;
|
||||
}
|
||||
throw new NBConfigError("config parameter '" + name + "' is not assignable to required type '" + defaultValue.getClass() + "'");
|
||||
throw new NBConfigError(
|
||||
"config parameter '" + name + "' is not assignable to required type '"
|
||||
+ defaultValue.getClass() + "'");
|
||||
}
|
||||
|
||||
public <T> T param(String name, Class<? extends T> vclass) {
|
||||
@ -238,4 +255,36 @@ public class NBConfiguration {
|
||||
return data;
|
||||
}
|
||||
|
||||
/// see [#update(Map)]
public <T> NBConfiguration update(String fieldName, T value) {
return update(Map.of(fieldName, value));
}

/// This will create a new configuration without modifying the existing one,
/// retaining the same config model and all other values except for the modified ones.
/// Further, any reconfig listeners which are registered will be notified via the
/// [NBReconfigurable#applyReconfig(NBConfiguration)] method.
///
/// This eventing will occur whether or not the value was actually changed. Spurious
/// eventing of duplicate values should be considered a design bug.
///
/// Any holders of an updated configuration must maintain their own copies if necessary for
/// deltas.
public <T> NBConfiguration update(Map<String, Object> entries) {
NBConfiguration updated = model.apply(new LinkedHashMap<>(this.data) {
{
putAll(entries);
}
});
for (NBReconfigurable listener : this.listeners) {
listener.applyReconfig(updated);
}
return updated;
}

public NBConfiguration addListener(NBReconfigurable reconfigurable) {
this.listeners.add(reconfigurable);
return this;
}

}

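The update() notes above amount to copy-on-write plus listener notification. A short sketch, assuming a config model and an NBReconfigurable listener already exist in the caller's context:

// Sketch: updating yields a new NBConfiguration and re-notifies registered listeners.
NBConfiguration original = model.apply(Map.of("threads", 1)); // 'model' assumed from context
original.addListener(reconfigurableActivity); // some NBReconfigurable instance
NBConfiguration updated = original.update("threads", 8); // 'original' itself is unchanged
// update() has already called reconfigurableActivity.applyReconfig(updated)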
@ -39,7 +39,7 @@ import java.util.Collection;
* {@link #applyMatching(NBConfiguration, Collection)} can be used to apply
* reconfigurations to groups of elements with a shared configuration model.
*/
public interface NBReconfigurable extends NBCanReconfigure, NBReconfigModelProvider {
public interface NBReconfigurable extends NBConfigurable, NBCanReconfigure, NBReconfigModelProvider {

/**
* This applies a configuration to an element <EM>AFTER</EM> the initial
@ -0,0 +1,113 @@
|
||||
package io.nosqlbench.nb.api.engine.activityimpl;
|
||||
|
||||
/*
|
||||
* Copyright (c) nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
|
||||
import io.nosqlbench.nb.api.labels.NBLabelSpec;
|
||||
import io.nosqlbench.nb.api.labels.NBLabels;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Optional;
|
||||
|
||||
public class ActivityConfig extends NBConfiguration {
|
||||
|
||||
public static final String DEFAULT_ALIAS = "UNNAMEDACTIVITY";
|
||||
public static final String DEFAULT_ATYPE = "stdout";
|
||||
public static final String DEFAULT_CYCLES = "0";
|
||||
public static final String DEFAULT_RECYCLES = "1";
|
||||
public static final int DEFAULT_THREADS = 1;
|
||||
public static final Logger logger = LogManager.getLogger(ActivityConfig.class);
|
||||
|
||||
// an alias with which to control the activity while it is running
|
||||
public static final String FIELD_ALIAS = "alias";
|
||||
// a file or URL containing the activity: op templates, generator bindings, ...
|
||||
public static final String FIELD_ATYPE = "type";
|
||||
// cycles for this activity in either "M" or "N..M" form. "M" form implies "0..M"
|
||||
public static final String FIELD_CYCLES = "cycles";
|
||||
public static final String FIELD_RECYCLES = "recycles";
|
||||
// initial thread concurrency for this activity
|
||||
public static final String FIELD_THREADS = "threads";
|
||||
public static final String FIELD_LABELS = "labels";
|
||||
|
||||
public static final String[] field_list = {
|
||||
FIELD_ALIAS, FIELD_ATYPE, FIELD_CYCLES, FIELD_THREADS, FIELD_RECYCLES
|
||||
};
|
||||
|
||||
public ActivityConfig(NBConfiguration config) {
|
||||
this(config.getModel(), config.getMap());
|
||||
}
|
||||
|
||||
public ActivityConfig(NBConfigModel model, LinkedHashMap<String, Object> validConfig)
|
||||
{
|
||||
super(model, validConfig);
|
||||
Optional<String> directAlias = getOptional("alias");
|
||||
if (!directAlias.isPresent()) {
|
||||
String indirectAlias = getOptional(ActivityConfig.FIELD_ALIAS).or(
|
||||
() -> getOptional("workload")).or(() -> getOptional("driver"))
|
||||
.orElse("ACTIVITYNAME");
|
||||
getMap().put("alias", indirectAlias);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public String getAlias() {
|
||||
return get("alias");
|
||||
}
|
||||
|
||||
public NBLabels auxLabels() {
|
||||
Optional<String> auxLabelSpec = getOptional(FIELD_LABELS);
|
||||
if (auxLabelSpec.isPresent()) {
|
||||
return NBLabelSpec.parseLabels(auxLabelSpec.get());
|
||||
}
|
||||
return NBLabels.forKV();
|
||||
|
||||
}
|
||||
|
||||
public Optional<String> getDriver() {
|
||||
return getOptional("driver", "type");
|
||||
// .orElseThrow(() -> new BasicError("The parameter " +
|
||||
// "'driver=' is required."));
|
||||
|
||||
}
|
||||
|
||||
public void setThreads(int i) {
|
||||
update("threads", i);
|
||||
}
|
||||
|
||||
public int getThreads() {
|
||||
return get(FIELD_THREADS, Integer.class);
|
||||
}
|
||||
|
||||
public String summary() {
|
||||
return String.valueOf(this);
|
||||
}
|
||||
|
||||
public void updateLastCycle(long maxValue) {
|
||||
CyclesSpec spec = CyclesSpec.parse(get("cycles", String.class));
|
||||
spec = spec.withLast(maxValue);
|
||||
update("cycles", spec.toString());
|
||||
}
|
||||
|
||||
public CyclesSpec getCyclesSpec() {
|
||||
return CyclesSpec.parse(get("cycles", String.class));
|
||||
}
|
||||
}
|
@ -1,272 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.nosqlbench.nb.api.engine.activityimpl;
|
||||
|
||||
import io.nosqlbench.nb.api.components.core.NBNamedElement;
|
||||
import io.nosqlbench.nb.api.config.standard.ConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
|
||||
import io.nosqlbench.nb.api.config.standard.Param;
|
||||
import io.nosqlbench.nb.api.errors.BasicError;
|
||||
import io.nosqlbench.nb.api.labels.NBLabelSpec;
|
||||
import io.nosqlbench.nb.api.labels.NBLabels;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
import java.security.InvalidParameterException;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
/**
|
||||
* <p>A runtime definition for an activity.</p>
|
||||
* <p>Instances of ActivityDef hold control values for the execution of a single activity.
|
||||
* Each thread of the related activity is initialized with the associated ActivityDef.
|
||||
* When the ActivityDef is modified, interested activity threads are notified so that
|
||||
* they can dynamically adjust.</p>
|
||||
* <p>The canonical values for all parameters are kept internally in the parameter map.
|
||||
* Essentially, ActivityDef is just a type-aware wrapper around a thread-safe parameter map,
|
||||
* with an atomic change counter which can be used to signal changes to observers.</p>
|
||||
*/
|
||||
public class ActivityDef implements NBNamedElement {
|
||||
|
||||
// milliseconds between cycles per thread, for slow tests only
|
||||
public static final String DEFAULT_ALIAS = "UNNAMEDACTIVITY";
|
||||
public static final String DEFAULT_ATYPE = "stdout";
|
||||
public static final String DEFAULT_CYCLES = "0";
|
||||
public static final String DEFAULT_RECYCLES = "1";
|
||||
public static final int DEFAULT_THREADS = 1;
|
||||
public static final Logger logger = LogManager.getLogger(ActivityDef.class);
|
||||
// an alias with which to control the activity while it is running
|
||||
public static final String FIELD_ALIAS = "alias";
|
||||
// a file or URL containing the activity: op templates, generator bindings, ...
|
||||
public static final String FIELD_ATYPE = "type";
|
||||
// cycles for this activity in either "M" or "N..M" form. "M" form implies "0..M"
|
||||
public static final String FIELD_CYCLES = "cycles";
|
||||
public static final String FIELD_RECYCLES = "recycles";
|
||||
// initial thread concurrency for this activity
|
||||
public static final String FIELD_THREADS = "threads";
|
||||
public static final String[] field_list = {
|
||||
FIELD_ALIAS, FIELD_ATYPE, FIELD_CYCLES, FIELD_THREADS, FIELD_RECYCLES
|
||||
};
|
||||
// parameter map has its own internal atomic map
|
||||
private final ParameterMap parameterMap;
|
||||
private CyclesSpec cyclesSpec;
|
||||
private CyclesSpec reCyclesSpec;
|
||||
|
||||
public ActivityDef(ParameterMap parameterMap) {
|
||||
this.parameterMap = parameterMap;
|
||||
}
|
||||
|
||||
//public static Optional<ActivityDef> parseActivityDefOptionally(String namedActivitySpec) {
|
||||
// try {
|
||||
// ActivityDef activityDef = parseActivityDef(namedActivitySpec);
|
||||
// return Optional.of(activityDef);
|
||||
// } catch (Exception e) {
|
||||
// return Optional.empty();
|
||||
// }
|
||||
//}
|
||||
|
||||
public static ActivityDef parseActivityDef(String namedActivitySpec) {
|
||||
Optional<ParameterMap> activityParameterMap = ParameterMap.parseParams(namedActivitySpec);
|
||||
ActivityDef activityDef = new ActivityDef(activityParameterMap.orElseThrow(
|
||||
() -> new RuntimeException("Unable to parse:" + namedActivitySpec)
|
||||
));
|
||||
logger.info("parsed activityDef {} to-> {}", namedActivitySpec, activityDef);
|
||||
|
||||
return activityDef;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "ActivityDef:" + parameterMap.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* The alias that the associated activity instance is known by.
|
||||
*
|
||||
* @return the alias
|
||||
*/
|
||||
public String getAlias() {
|
||||
return parameterMap.getOptionalString("alias").orElse(DEFAULT_ALIAS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return tbe Activity Driver Adapter Name
|
||||
*
|
||||
* @return the driver adapter name
|
||||
*/
|
||||
public String getActivityDriver() {
|
||||
return parameterMap.getOptionalString("type", "driver").orElse(DEFAULT_ATYPE);
|
||||
}
|
||||
|
||||
/**
|
||||
* The first cycle that will be used for execution of this activity, inclusive.
|
||||
* If the value is provided as a range as in 0..10, then the first number is the start cycle
|
||||
* and the second number is the end cycle +1. Effectively, cycle ranges
|
||||
* are [closed,open) intervals, as in [min..max)
|
||||
*
|
||||
* @return the long start cycle
|
||||
*/
|
||||
public long getStartCycle() {
|
||||
return getCyclesSpec().first_inclusive();
|
||||
}
|
||||
|
||||
public void setStartCycle(long firstCycleInclusive) {
|
||||
cyclesSpec=getCyclesSpec().withFirst(firstCycleInclusive);
|
||||
}
|
||||
|
||||
public void setStartCycle(String firstCycleInclusive) {
|
||||
cyclesSpec=getCyclesSpec().withFirst(firstCycleInclusive);
|
||||
}
|
||||
|
||||
public void setEndCycle(long lastCycleExclusive) {
|
||||
cyclesSpec=getCyclesSpec().withLast(lastCycleExclusive);
|
||||
}
|
||||
public void setEndCycle(String lastCycleExclusive) {
|
||||
cyclesSpec=getCyclesSpec().withLast(lastCycleExclusive);
|
||||
}
|
||||
|
||||
/**
|
||||
* The last cycle that will be used for execution of this activity, inclusive.
|
||||
*
|
||||
* @return the long end cycle
|
||||
*/
|
||||
public long getEndCycle() {
|
||||
return getCyclesSpec().last_exclusive();
|
||||
}
|
||||
|
||||
/**
|
||||
* The number of threads (AKA slots) that the associated activity should currently be using.
|
||||
*
|
||||
* @return target thread count
|
||||
*/
|
||||
public int getThreads() {
|
||||
return parameterMap.getOptionalInteger(FIELD_THREADS).orElse(DEFAULT_THREADS);
|
||||
}
|
||||
|
||||
public void setThreads(int threads) {
|
||||
parameterMap.set(FIELD_THREADS, threads);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the parameter map, which is the backing-store for all data within an ActivityDef.
|
||||
*
|
||||
* @return the parameter map
|
||||
*/
|
||||
public ParameterMap getParams() {
|
||||
return parameterMap;
|
||||
}
|
||||
|
||||
public AtomicLong getChangeCounter() {
|
||||
return parameterMap.getChangeCounter();
|
||||
}
|
||||
|
||||
public void setCycles(String cycles) {
|
||||
parameterMap.set(FIELD_CYCLES, cycles);
|
||||
this.cyclesSpec=CyclesSpec.parse(cycles);
|
||||
checkInvariants();
|
||||
}
|
||||
|
||||
public String getCycleSummary() {
|
||||
return getCyclesSpec().summary();
|
||||
}
|
||||
|
||||
public synchronized long getCycleCount() {
|
||||
return getCyclesSpec().cycle_count();
|
||||
}
|
||||
|
||||
public synchronized CyclesSpec getCyclesSpec() {
|
||||
if (this.cyclesSpec==null) {
|
||||
this.cyclesSpec = CyclesSpec.parse(parameterMap.getOptionalString(FIELD_CYCLES).orElse(DEFAULT_CYCLES));
|
||||
}
|
||||
return this.cyclesSpec;
|
||||
}
|
||||
public synchronized CyclesSpec getRecyclesSpec() {
|
||||
if (this.reCyclesSpec==null) {
|
||||
this.reCyclesSpec = CyclesSpec.parse(parameterMap.getOptionalString(FIELD_RECYCLES).orElse(DEFAULT_RECYCLES));
|
||||
}
|
||||
return this.reCyclesSpec;
|
||||
|
||||
}
|
||||
|
||||
private void checkInvariants() {
|
||||
if (getStartCycle() >= getEndCycle()) {
|
||||
throw new InvalidParameterException("Start cycle must be strictly less than end cycle, but they are [" + getStartCycle() + ',' + getEndCycle() + ')');
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return getAlias();
|
||||
}
|
||||
|
||||
public ActivityDef deprecate(String deprecatedName, String newName) {
|
||||
Object deprecatedParam = this.parameterMap.get(deprecatedName);
|
||||
if (null == deprecatedParam) {
|
||||
return this;
|
||||
}
|
||||
if (deprecatedParam instanceof CharSequence chars) {
|
||||
if (this.parameterMap.containsKey(newName)) {
|
||||
throw new BasicError("You have specified activity param '" + deprecatedName + "' in addition to the valid name '" + newName + "'. Remove '" + deprecatedName + "'.");
|
||||
}
|
||||
if (!newName.equals("driver")) {
|
||||
logger.warn("Auto replacing deprecated activity param '{}={}' with new '{}={}'.", deprecatedName, chars, newName, chars);
|
||||
}
|
||||
parameterMap.put(newName, parameterMap.remove(deprecatedName));
|
||||
} else {
|
||||
throw new BasicError("Can't replace deprecated name with value of type " + deprecatedName.getClass().getCanonicalName());
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public NBLabels auxLabels() {
|
||||
Optional<String> auxLabelSpec = getParams().getOptionalString("labels");
|
||||
if (auxLabelSpec.isPresent()) {
|
||||
return NBLabelSpec.parseLabels(auxLabelSpec.get());
|
||||
}
|
||||
return NBLabels.forKV();
|
||||
|
||||
}
|
||||
|
||||
public NBConfigModel getConfigModel() {
|
||||
ConfigModel cfgmodel = ConfigModel.of(this.getClass());
|
||||
Map<String, String> params = parameterMap.getStringStringMap();
|
||||
params.forEach((k, v) -> {
|
||||
cfgmodel.add(Param.defaultTo(k, v, "activity parameter found on command line"));
|
||||
});
|
||||
cfgmodel.add(Param.defaultTo(FIELD_ALIAS, DEFAULT_ALIAS).setDescription("The alias for the operations"));
|
||||
cfgmodel.add(Param.defaultTo(FIELD_ATYPE, DEFAULT_ATYPE).setDescription("The default adapter type is 'stdout'"));
|
||||
cfgmodel.add(Param.defaultTo(FIELD_CYCLES, DEFAULT_CYCLES).setDescription("The default number of cycles to test is '0'"));
|
||||
cfgmodel.add(Param.defaultTo(FIELD_THREADS, DEFAULT_THREADS).setDescription("The default number of threads for testing is '1'"));
|
||||
cfgmodel.add(Param.defaultTo(FIELD_RECYCLES, DEFAULT_RECYCLES).setDescription("The default number of recycles to test is '1'"));
|
||||
cfgmodel.add(Param.optional("labels", String.class).setDescription("Metric labels for this activity"));
|
||||
cfgmodel.add(Param.optional("tags", String.class).setDescription("Tags for selecting workload op templates"));
|
||||
cfgmodel.add(Param.defaultTo("driver", DEFAULT_ATYPE).setDescription("The default adapter driver is 'stdout'"));
|
||||
cfgmodel.add(Param.optional("workload", String.class).setDescription("The test workload"));
|
||||
cfgmodel.add(Param.optional("yaml", String.class).setDescription("The test workload"));
|
||||
cfgmodel.add(Param.defaultTo("async", 1,"Inflight Ops"));
|
||||
cfgmodel.add(Param.defaultTo("maxtries", 10,"Maximum number of retries"));
|
||||
cfgmodel.add(Param.defaultTo("interval", 1000,"Action interval"));
|
||||
cfgmodel.add(Param.defaultTo("hdr_digits", 4,"HDR Digits"));
|
||||
cfgmodel.add(Param.optional("errors").setDescription("Error handling method"));
|
||||
cfgmodel.add(Param.optional("striderate").setDescription("Rate limiting stride"));
|
||||
List<String> rates = Arrays.asList("cyclerate", "targetrate", "rate");
|
||||
cfgmodel.add(Param.optional(rates, String.class, "Rate limit"));
|
||||
return cfgmodel.asReadOnly();
|
||||
}
|
||||
}
|
@ -161,7 +161,8 @@ public class PromExpositionFormat {
.append(stringValue)
.append('\n');
} else throw new RuntimeException(
"Unknown label set for metric type '" + metric.getClass().getCanonicalName() + '\''
"Unknown label set for metric value type '" + (value==null? "NULL" :
value.getClass()) + '\''
);
}
if (metric instanceof final Metered meter) {
@ -24,8 +24,8 @@ import java.util.regex.Pattern;
|
||||
|
||||
public class MapLabels implements NBLabels {
|
||||
|
||||
// private final static Logger logger = LogManager.getLogger(MapLabels.class);
|
||||
protected final Map<String,String> labels;
|
||||
// private final static Logger logger = LogManager.getLogger(MapLabels.class);
|
||||
protected final Map<String, String> labels;
|
||||
|
||||
public MapLabels(final Map<String, String> labels) {
|
||||
verifyValidNamesAndValues(labels);
|
||||
@ -34,23 +34,26 @@ public class MapLabels implements NBLabels {
|
||||
}
|
||||
|
||||
|
||||
public MapLabels(final Map<String,String> parentLabels, final Map<String,String> childLabels) {
|
||||
public MapLabels(
|
||||
final Map<String, String> parentLabels, final Map<String, String> childLabels) {
|
||||
final Map<String, String> combined = new LinkedHashMap<>(parentLabels);
|
||||
childLabels.forEach((k,v) -> {
|
||||
if (combined.containsKey(k))
|
||||
throw new RuntimeException("Can't overlap label keys (for instance " + k + ") between parent and child elements. parent:" + parentLabels + ", child:" + childLabels);
|
||||
combined.put(k,v);
|
||||
childLabels.forEach((k, v) -> {
|
||||
if (combined.containsKey(k)) throw new RuntimeException(
|
||||
"Can't overlap label (any) key '" + k + "' between parent (a " + parentLabels.getClass().getSimpleName() + ") and child (a " + childLabels.getClass().getSimpleName() + ") parent:" + parentLabels + ", child:" + childLabels);
|
||||
combined.put(k, v);
|
||||
});
|
||||
verifyValidNamesAndValues(combined);
|
||||
// verifyValidValues(combined);
|
||||
labels=Collections.unmodifiableMap(combined);
|
||||
labels = Collections.unmodifiableMap(combined);
|
||||
}
|
||||
|
||||
private final Pattern validNamesPattern = Pattern.compile("^[a-zA-Z_][a-zA-Z0-9_]*");
|
||||
|
||||
private void verifyValidNamesAndValues(Map<String, String> labels) {
|
||||
labels.forEach((label,value) -> {
|
||||
labels.forEach((label, value) -> {
|
||||
if (!validNamesPattern.matcher(label).matches()) {
|
||||
throw new RuntimeException("Invalid label name '" + label + "', only a-z,A-Z,_ are allowed as the initial character, and a-z,A-Z,0-9,_ are allowed after.");
|
||||
throw new RuntimeException(
|
||||
"Invalid label name '" + label + "', only a-z,A-Z,_ are allowed as the initial character, and a-z,A-Z,0-9,_ are allowed after.");
|
||||
}
|
||||
// if (!validNamesPattern.matcher(value).matches()) {
|
||||
// throw new RuntimeException("Invalid label value '" + value + "', only a-z,A-Z,_ are allowed as the initial character, and a-z,A-Z,0-9,_ are allowed after.");
|
||||
@ -61,13 +64,13 @@ public class MapLabels implements NBLabels {
|
||||
private void verifyValidValues(Map<String, String> labels) {
|
||||
for (String value : labels.values()) {
|
||||
if (!validNamesPattern.matcher(value).matches()) {
|
||||
throw new RuntimeException("Invalid label value '" + value + "', only a-z,A-Z,_ are allowed as the initial character, and a-z,A-Z,0-9,_ are allowed after.");
|
||||
throw new RuntimeException(
|
||||
"Invalid label value '" + value + "', only a-z,A-Z,_ are allowed as the initial character, and a-z,A-Z,0-9,_ are allowed after.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public String linearizeValues(final char delim, final String... included) {
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
@ -76,8 +79,9 @@ public class MapLabels implements NBLabels {
|
||||
else includedNames.addAll(this.labels.keySet());
|
||||
|
||||
for (String includedName : includedNames) {
|
||||
final boolean optional= includedName.startsWith("[") && includedName.endsWith("]");
|
||||
includedName=optional?includedName.substring(1,includedName.length()-1):includedName;
|
||||
final boolean optional = includedName.startsWith("[") && includedName.endsWith("]");
|
||||
includedName = optional ? includedName.substring(
|
||||
1, includedName.length() - 1) : includedName;
|
||||
|
||||
final String component = this.labels.get(includedName);
|
||||
if (null == component) {
|
||||
@ -86,7 +90,7 @@ public class MapLabels implements NBLabels {
|
||||
}
|
||||
sb.append(component).append(delim);
|
||||
}
|
||||
sb.setLength(sb.length()-1);
|
||||
sb.setLength(sb.length() - 1);
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
@ -101,7 +105,7 @@ public class MapLabels implements NBLabels {
|
||||
}
|
||||
}
|
||||
if (!sb.isEmpty()) {
|
||||
sb.setLength(sb.length()-"__".length());
|
||||
sb.setLength(sb.length() - "__".length());
|
||||
}
|
||||
|
||||
List<String> keys = new ArrayList<>(keyset);
|
||||
@ -110,7 +114,7 @@ public class MapLabels implements NBLabels {
|
||||
for (String key : keys) {
|
||||
sb.append("_").append(key).append("_").append(labels.get(key)).append("__");
|
||||
}
|
||||
sb.setLength(sb.length()-"__".length());
|
||||
sb.setLength(sb.length() - "__".length());
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
@ -126,7 +130,8 @@ public class MapLabels implements NBLabels {
|
||||
if (null != bareName) {
|
||||
rawName = this.labels.get(bareName);
|
||||
includedNames.remove(bareName);
|
||||
if (null == rawName) throw new RuntimeException("Unable to get value for key '" + bareName + '\'');
|
||||
if (null == rawName) throw new RuntimeException(
|
||||
"Unable to get value for key '" + bareName + '\'');
|
||||
sb.append(rawName);
|
||||
}
|
||||
if (!includedNames.isEmpty()) {
|
||||
@ -134,13 +139,9 @@ public class MapLabels implements NBLabels {
|
||||
for (final String includedName : includedNames) {
|
||||
final String includedValue = this.labels.get(includedName);
|
||||
Objects.requireNonNull(includedValue);
|
||||
sb.append(includedName)
|
||||
.append("=\"")
|
||||
.append(includedValue)
|
||||
.append('"')
|
||||
.append(',');
|
||||
sb.append(includedName).append("=\"").append(includedValue).append('"').append(',');
|
||||
}
|
||||
sb.setLength(sb.length()-",".length());
|
||||
sb.setLength(sb.length() - ",".length());
|
||||
sb.append('}');
|
||||
}
|
||||
|
||||
@ -158,7 +159,7 @@ public class MapLabels implements NBLabels {
|
||||
for (String key : keys) {
|
||||
sb.append(key).append("=\"").append(labels.get(key)).append("\",");
|
||||
}
|
||||
sb.setLength(sb.length()-",".length());
|
||||
sb.setLength(sb.length() - ",".length());
|
||||
sb.append("}");
|
||||
return sb.toString();
|
||||
|
||||
@ -176,7 +177,7 @@ public class MapLabels implements NBLabels {
|
||||
for (String key : keys) {
|
||||
sb.append(key).append("=").append(labels.get(key)).append(",");
|
||||
}
|
||||
sb.setLength(sb.length()-",".length());
|
||||
sb.setLength(sb.length() - ",".length());
|
||||
return sb.toString();
|
||||
|
||||
}
|
||||
@ -196,8 +197,8 @@ public class MapLabels implements NBLabels {
|
||||
|
||||
@Override
|
||||
public MapLabels and(final Object... labelsAndValues) {
|
||||
final Map<String,String> childLabels = getStringStringMap(labelsAndValues);
|
||||
return new MapLabels(labels,childLabels);
|
||||
final Map<String, String> childLabels = getStringStringMap(labelsAndValues);
|
||||
return new MapLabels(labels, childLabels);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -213,46 +214,51 @@ public class MapLabels implements NBLabels {
|
||||
NBLabels updated = this;
|
||||
Map<String, String> defaultMap = defaults.asMap();
|
||||
for (String name : defaultMap.keySet()) {
|
||||
updated = updated.andDefault(name,defaultMap.get(name));
|
||||
updated = updated.andDefault(name, defaultMap.get(name));
|
||||
}
|
||||
return updated;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MapLabels and(NBLabels labels) {
|
||||
return new MapLabels(this.labels,labels.asMap());
|
||||
return new MapLabels(this.labels, labels.asMap());
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBLabels modifyName(final String nameToModify, final Function<String, String> transform) {
|
||||
if (!this.labels.containsKey(nameToModify))
|
||||
throw new RuntimeException("Missing name in labels for transform: '" + nameToModify + '\'');
|
||||
public NBLabels modifyName(
|
||||
final String nameToModify,
|
||||
final Function<String, String> transform
) {
if (!this.labels.containsKey(nameToModify)) throw new RuntimeException(
"Missing name in labels for transform: '" + nameToModify + '\'');
final LinkedHashMap<String, String> newLabels = new LinkedHashMap<>(this.labels);
final String removedValue = newLabels.remove(nameToModify);
final String newName = transform.apply(nameToModify);
newLabels.put(newName,removedValue);
newLabels.put(newName, removedValue);
return new MapLabels(newLabels);
}

@Override
public NBLabels modifyValue(final String labelName, final Function<String, String> transform) {
if(!this.labels.containsKey(labelName))
throw new RuntimeException("Unable to find label name '" + labelName + "' for value transform.");
if (!this.labels.containsKey(labelName)) throw new RuntimeException(
"Unable to find label name '" + labelName + "' for value transform.");
final LinkedHashMap<String, String> newMap = new LinkedHashMap<>(this.labels);
final String value = newMap.remove(labelName);
if (null == value) throw new RuntimeException("The value for named label '" + labelName + "' is null.");
newMap.put(labelName,transform.apply(value));
if (null == value) throw new RuntimeException(
"The value for named label '" + labelName + "' is null.");
newMap.put(labelName, transform.apply(value));
return NBLabels.forMap(newMap);
}

public String toString() {
if (labels.size()==0) {
if (labels.size() == 0) {
return "{}";
}
StringBuilder sb = new StringBuilder("{");
labels.forEach((k,v) -> {
labels.forEach((k, v) -> {
sb.append(k).append(":\\\"").append(v).append("\\\"").append(",");
});
sb.setLength(sb.length()-",".length());
sb.setLength(sb.length() - ",".length());
sb.append("}");

return sb.toString();
@@ -260,10 +266,11 @@ public class MapLabels implements NBLabels {

@Override
public String valueOf(final String name) {
if (!this.labels.containsKey(name))
throw new RuntimeException("The specified key does not exist: '" + name + '\'');
if (!this.labels.containsKey(name)) throw new RuntimeException(
"The specified key does not exist: '" + name + '\'');
final String only = labels.get(name);
if (null == only) throw new RuntimeException("The specified value is null for key '" + name + '\'');
if (null == only) throw new RuntimeException(
"The specified value is null for key '" + name + '\'');
return only;
}

@@ -285,25 +292,27 @@ public class MapLabels implements NBLabels {
}

private String[] concat(String[] a, String[] b) {
String[] c = new String[a.length+b.length];
System.arraycopy(a,0,c,0,a.length);
System.arraycopy(b,0,c,a.length,b.length);
String[] c = new String[a.length + b.length];
System.arraycopy(a, 0, c, 0, a.length);
System.arraycopy(b, 0, c, a.length, b.length);
return c;
}

private static String[] getNamesArray(final Object... labelsAndValues) {
String[] keys = new String[labelsAndValues.length>>1];
String[] keys = new String[labelsAndValues.length >> 1];
for (int i = 0; i < keys.length; i++) {
keys[i]=labelsAndValues[i<<1].toString();
keys[i] = labelsAndValues[i << 1].toString();
}
return keys;
}

@NotNull
private static Map<String, String> getStringStringMap(Object[] labelsAndValues) {
if (0 != (labelsAndValues.length % 2))
throw new RuntimeException("Must provide even number of keys and values: " + Arrays.toString(labelsAndValues));
if (0 != (labelsAndValues.length % 2)) throw new RuntimeException(
"Must provide even number of keys and values: " + Arrays.toString(labelsAndValues));
final Map<String, String> childLabels = new LinkedHashMap<>();
for (int i = 0; i < labelsAndValues.length; i+=2) childLabels.put(labelsAndValues[i].toString(), labelsAndValues[i + 1].toString());
for (int i = 0; i < labelsAndValues.length; i += 2)
childLabels.put(labelsAndValues[i].toString(), labelsAndValues[i + 1].toString());
return childLabels;
}

@@ -324,31 +333,33 @@ public class MapLabels implements NBLabels {


/**
* Take the intersection of the two label sets, considering both key
* and value for each label entry. If both have the same label name
* but different values for it, then that label is not considered
* common and it is not retained in the intersection.
* @param otherLabels The label set to intersect
Take the intersection of the two label sets, considering both key
and value for each label entry. If both have the same label name
but different values for it, then that label is not considered
common and it is not retained in the intersection.
@param otherLabels
The label set to intersect
*/
@Override
public NBLabels intersection(NBLabels otherLabels) {
Map<String, String> other = otherLabels.asMap();
Map<String,String> common = new LinkedHashMap<>();
asMap().forEach((k,v) -> {
Map<String, String> common = new LinkedHashMap<>();
asMap().forEach((k, v) -> {
if (other.containsKey(k) && other.get(k).equals(v)) {
common.put(k,v);
common.put(k, v);
}
});
return NBLabels.forMap(common);
}

/**
* Subtract all matching labels from the other label set from this one,
* considering label names and values. If the other label set contains
* the same name but a different value, then it is not considered a
* match and thus not removed from the labels of this element.
* @param otherLabels Labels to remove, where key and value matches
* @return The same, or a smaller set of labels for this element
Subtract all matching labels from the other label set from this one,
considering label names and values. If the other label set contains
the same name but a different value, then it is not considered a
match and thus not removed from the labels of this element.
@param otherLabels
Labels to remove, where key and value matches
@return The same, or a smaller set of labels for this element
*/
@Override
public NBLabels difference(NBLabels otherLabels) {
@@ -356,7 +367,7 @@ public class MapLabels implements NBLabels {
NBLabels difference = NBLabels.forKV();
for (String key : labels.keySet()) {
if (!other.containsKey(key) || !other.get(key).equals(labels.get(key))) {
difference = difference.and(key,labels.get(key));
difference = difference.and(key, labels.get(key));
}
}
return difference;
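The intersection and difference semantics documented above are easiest to see with a small example. This is a usage sketch only, not part of the commit; it relies on the NBLabels.forMap factory, asMap view, and the two methods visible in this hunk, while the label names and values themselves are made up.

```
import java.util.Map;

// Two label sets that agree on one name+value pair and disagree on another.
NBLabels a = NBLabels.forMap(Map.of("region", "us-east", "driver", "cql"));
NBLabels b = NBLabels.forMap(Map.of("region", "us-east", "driver", "http"));

// Only entries matching on both name and value survive the intersection.
NBLabels common = a.intersection(b);   // contains region=us-east only

// Entries whose exact name+value pair also occurs in b are removed from a.
NBLabels onlyA = a.difference(b);      // contains driver=cql only
```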
@@ -0,0 +1,88 @@
package io.nosqlbench.nb.api.components.core;

/*
* Copyright (c) nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/


import io.nosqlbench.nb.api.components.events.NBEvent;
import io.nosqlbench.nb.api.config.standard.TestComponent;
import org.apache.commons.logging.Log;
import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.*;

public class NBComponentExecutionScopeTest {

@Test
public void verifyComponentLifecycleHooks() {
List<String> log = new ArrayList<>();
try (LoggerComponent parent = new LoggerComponent(log, "parent", "parent")) {
LoggerComponent child = new LoggerComponent(parent, log, "child", "child");
}
String concatLog = log.stream().collect(Collectors.joining("\n"));
assertThat(concatLog).matches(Pattern.compile(
"""
LoggerComponent .parent="parent".->detachChild
LoggerComponent .child="child",parent="parent".->beforeDetach
LoggerComponent .child="child",parent="parent".<-beforeDetach
LoggerComponent .parent="parent".<-detachChild""",
Pattern.MULTILINE
));
}

private final static class LoggerComponent extends TestComponent {
public final List<String> _log;

public LoggerComponent(List<String> log, String... labels) {
super(labels);
this._log = log;
}

public LoggerComponent(NBComponent parent, List<String> log, String... labels) {
super(parent, labels);
this._log = log;
}


@Override
public void beforeDetach() {
_log.add(description() + "->beforeDetach");
super.beforeDetach();
_log.add(description() + "<-beforeDetach");
}

@Override
public void onEvent(NBEvent event) {
_log.add(event.toString());
}

@Override
public NBComponent detachChild(NBComponent... children) {
_log.add(description() + "->detachChild");
NBComponent result = super.detachChild(children);
_log.add(description() + "<-detachChild");
return result;
}

}
}
@@ -744,15 +744,13 @@ public class NBCLIOptions {
final String helpmsg = """
Could not recognize command 'ARG'.
This means that all of the following searches for a compatible command failed:
1. commands: no scenario command named 'ARG' is known. (start, run, await, ...)
2. scripts: no auto script named './scripts/auto/ARG.js' in the local filesystem.
3. scripts: no auto script named 'scripts/auto/ARG.js' was found in the PROG binary.
1. commands: no command named 'ARG' is known. (start, run, await, ...)
4. workloads: no workload file named ARG[.yaml] was found in the local filesystem, even in include paths INCLUDES.
5. workloads: no workload file named ARG[.yaml] was bundled in PROG binary, even in include paths INCLUDES.
6. apps: no application named ARG was bundled in PROG.

You can discover available ways to invoke PROG by using the various --list-* commands:
[ --list-commands, --list-scripts, --list-workloads (and --list-scenarios), --list-apps ]
[ --list-commands, --list-drivers, --list-workloads (and --list-scenarios), --list-apps ]
"""
.replaceAll("ARG", cmdParam)
.replaceAll("PROG", "nb5")
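For context, the ARG and PROG tokens in the text block above are plain placeholders that the chained replaceAll calls fill in before the message is shown. A minimal standalone sketch of that pattern, with a hypothetical mistyped command in place of the real cmdParam value:

```
String help = """
    Could not recognize command 'ARG'. Try 'PROG --list-commands'.
    """
    .replaceAll("ARG", "runn")   // stand-in for the unrecognized token in cmdParam
    .replaceAll("PROG", "nb5");
System.out.println(help);
```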
@@ -4,21 +4,12 @@
# for a list of available topics run
./nb help topics

# to see the web based docs run
./nb docserver

# To run a simple built-in workload run:
./nb cql-iot

# To get a list of scenarios to run:
./nb --list-scenarios

# To get a list of scripts to run:
./nb --list-scripts

# To provide your own contact points (comma separated), add the hosts= parameter
./nb cql-iot hosts=host1,host2

# Details on all command line options are available with

./nb help commandline
@@ -1,31 +1,60 @@
Running Activities and Scenarios via CLI
========================================
# Command Line Scripting

${PROG} always runs a scenario script. However, there are multiple ways to tell
${PROG} what that script should be.
Each NoSQLBench session runs a series of commands. Some of these commands
may run asynchronously, like a background process, while other commands
run in the foreground. This allows some commands to act as orchestration
controls over others which act more like persistent processes. This occurs
entirely within the NoSQLBench process.

Any argument in name=value format serves as a parameter to the
script or activity that precedes it.
Each command runs within a specific container, which encapsulates configuration,
state, and visibility of results into episodes or partitions. This is useful
for testing scenarios which may have separately named stages or similar.

To create a scenario script that simply runs a single activity to completion,
use this format:
~~~
${PROG} activity <param>=<value> [...]
~~~
The primary way to feed commands to a NoSQLBench session is via command line scripting.
All this means is that you are providing a series of valid commands which are
implemented internally by nb5, and which execute in the order presented.

To create a scenario script that runs multiple activities concurrently,
simply add more activities to the list:
~~~
${PROG} activity <param>=<value> [...] activity <param>=<value> [...]
~~~
Apart from the activity related commands, all commands are synchronous, as if they
were simply a script. Activities can be *run* synchronously with respect to other
commands, or they can be *start*ed asynchronously and then managed by other commands
afterwards.

To execute a scenario script directly, simply use the format:
~~~
${PROG} script <scriptname> [param=value [...]]
~~~
## Conventions

Any argument in name=value format serves as a parameter to the script or activity that precedes it.

Commands can be specified one after another in a continuous stream of _command_
_paramname_=_paramvalue_ ... without ambiguity.

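As a concrete illustration of this convention, a single invocation can interleave several commands and their parameters. The commands used here (start, waitmillis, run) are documented later in this page; the parameter values are hypothetical:

```
${PROG} start driver=diag alias=background cycles=100000 waitmillis 500 run driver=diag cycles=1000
```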
## Examples
To run a single activity:
```
${PROG} run <param>=<value> [...]
```

To run multiple activities serially:
```
${PROG} run <param>=<value> [...] run <param>=<value> [...]
```

## Available Commands

To see the list of available commands, simply:
```
${PROG} --list-commands
```

To get help on any one of these commands, simply:
```
${PROG} help <command>
```

These can all be used on the command line in any order. By combining these activity commands on
the command line, you can construct non-trivial testing scenarios, controlling activity
sequencing, concurrency, and so on.

## Time & Size Units

Time & Size Units
=================
Anywhere you need to specify a time, you can use standard unit suffixes,
like "1 day", "1m", etc. Both long names and short names work in any
case. The valid time suffixes are ns, us, ms, s, m, h, d, w, y, for
@@ -33,93 +62,5 @@ nanoseconds, microseconds, milliseconds, seconds, minutes, hours,
days, weeks, or years, respectively.

As well, when specifying sizes, standard SI and IEC units apply for suffixes like
KB, KiB. For more details, see
[The Wikipedia Page](https://en.wikipedia.org/wiki/Binary_prefix).

For Scenario Designers
======================

You can build up a complex scenario by combining scripts and activities.
If this scenario needs to have some cross-shared logic, that is up to you,
the scenario designer.

## Script Parameters

Any arguments following a script in name=value form will be used to parameterize
the script. Script parameters are simply macro tokens in the form <<NAME:default>>.
All such parameters in the script will be substituted before the script executes,
so parameters may be dropped into scripts ad-hoc.

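For example, assuming a hypothetical script named myscript that contains the token <<rate:100>>, the invocation below substitutes 500 for that token before the script runs; leaving the parameter off keeps the default value of 100:
~~~
${PROG} script myscript rate=500
~~~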
## Session Names

By using the option --session-name <name>, you can name the session logfile
that will be (over)written with execution details.
~~~
${PROG} --session-name testsession42
~~~

## Metric Name

If you need to see what metrics are available for a particular activity type,
you can ask ${PROG} to instantiate an activity of that type and discover the
metrics, dumping out a list. The following form of the command shows you how
to make a list that you can copy metric names from for scripting. If you provide
an example activity alias that matches one of your scripts, you can use it exactly
as it appears.
~~~
${PROG} --list-metrics driver=diag alias=anexample
~~~
This will dump a list of metric names in the shortened format that is most suitable
for scenario script development. This format is required for the --list-metrics
option, but it should be familiar and easy to copy and paste from other command lines.

## Scripting on the command line

There are a few commands available on the command line to allow for basic control
of activities without having to edit the scenario script directly:

To start an activity without waiting for it to complete:
~~~
start <param>=<val> ...
~~~

To start an activity and then wait for it to complete before continuing:
~~~
run <param>=<value> ...
~~~

To stop an activity by its alias, first waiting for a required thread (motor/slot) to enter a specific SlotState:
~~~
stop <activity alias>
~~~

To stop an activity by its alias without first waiting for a required thread (motor/slot) to enter a specific SlotState:
~~~
forceStop <activity alias>
~~~

To wait for a particular activity that has been started to complete before continuing:
~~~
await <activity alias>
~~~

To wait for a number of milliseconds before continuing:
~~~
waitmillis <milliseconds>
~~~

To add a script fragment to the scenario script:
~~~
fragment '<ecmascript>...'
~~~

These can all be used on the command line in any order. The scenario script is assembled
from them before it is executed. If you want to see the resulting script, use the
--show-script option to dump the script to the console instead of running it.

By combining these activity commands on the command line, you can construct a non-trivial
scenario from other snippets, control activity sequencing and concurrency, etc. This does
not replace what is possible for direct scripting, but it does allow for many custom
test scenarios without it. If you want to do more advanced scripting, please consult
the scenario designer's guide.
KB, KiB. For more details, see [The Wikipedia Page](https://en.wikipedia.org/wiki/Binary_prefix).

@@ -35,12 +35,12 @@ public class NBCLIScenarioPreprocessorTemplateVarTest {
cmds.forEach(System.out::println);

OpsDocList workload1 = OpsLoader.loadPath(cmds.get(0).getArgValue("workload"), cmds.get(0).getArgMap());
OpTemplate optpl1 = workload1.getOps(true).get(0);
OpTemplate optpl1 = workload1.getOps().matching("",true).get(0);
System.out.println("op from cmd1:" + optpl1);
assertThat(optpl1.getStmt()).contains("cycle {cycle} replaced replaced\n");

OpsDocList workload2 = OpsLoader.loadPath(cmds.get(1).getArgValue("workload"), cmds.get(1).getArgMap());
OpTemplate optpl2 = workload2.getOps(true).get(0);
OpTemplate optpl2 = workload2.getOps().matching("",true).get(0);
System.out.println("op from cmd2:" + optpl2);
assertThat(optpl2.getStmt()).contains("cycle {cycle} def1 def1\n");
}
@@ -52,7 +52,7 @@ public class NBCLIScenarioPreprocessorTemplateVarTest {
cmds.forEach(System.out::println);

OpsDocList workload1 = OpsLoader.loadPath(cmds.get(0).getArgValue("workload"), cmds.get(0).getArgMap());
OpTemplate optpl1 = workload1.getOps(true).get(0);
OpTemplate optpl1 = workload1.getOps().matching("",true).get(0);
System.out.println("op from cmd1:" + optpl1);
assertThat(optpl1.getStmt()).contains("cycle {cycle} overridden overridden\n");
}
@@ -74,9 +74,10 @@ public class GrafanaRegionAnalyzer implements Runnable {
//[2020-12-15T05:04:37.232Z[GMT] - 2020-12-15T05:04:37.232Z[GMT]]
//span:interval
//details:
// params: ActivityDef:(4)/{keycount=5000000000L, hosts=node1, main-cycles=500, threads=1, workload=./keyvalue.yaml, cycles=2, stride=2, tags=block:'schema.*', password=cassandra, rf=3, pooling=16:16:500, driver=cql, rampup-cycles=5000000000, alias=keyvalue_default_schema, valuecount=5000000000L, errors=count, username=cassandra}
// params: ActivityConfig:(4)/{keycount=5000000000L, hosts=node1, main-cycles=500,
// threads=1, workload=./keyvalue.yaml, cycles=2, stride=2, tags=block:'schema.*', password=cassandra, rf=3, pooling=16:16:500, driver=cql, rampup-cycles=5000000000, alias=keyvalue_default_schema, valuecount=5000000000L, errors=count, username=cassandra}
//labels:
// layer: Activity
// layer: StandardActivity
// alias: keyvalue_default_schema
// driver: cql
// workload: ./keyvalue.yaml
@@ -16,7 +16,7 @@
package io.nosqlbench.engine.api.activityapi.core;

/**
* An ActionDispenser is created for each Activity instance within a scenario.
* An ActionDispenser is created for each StandardActivity instance within a scenario.
* When a thread is created, the motor and its input and action instances are resolved.
* The ActionDispenser is responsible for choosing how the action is resolved,
* whether that is a shared thread-safe action or an action per slot.
@@ -29,5 +29,5 @@ public interface ActionDispenser {
* @param slot The numbered slot within the activity instance for this action.
* @return A new or cached Action for the specified slot.
*/
Action getAction(int slot);
SyncAction getAction(int slot);
}
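To make the dispenser contract concrete, here is a minimal per-slot dispenser against the new SyncAction-returning signature. It is illustrative only and not part of this commit; DiagActionDispenser and DiagAction are hypothetical names standing in for any concrete dispenser and action pair.

```
// Hypothetical example: hand out one independent action per thread slot.
public class DiagActionDispenser implements ActionDispenser {
    @Override
    public SyncAction getAction(int slot) {
        // DiagAction is a stand-in for any SyncAction implementation; each slot
        // gets its own instance, so no shared mutable state needs to be thread safe.
        return new DiagAction(slot);
    }
}
```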
@@ -16,6 +16,8 @@

package io.nosqlbench.engine.api.activityapi.core;

import io.nosqlbench.engine.api.activityimpl.uniform.Activity;

import java.util.Map;

/**
@@ -23,5 +25,5 @@ import java.util.Map;
* activities that are present in a configuration. Those dispensers will have th
*/
public interface ActivitiesAware {
void setActivitiesMap(Map<String,Activity> activities);
void setActivitiesMap(Map<String, Activity> activities);
}
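A minimal sketch of how a component might consume this callback follows. The class name and field are hypothetical; only the interface and its single setter come from this hunk.

```
import java.util.Map;

// Hypothetical consumer of the activities map injected by the runtime.
public class CrossActivityReporter implements ActivitiesAware {
    private Map<String, Activity> activities = Map.of();

    @Override
    public void setActivitiesMap(Map<String, Activity> activities) {
        this.activities = activities; // keyed by activity alias
    }
}
```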
@ -1,152 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.core;
|
||||
|
||||
import io.nosqlbench.nb.api.components.core.NBComponent;
|
||||
import io.nosqlbench.nb.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.nb.api.engine.activityimpl.ParameterMap;
|
||||
import io.nosqlbench.engine.api.activityapi.core.progress.ProgressCapable;
|
||||
import io.nosqlbench.engine.api.activityapi.core.progress.StateCapable;
|
||||
import io.nosqlbench.engine.api.activityapi.cyclelog.filters.IntPredicateDispenser;
|
||||
import io.nosqlbench.engine.api.activityapi.errorhandling.ErrorMetrics;
|
||||
import io.nosqlbench.engine.api.activityapi.input.InputDispenser;
|
||||
import io.nosqlbench.engine.api.activityapi.output.OutputDispenser;
|
||||
import io.nosqlbench.engine.api.activityapi.simrate.RateLimiter;
|
||||
import io.nosqlbench.engine.api.activityimpl.SimpleActivity;
|
||||
import io.nosqlbench.engine.api.activityimpl.motor.RunStateTally;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.io.PrintWriter;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
/**
|
||||
* Provides the components needed to build and run an activity a runtime.
|
||||
* The easiest way to build a useful Activity is to extend {@link SimpleActivity}.
|
||||
*/
|
||||
public interface Activity extends Comparable<Activity>, ActivityDefObserver, ProgressCapable, StateCapable, NBComponent {
|
||||
|
||||
/**
|
||||
* Register an object which should be closed after this activity is shutdown.
|
||||
*
|
||||
* @param closeable An Autocloseable object
|
||||
*/
|
||||
void registerAutoCloseable(AutoCloseable closeable);
|
||||
|
||||
ActivityDef getActivityDef();
|
||||
|
||||
default String getAlias() {
|
||||
return this.getActivityDef().getAlias();
|
||||
}
|
||||
|
||||
default ParameterMap getParams() {
|
||||
return this.getActivityDef().getParams();
|
||||
}
|
||||
|
||||
default void initActivity() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Close all autocloseables that have been registered with this Activity.
|
||||
*/
|
||||
void closeAutoCloseables();
|
||||
|
||||
MotorDispenser<?> getMotorDispenserDelegate();
|
||||
|
||||
void setMotorDispenserDelegate(MotorDispenser<?> motorDispenser);
|
||||
|
||||
InputDispenser getInputDispenserDelegate();
|
||||
|
||||
void setInputDispenserDelegate(InputDispenser inputDispenser);
|
||||
|
||||
ActionDispenser getActionDispenserDelegate();
|
||||
|
||||
void setActionDispenserDelegate(ActionDispenser actionDispenser);
|
||||
|
||||
IntPredicateDispenser getResultFilterDispenserDelegate();
|
||||
|
||||
void setResultFilterDispenserDelegate(IntPredicateDispenser resultFilterDispenser);
|
||||
|
||||
OutputDispenser getMarkerDispenserDelegate();
|
||||
|
||||
void setOutputDispenserDelegate(OutputDispenser outputDispenser);
|
||||
|
||||
@Override
|
||||
RunState getRunState();
|
||||
|
||||
void setRunState(RunState runState);
|
||||
|
||||
long getStartedAtMillis();
|
||||
|
||||
default void shutdownActivity() {
|
||||
}
|
||||
|
||||
default String getCycleSummary() {
|
||||
return this.getActivityDef().getCycleSummary();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current cycle rate limiter for this activity.
|
||||
* The cycle rate limiter is used to throttle the rate at which
|
||||
* cycles are dispatched across all threads in the activity
|
||||
* @return the cycle {@link RateLimiter}
|
||||
*/
|
||||
RateLimiter getCycleLimiter();
|
||||
|
||||
|
||||
/**
|
||||
* Get the current stride rate limiter for this activity.
|
||||
* The stride rate limiter is used to throttle the rate at which
|
||||
* new strides are dispatched across all threads in an activity.
|
||||
* @return The stride {@link RateLimiter}
|
||||
*/
|
||||
RateLimiter getStrideLimiter();
|
||||
|
||||
/**
|
||||
* Get or create the instrumentation needed for this activity. This provides
|
||||
* a single place to find and manage, and document instrumentation that is
|
||||
* uniform across all activities.
|
||||
*
|
||||
* @return A new or existing instrumentation object for this activity.
|
||||
*/
|
||||
ActivityInstrumentation getInstrumentation();
|
||||
|
||||
PrintWriter getConsoleOut();
|
||||
|
||||
InputStream getConsoleIn();
|
||||
|
||||
void setConsoleOut(PrintWriter writer);
|
||||
|
||||
ErrorMetrics getExceptionMetrics();
|
||||
|
||||
// /**
|
||||
// * When a driver needs to identify an error uniquely for the purposes of
|
||||
// * routing it to the correct error handler, or naming it in logs, or naming
|
||||
// * metrics, override this method in your activity.
|
||||
// * @return A function that can reliably and safely map an instance of Throwable to a stable name.
|
||||
// */
|
||||
// default Function<Throwable,String> getErrorNameMapper() {
|
||||
// return t -> t.getClass().getSimpleName();
|
||||
// }
|
||||
//
|
||||
int getMaxTries();
|
||||
|
||||
default int getHdrDigits() {
|
||||
return this.getParams().getOptionalInteger("hdr_digits").orElse(4);
|
||||
}
|
||||
|
||||
RunStateTally getRunStateTally();
|
||||
}
|
@@ -1,36 +0,0 @@
/*
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.engine.api.activityapi.core;

import io.nosqlbench.nb.api.engine.activityimpl.ActivityDef;

/**
* Decorator interface for getting notified when an activities parameters are changed at runtime.
*
* This can be optionally implemented by Any Motor, Input, or Action. The eventing is mediated
* through the ActivityExecutor in order to isolate the programmatic API from the internal API.
*/
public interface ActivityDefObserver {
void onActivityDefUpdate(ActivityDef activityDef);

static void apply(ActivityDef def, Object... candidates) {
for (Object candidate : candidates) {
if (candidate instanceof ActivityDefObserver observer) {
observer.onActivityDefUpdate(def);
}
}
}
}
@ -25,13 +25,6 @@ import io.nosqlbench.nb.api.engine.metrics.instruments.NBMetricGauge;
|
||||
|
||||
import java.util.concurrent.Future;
|
||||
|
||||
/**
|
||||
* All the accessors of the metrics that will be used for each activity instance.
|
||||
* Implementors of this interface should ensure that the methods are synchronized
|
||||
* to avoid race conditions during lazy init from callers.
|
||||
*
|
||||
*
|
||||
*/
|
||||
public interface ActivityInstrumentation {
|
||||
|
||||
NBMetricGauge getOrCreateErrorsTotal();
|
||||
@ -44,103 +37,26 @@ public interface ActivityInstrumentation {
|
||||
|
||||
NBMetricGauge getOrCreateErrorRateTotal();
|
||||
|
||||
/**
|
||||
* The input timer measures how long it takes to get the cycle value to be used for
|
||||
* an operation.
|
||||
* @return a new or existing {@link Timer}
|
||||
*/
|
||||
Timer getOrCreateInputTimer();
|
||||
|
||||
/**
|
||||
* The strides service timer measures how long it takes to complete a stride of work.
|
||||
* @return a new or existing {@link Timer}
|
||||
*/
|
||||
Timer getOrCreateStridesServiceTimer();
|
||||
|
||||
/**
|
||||
* The strides response timer measures the total response time from the scheduled
|
||||
* time a stride should start to when it completed. Stride scheduling is only defined
|
||||
* when it is implied by a stride rate limiter, so this method should return null if
|
||||
* there is no strides rate limiter.
|
||||
* @return a new or existing {@link Timer} if appropriate, else null
|
||||
*/
|
||||
Timer getStridesResponseTimerOrNull();
|
||||
|
||||
/**
|
||||
* The cycles service timer measures how long it takes to complete a cycle of work.
|
||||
* @return a new or existing {@link Timer}
|
||||
*/
|
||||
Timer getOrCreateCyclesServiceTimer();
|
||||
|
||||
/**
|
||||
* The cycles response timer measures the total response time from the scheduled
|
||||
* time an operation should start to when it is completed. Cycle scheduling is only defined
|
||||
* when it is implied by a cycle rate limiter, so this method should return null if
|
||||
* there is no cycles rate limiter.
|
||||
* @return a new or existing {@link Timer} if appropriate, else null
|
||||
*/
|
||||
Timer getCyclesResponseTimerOrNull();
|
||||
|
||||
/**
|
||||
* The pending ops counter keeps track of how many ops are submitted or in-flight, but
|
||||
* which haven't been completed yet.
|
||||
* @return a new or existing {@link Counter}
|
||||
*/
|
||||
Counter getOrCreatePendingOpCounter();
|
||||
|
||||
/**
|
||||
* The bind timer keeps track of how long it takes for NoSQLBench to create an instance
|
||||
* of an executable operation, given the cycle. This is usually done by using an
|
||||
* {@link OpSequence} in conjunction with
|
||||
* an {@link OpDispenser}. This is named for "binding
|
||||
* a cycle to an operation".
|
||||
* @return a new or existing {@link Timer}
|
||||
*/
|
||||
Timer getOrCreateBindTimer();
|
||||
|
||||
/**
|
||||
* The execute timer keeps track of how long it takes to submit an operation to be executed
|
||||
* to an underlying native driver. For asynchronous APIs, such as those which return a
|
||||
* {@link Future}, this is simply the amount of time it takes to acquire the future.
|
||||
*
|
||||
* When possible, APIs should be used via their async methods, even if you are implementing
|
||||
* a {@link SyncAction}. This allows the execute timer to measure the hand-off to the underlying API,
|
||||
* and the result timer to measure the blocking calls to aquire the result.
|
||||
* @return a new or existing {@link Timer}
|
||||
*/
|
||||
Timer getOrCreateExecuteTimer();
|
||||
|
||||
/**
|
||||
* The result timer keeps track of how long it takes a native driver to service a request once submitted.
|
||||
* This timer, in contrast to the result-success timer ({@link #getOrCreateResultSuccessTimer()}),
|
||||
* is used to track all operations. That is, no matter
|
||||
* whether the operation succeeds or not, it should be tracked with this timer. The scope of this timer should
|
||||
* cover each attempt at an operation through a native driver. Retries are not to be combined in this measurement.
|
||||
* @return a new or existing {@link Timer}
|
||||
*/
|
||||
Timer getOrCreateResultTimer();
|
||||
|
||||
/**
|
||||
* The result-success timer keeps track of operations which had no exception. The measurements for this timer should
|
||||
* be exactly the same values as used for the result timer ({@link #getOrCreateResultTimer()}, except that
|
||||
* attempts to complete an operation which yield an exception should be excluded from the results. These two metrics
|
||||
* together provide a very high level sanity check against the error-specific metrics which can be reported by
|
||||
* the error handler logic.
|
||||
* @return a new or existing {@link Timer}
|
||||
*/
|
||||
Timer getOrCreateResultSuccessTimer();
|
||||
|
||||
/**
|
||||
* The tries histogram tracks how many tries it takes to complete an operation successfully, or not. This histogram
|
||||
* does not encode whether operations were successful or not. Ideally, if every attempt to complete an operation succeeds
|
||||
* on its first try, the data in this histogram should all be 1. In practice, systems which are running near their
|
||||
* capacity will see a few retried operations, and systems that are substantially over-driven will see many retried
|
||||
* operations. As the retries value increases the further down the percentile scale you go, you can detect system loading
|
||||
* patterns which are in excess of the real-time capability of the target system.
|
||||
*
|
||||
* This metric should be measured around every retry loop for a native operation.
|
||||
* @return a new or existing {@link Histogram}
|
||||
*/
|
||||
Histogram getOrCreateTriesHistogram();
|
||||
|
||||
Timer getOrCreateVerifierTimer();
|
||||
|
@@ -0,0 +1,68 @@
/*
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package io.nosqlbench.engine.api.activityapi.core;

import io.nosqlbench.engine.api.activityapi.core.progress.ProgressCapable;
import io.nosqlbench.engine.api.activityapi.core.progress.StateCapable;
import io.nosqlbench.engine.api.activityapi.cyclelog.filters.IntPredicateDispenser;
import io.nosqlbench.engine.api.activityapi.input.InputDispenser;
import io.nosqlbench.engine.api.activityapi.output.OutputDispenser;
import io.nosqlbench.engine.api.activityimpl.uniform.Activity;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.engine.activityimpl.ActivityConfig;

import java.io.InputStream;
import java.io.PrintWriter;

/// This is a vestigial layer which will be removed. It originally provided a way
/// to assemble ad-hoc activity logic from component factory delegates. This meant that the
/// core activity engine could be wired differently from a set of variations in each component.
/// The core engine has been consolidated at this point and modal behaviors pushed to variations
/// of edge components -- particularly in op synthesis and modifiers to op behavior. Thus, this
/// layer is no longer needed and should be removed.
public interface ActivityWiring
extends Comparable<ActivityWiring>, ProgressCapable, StateCapable, NBComponent
{

ActivityConfig getActivityConfig();

MotorDispenser<?> getMotorDispenserDelegate();

void setMotorDispenserDelegate(MotorDispenser<?> motorDispenser);

InputDispenser getInputDispenserDelegate();

void setInputDispenserDelegate(InputDispenser inputDispenser);

ActionDispenser getActionDispenserDelegate();

void setActionDispenserDelegate(ActionDispenser actionDispenser);

IntPredicateDispenser getResultFilterDispenserDelegate();

void setResultFilterDispenserDelegate(IntPredicateDispenser resultFilterDispenser);

OutputDispenser getMarkerDispenserDelegate();

void setOutputDispenserDelegate(OutputDispenser outputDispenser);

PrintWriter getConsoleOut();

InputStream getConsoleIn();

void setConsoleOut(PrintWriter writer);
}
@@ -49,8 +49,7 @@ public interface AsyncAction<D> extends Action {
* concurrency limits for the new cycle.
*
* Each action implementation is responsible for tracking and controlling
* its own limits of concurrency. The {@link BaseAsyncAction} base class is a
* convenient starting point for such implementations.
* its own limits of concurrency.
*
* If the action is known to have additional open slots for an operations to
* be started (according to the configured concurrency limits),
@ -1,72 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2022-2023 nosqlbench
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.nosqlbench.engine.api.activityapi.core;
|
||||
|
||||
import io.nosqlbench.engine.api.activityapi.core.ops.fluent.opfacets.TrackedOp;
|
||||
import io.nosqlbench.nb.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.nb.api.engine.activityimpl.ParameterMap;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
|
||||
/**
|
||||
*
|
||||
* @param <D> An type of state holder for an operation, holding everything unique to that cycle and operation
|
||||
* @param <A> An type of of an Activity, a state holder for a runtime instance of an Activity
|
||||
*/
|
||||
public abstract class BaseAsyncAction<D, A extends Activity> implements AsyncAction<D>, Stoppable, ActivityDefObserver {
|
||||
private final static Logger logger = LogManager.getLogger("BaseAsyncAction");
|
||||
|
||||
protected final A activity;
|
||||
|
||||
protected int slot;
|
||||
protected boolean running = true;
|
||||
|
||||
public BaseAsyncAction(A activity, int slot) {
|
||||
this.activity = activity;
|
||||
this.slot = slot;
|
||||
|
||||
onActivityDefUpdate(activity.getActivityDef());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onActivityDefUpdate(ActivityDef activityDef) {
|
||||
ParameterMap params = activityDef.getParams();
|
||||
params.getOptionalInteger("async").orElseThrow(
|
||||
() -> new RuntimeException("the async parameter is required to activate async actions"));
|
||||
}
|
||||
|
||||
public boolean enqueue(TrackedOp<D> opc) {
|
||||
startOpCycle(opc);
|
||||
return (running);
|
||||
}
|
||||
|
||||
/**
|
||||
* Implementations that extend this base class can call this method in order to put
|
||||
* an operation in flight. Implementations should call either {@link TrackedOp#skip(int)}
|
||||
* or {@link TrackedOp#start()}}.
|
||||
*
|
||||
* @param opc A tracked operation with state of parameterized type D
|
||||
*/
|
||||
public abstract void startOpCycle(TrackedOp<D> opc);
|
||||
|
||||
@Override
|
||||
public void requestStop() {
|
||||
logger.info(() -> this + " requested to stop.");
|
||||
this.running = false;
|
||||
}
|
||||
|
||||
}
|
@ -17,26 +17,22 @@
|
||||
package io.nosqlbench.engine.api.activityapi.core;
|
||||
|
||||
import com.codahale.metrics.Counter;
|
||||
import com.codahale.metrics.Histogram;
|
||||
import com.codahale.metrics.Timer;
|
||||
import io.nosqlbench.nb.api.engine.activityimpl.ActivityDef;
|
||||
import io.nosqlbench.engine.api.activityimpl.uniform.Activity;
|
||||
import io.nosqlbench.nb.api.engine.activityimpl.ParameterMap;
|
||||
import io.nosqlbench.nb.api.engine.metrics.instruments.*;
|
||||
|
||||
public class ComponentActivityInstrumentation implements ActivityInstrumentation {
|
||||
public class ComponentActivityInstrumentation {
|
||||
|
||||
private static final String WAIT_TIME = "_waittime";
|
||||
private static final String SERVICE_TIME = "_servicetime";
|
||||
private static final String RESPONSE_TIME = "_responsetime";
|
||||
|
||||
private final Activity activity;
|
||||
private final ActivityDef def;
|
||||
private final ParameterMap params;
|
||||
private final int hdrdigits;
|
||||
private NBMetricTimer readInputTimer;
|
||||
private NBMetricTimer stridesServiceTimer;
|
||||
private NBMetricTimer stridesResponseTimer;
|
||||
private NBMetricTimer cyclesServiceTimer;
|
||||
private NBMetricTimer cyclesResponseTimer;
|
||||
private NBMetricCounter pendingOpsCounter;
|
||||
private NBMetricTimer bindTimer;
|
||||
@ -54,97 +50,12 @@ public class ComponentActivityInstrumentation implements ActivityInstrumentation
|
||||
|
||||
public ComponentActivityInstrumentation(final Activity activity) {
|
||||
this.activity = activity;
|
||||
def = activity.getActivityDef();
|
||||
params = this.def.getParams();
|
||||
hdrdigits = activity.getHdrDigits();
|
||||
this.hdrdigits = activity.getComponentProp("hdr_digits").map(Integer::parseInt).orElse(3);
|
||||
initMetrics();
|
||||
}
|
||||
|
||||
private void initMetrics() {
|
||||
readInputTimer = activity.create().timer(
|
||||
"read_input",
|
||||
this.hdrdigits,
|
||||
MetricCategory.Internals,
|
||||
"measures overhead of acquiring a cycle range for an activity thread"
|
||||
);
|
||||
stridesServiceTimer = activity.create().timer(
|
||||
"strides",
|
||||
this.hdrdigits,
|
||||
MetricCategory.Core,
|
||||
"service timer for a stride, which is the same as the op sequence length by default"
|
||||
);
|
||||
if (null != activity.getStrideLimiter()) {
|
||||
this.stridesResponseTimer = activity.create().timer(
|
||||
"strides" + ComponentActivityInstrumentation.RESPONSE_TIME,
|
||||
hdrdigits,
|
||||
MetricCategory.Core,
|
||||
"response timer for a stride, which is the same as the op sequence length by default;" +
|
||||
" response timers include scheduling delays which occur when an activity falls behind its target rate"
|
||||
);
|
||||
}
|
||||
this.cyclesServiceTimer = activity.create().timer(
|
||||
"cycles" + ComponentActivityInstrumentation.SERVICE_TIME,
|
||||
hdrdigits,
|
||||
MetricCategory.Core,
|
||||
"service timer for a cycle, including all of bind, execute, result and result_success;" +
|
||||
" service timers measure the time between submitting a request and receiving the response"
|
||||
);
|
||||
if (null != activity.getCycleLimiter()) {
|
||||
this.cyclesResponseTimer = activity.create().timer(
|
||||
"cycles" + ComponentActivityInstrumentation.RESPONSE_TIME,
|
||||
hdrdigits,
|
||||
MetricCategory.Core,
|
||||
"response timer for a cycle, including all of bind, execute, result and result_success;" +
|
||||
" response timers include scheduling delays which occur when an activity falls behind its target rate"
|
||||
);
|
||||
}
|
||||
this.pendingOpsCounter = activity.create().counter(
|
||||
"pending_ops",
|
||||
MetricCategory.Core,
|
||||
"Indicate the number of operations which have been started, but which have not been completed." +
|
||||
" This starts "
|
||||
);
|
||||
|
||||
this.bindTimer = activity.create().timer(
|
||||
"bind",
|
||||
hdrdigits,
|
||||
MetricCategory.Core,
|
||||
"Time the step within a cycle which binds generated data to an op template to synthesize an executable operation."
|
||||
);
|
||||
|
||||
this.executeTimer = activity.create().timer(
|
||||
"execute",
|
||||
hdrdigits,
|
||||
MetricCategory.Core,
|
||||
"Time how long it takes to submit a request and receive a result, including reading the result in the client."
|
||||
);
|
||||
this.resultTimer = activity.create().timer(
|
||||
"result",
|
||||
hdrdigits,
|
||||
MetricCategory.Core,
|
||||
"Time how long it takes to submit a request, receive a result, including binding, reading results, " +
|
||||
"and optionally verifying them, including all operations whether successful or not, for each attempted request."
|
||||
);
|
||||
this.resultSuccessTimer = activity.create().timer(
|
||||
"result_success",
|
||||
hdrdigits,
|
||||
MetricCategory.Core,
|
||||
"The execution time of successful operations, which includes submitting the operation, waiting for a response, and reading the result"
|
||||
);
|
||||
this.triesHistogram = activity.create().histogram(
|
||||
"tries",
|
||||
hdrdigits,
|
||||
MetricCategory.Core,
|
||||
"A histogram of all tries for an activity. Perfect results mean all quantiles return 1." +
|
||||
" Slight saturation is indicated by p99 or p95 returning higher values." +
|
||||
" Lower quantiles returning more than 1, or higher values at high quantiles indicate incremental overload."
|
||||
);
|
||||
this.verifierTimer = activity.create().timer(
|
||||
"verifier",
|
||||
hdrdigits,
|
||||
MetricCategory.Verification,
|
||||
"Time the execution of verifier code, if any"
|
||||
);
|
||||
this.errorRate1m = activity.create().gauge("error_rate_1m",
|
||||
() -> {
|
||||
double result_1m_rate = this.resultTimer.getOneMinuteRate();
|
||||
@ -204,87 +115,16 @@ public class ComponentActivityInstrumentation implements ActivityInstrumentation
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public NBMetricGauge getOrCreateErrorsTotal() {
|
||||
return this.errorsTotal;
|
||||
}
|
||||
@Override
|
||||
public NBMetricGauge getOrCreateErrorRate1m() {
|
||||
return this.errorRate1m;
|
||||
}
|
||||
@Override
|
||||
public NBMetricGauge getOrCreateErrorRate5m() {
|
||||
return this.errorRate5m;
|
||||
}
|
||||
@Override
|
||||
public NBMetricGauge getOrCreateErrorRate15m() {
|
||||
return this.errorRate15m;
|
||||
}
|
||||
@Override
|
||||
public NBMetricGauge getOrCreateErrorRateTotal() {
|
||||
return this.errorRateTotal;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Timer getOrCreateInputTimer() {
|
||||
return readInputTimer;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Timer getOrCreateStridesServiceTimer() {
|
||||
return stridesServiceTimer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Timer getStridesResponseTimerOrNull() {
|
||||
return stridesResponseTimer;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Timer getOrCreateCyclesServiceTimer() {
|
||||
return cyclesServiceTimer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Timer getCyclesResponseTimerOrNull() {
|
||||
return cyclesResponseTimer;
|
||||
}
|
||||
|
||||
@Override
|
||||
/// The pending ops counter keeps track of how many ops are submitted or in-flight, but
|
||||
/// which haven't been completed yet.
|
||||
public Counter getOrCreatePendingOpCounter() {
|
||||
return pendingOpsCounter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Timer getOrCreateBindTimer() {
|
||||
return bindTimer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Timer getOrCreateExecuteTimer() {
|
||||
return executeTimer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Timer getOrCreateResultTimer() {
|
||||
return resultTimer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Timer getOrCreateResultSuccessTimer() {
|
||||
return resultSuccessTimer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Histogram getOrCreateTriesHistogram() {
|
||||
return triesHistogram;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Timer getOrCreateVerifierTimer() {
|
||||
return verifierTimer;
|
||||
}
|
||||
}
|
||||
|
@@ -34,14 +34,6 @@ public interface Motor<T> extends Runnable, Stoppable {

Input getInput();

/**
* Set the action on this motor. It will be applied to each input.
*
* @param action an instance of activityAction
* @return this ActivityMotor, for method chaining
*/
Motor<T> setAction(Action action);

Action getAction();

/**
@@ -15,10 +15,11 @@
*/
package io.nosqlbench.engine.api.activityapi.core;

import io.nosqlbench.nb.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.nb.api.engine.activityimpl.ActivityConfig;

/**
* A MotorDispenser is created for each Activity instance within a scenario.
* A MotorDispenser is created for each StandardActivity instance within a scenario.
* When a thread is created, the motor and its input and action instances are resolved.
* The MotorDispenser is responsible for choosing how the motor is resolved,
* whether that is a shared thread-safe motor or, more conventionally, a separate motor per slot.
@@ -29,9 +30,10 @@ public interface MotorDispenser<T> {
* Resolve (find or create) a Motor instance for the slot specified.
* The motor is not required to be per-slot (per-thread), but any shared motors must be thread safe.
*
* @param activityDef the ActivityDef which will be used to parameterize the returned motor
* @param activityConfig the activity config which will be used to parameterize the returned
* motor
* @param slot The numbered slot within the activity instance for this motor
* @return A new or cached Motor for the specified slot.
*/
Motor<T> getMotor(ActivityDef activityDef, int slot);
Motor<T> getMotor(ActivityConfig activityConfig, int slot);
}
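A minimal per-slot dispenser sketch against the new ActivityConfig-based signature. This is not from the commit; PerSlotMotorDispenser is a hypothetical name and CoreMotor stands in for whatever Motor implementation the runtime actually wires up.

```
// Hypothetical: resolve a fresh motor per slot, parameterized by the activity config.
public class PerSlotMotorDispenser<T> implements MotorDispenser<T> {
    @Override
    public Motor<T> getMotor(ActivityConfig activityConfig, int slot) {
        // Stand-in constructor; a real dispenser would also attach the slot's
        // input and action instances here.
        return new CoreMotor<>(activityConfig, slot);
    }
}
```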
@@ -18,14 +18,6 @@ package io.nosqlbench.engine.api.activityapi.core;

public interface SyncAction extends Action {

/**
* <p>Apply a work function to an input value, producing an int status code.</p>
* The meaning of status codes is activity specific, however the values Integer.MIN_VALUE,
* and Integer.MAX_VALUE are reserved.
*
* @param cycle a long input
* @return an int status
*/
default int runCycle(long cycle) {
return (int) cycle % 100;
}
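For reference, a minimal SyncAction sketch showing the cycle-to-status contract described above. The class and its placeholder logic are hypothetical, and any other members of the Action hierarchy are ignored here.

```
// Hypothetical synchronous action: do the work for one cycle, return a status code.
public class NoopSyncAction implements SyncAction {
    @Override
    public int runCycle(long cycle) {
        // 0 is used as an activity-specific "ok" status; the reserved values
        // Integer.MIN_VALUE and Integer.MAX_VALUE are never returned.
        return 0;
    }
}
```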
Some files were not shown because too many files have changed in this diff.