checkpoint; many fixes and enhancements:

* use a config file with modular elements
* eliminated opaque DDL hacks; everything is now generated from AST data
* (thus) obfuscation is supported
* weighting based on partition counts
* (from weighting) modulo bindings with quantized cardinalities
* binding re-use for identical definitions
* custom timeouts supported per op type
* extended CQL driver properties
* name-based workload phases
Jonathan Shook 2022-07-17 03:39:19 -05:00
parent fb200e0b58
commit c0cb4c0226
21 changed files with 1088 additions and 415 deletions

View File

@@ -18,13 +18,19 @@ package io.nosqlbench.adapter.cqld4.opdispensers;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.DefaultConsistencyLevel;
import com.datastax.oss.driver.api.core.config.DriverExecutionProfile;
import com.datastax.oss.driver.api.core.cql.Statement;
import com.datastax.oss.driver.api.core.metadata.Node;
import com.datastax.oss.driver.api.core.metadata.token.Token;
import io.nosqlbench.adapter.cqld4.Cqld4OpMetrics;
import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlOp;
import io.nosqlbench.engine.api.activityimpl.BaseOpDispenser;
import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
import io.nosqlbench.engine.api.templating.ParsedOp;
import java.nio.ByteBuffer;
import java.time.Duration;
import java.util.Map;
import java.util.function.LongFunction;
public abstract class BaseCqlStmtDispenser extends BaseOpDispenser<Cqld4CqlOp> {
@@ -67,8 +73,23 @@ public abstract class BaseCqlStmtDispenser extends BaseOpDispenser<Cqld4CqlOp> {
protected LongFunction<Statement> getEnhancedStmtFunc(LongFunction<Statement> basefunc, ParsedOp op) {
LongFunction<Statement> partial = basefunc;
partial = op.enhanceEnum(partial, "cl", DefaultConsistencyLevel.class, Statement::setConsistencyLevel);
partial = op.enhanceEnum(partial, "consistency_level", DefaultConsistencyLevel.class, Statement::setConsistencyLevel);
partial = op.enhanceEnum(partial, "scl", DefaultConsistencyLevel.class, Statement::setSerialConsistencyLevel);
partial = op.enhanceEnum(partial, "serial_consistency_level", DefaultConsistencyLevel.class, Statement::setSerialConsistencyLevel);
partial = op.enhanceFuncOptionally(partial, "idempotent", Boolean.class, Statement::setIdempotent);
partial = op.enhanceFuncOptionally(partial, "timeout", double.class, (statement, l) -> statement.setTimeout(Duration.ofMillis((long)(l*1000L))));
partial = op.enhanceFuncOptionally(partial, "custom_payload", Map.class, Statement::setCustomPayload);
partial = op.enhanceFuncOptionally(partial, "execution_profile", DriverExecutionProfile.class, Statement::setExecutionProfile);
partial = op.enhanceFuncOptionally(partial, "execution_profile_name", String.class, Statement::setExecutionProfileName);
partial = op.enhanceFuncOptionally(partial, "node", Node.class, Statement::setNode);
partial = op.enhanceFuncOptionally(partial, "now_in_seconds", int.class, Statement::setNowInSeconds);
partial = op.enhanceFuncOptionally(partial, "page_size", int.class, Statement::setPageSize);
partial = op.enhanceFuncOptionally(partial, "query_timestamp", long.class, Statement::setQueryTimestamp);
partial = op.enhanceFuncOptionally(partial, "routing_key", ByteBuffer.class, Statement::setRoutingKey);
partial = op.enhanceFuncOptionally(partial, "routing_keys", ByteBuffer[].class, Statement::setRoutingKey);
partial = op.enhanceFuncOptionally(partial, "routing_token", Token.class, Statement::setRoutingToken);
partial = op.enhanceFuncOptionally(partial, "tracing", boolean.class, Statement::setTracing);
return partial;
}
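Each of these calls decorates the base statement function only when the corresponding field is present in the op template, so unused driver properties add no per-cycle cost. A minimal sketch of the composition idiom, with enhanceOptionally standing in (hypothetically) for ParsedOp.enhanceFuncOptionally:

import java.util.Optional;
import java.util.function.BiFunction;
import java.util.function.LongFunction;

final class EnhanceSketch {
    // If the op template defines the field, wrap base so each produced statement
    // also has the setter applied; otherwise return base unchanged. The driver's
    // Statement setters return new immutable instances, hence the returned value is used.
    static <T, V> LongFunction<T> enhanceOptionally(
            LongFunction<T> base, Optional<V> fieldValue, BiFunction<T, V, T> setter) {
        return fieldValue.<LongFunction<T>>map(v -> cycle -> setter.apply(base.apply(cycle), v))
                         .orElse(base);
    }
}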

View File

@@ -22,33 +22,20 @@ import io.nosqlbench.api.labels.Labeled;
import java.util.Map;
public class CqlColumnDef implements NBNamedElement, Labeled {
private String refDefinitionDdl;
private String refTypeDefddl;
private String table;
private CqlTable table;
private String keyspace;
private String name;
private String type;
private int position;
private ColType coltype;
public CqlColumnDef(String colname, String typedef, String refColumnDdl) {
public CqlColumnDef(CqlTable table, int position, String colname, String typedef) {
this.table = table;
this.type = typedef;
this.name = colname;
this.refDefinitionDdl = refColumnDdl;
}
public String getDefinitionDdl() {
return refDefinitionDdl;
}
public void setTypedfRefDdl(String textOfTypeDefOnly) {
this.refTypeDefddl = textOfTypeDefOnly;
}
public void setDefinitionRefDdl(String textOfWholeDefinition) {
this.refDefinitionDdl = textOfWholeDefinition;
}
public void setType(String type) {
public void setTypeDef(String type) {
this.type = type;
}
@@ -64,8 +51,8 @@ public class CqlColumnDef implements NBNamedElement, Labeled {
return type.replaceAll(" ","");
}
public String getTable() {
return table;
public String getTableName() {
return table.getName();
}
public String getKeyspace() {
@@ -80,10 +67,11 @@ public class CqlColumnDef implements NBNamedElement, Labeled {
@Override
public Map<String, String> getLabels() {
return Map.of(
"column", name,
"name", name,
"typedef", type,
"table", table,
"keyspace", keyspace
"table", table.getName(),
"keyspace", keyspace,
"type", "column"
);
}
@@ -91,7 +79,7 @@ public class CqlColumnDef implements NBNamedElement, Labeled {
this.keyspace = keyspace;
}
public void setTable(String table) {
public void setTable(CqlTable table) {
this.table = table;
}
@@ -102,4 +90,21 @@ public class CqlColumnDef implements NBNamedElement, Labeled {
public void setName(String name) {
this.name = name;
}
public String getSyntax() {
return getName()+" "+getTrimmedTypedef();
}
public boolean isPartitionKey() {
return table.isPartitionKey(position);
}
public boolean isLastPartitionKey() {
return table.isLastPartitionKey(position);
}
public boolean isClusteringColumn() {
return table.isClusteringColumn(position);
}
public boolean isLastClusteringColumn() {
return table.isLastClusteringColumn(position);
}
}

View File

@@ -24,17 +24,14 @@ import java.util.Map;
public class CqlKeyspace implements NBNamedElement, Labeled {
String keyspaceName= "";
String refddl;
private String refReplDdl;
CGKeyspaceStats stats;
private boolean isDurableWrites;
private String replicationData;
public CqlKeyspace() {
}
public void setKeyspaceName(String newname) {
if (this.refddl!=null) {
this.refddl = refddl.replaceAll(this.keyspaceName, newname);
}
this.keyspaceName=newname;
}
@@ -42,46 +39,42 @@ public class CqlKeyspace implements NBNamedElement, Labeled {
return this.keyspaceName;
}
public void setRefDdl(String refddl) {
this.refddl = refddl;
}
@Override
public String toString() {
return "CqlKeyspace{" +
"keyspaceName='" + keyspaceName + '\'' +
", refddl='" + refddl + '\'' +
", stats=" + stats +
", isDurableWrites=" + isDurableWrites +
", replicationData='" + replicationData + '\'' +
'}';
}
public String getRefddl() {
return refddl;
}
@Override
public Map<String, String> getLabels() {
return Map.of(
"keyspace", keyspaceName
"name", keyspaceName,
"type","keyspace"
);
}
public void setRefReplDdl(String newRefReplDdl) {
if (this.refddl!=null) {
this.refddl=this.refddl.replaceAll(this.refReplDdl,newRefReplDdl);
}
this.refReplDdl=newRefReplDdl;
}
public String getRefDdlWithReplFields(String replFields) {
refddl.replace(refReplDdl,replFields);
return refddl;
}
public String getReplRefDdl() {
return this.refReplDdl;
}
public void setStats(CGKeyspaceStats ksstats) {
this.stats=ksstats;
}
public boolean isDurableWrites() {
return isDurableWrites;
}
public void setDurableWrites(boolean isDurableWrites) {
this.isDurableWrites = isDurableWrites;
}
public void setReplicationData(String repldata) {
this.replicationData = repldata;
}
public String getReplicationData() {
return this.replicationData;
}
}

View File

@@ -77,22 +77,6 @@ public class CqlModel {
}
}
}
// schemaStats.getKeyspaces().forEach((ksname, ksstats) -> {
// CqlKeyspace modelKs = getKeyspacesByName().get(ksname);
// if (modelKs!=null) {
// modelKs.setStats(ksstats);
// ksstats.getKeyspaceTables().forEach((tbname, tbstats) -> {
// Map<String, CqlTable> tabledefs = tableDefs.get(ksname);
// if (tabledefs!=null) {
// for (CqlTable tabledef : tabledefs.values()) {
// tabledef.setTableAttributes(tbstats);
// }
// }
// });
//
// }
// });
}
transient CqlKeyspace keyspace = null;
@@ -117,7 +101,6 @@ public class CqlModel {
public void saveKeyspace(String text,String refddl) {
keyspace.setKeyspaceName(text);
keyspace.setRefDdl(refddl);
this.keyspaceDefs.put(text, keyspace);
keyspace=null;
}
@@ -126,16 +109,15 @@ public class CqlModel {
table = new CqlTable();
}
public void saveTable(String keyspace, String text, String refddl) {
public void saveTable(String keyspace, String text) {
table.setKeyspace(keyspace);
table.setName(text);
table.setRefDdl(refddl);
this.tableDefs.computeIfAbsent(keyspace, ks->new LinkedHashMap<>()).put(text, table);
table = null;
}
public void saveColumnDefinition(String colname, String coltype, boolean isPrimaryKey, String refddl) {
this.table.addcolumnDef(colname, coltype, refddl);
public void saveColumnDefinition(String colname, String typedef, boolean isPrimaryKey, int position) {
this.table.addcolumnDef(colname, typedef, position);
if (isPrimaryKey) {
this.table.addPartitionKey(colname);
}
@@ -202,21 +184,16 @@ public class CqlModel {
table.addClusteringColumn(ccolumn);
}
public void setReplicationText(String repldata) {
keyspace.setRefReplDdl(repldata);
}
public void newType() {
udt = new CqlType();
}
public void addTypeField(String name, String typedef, String typedefRefDdl) {
udt.addField(name, typedef, typedefRefDdl);
public void addTypeField(String name, String typedef) {
udt.addField(name, typedef);
}
public void saveType(String keyspace, String name, String refddl) {
public void saveType(String keyspace, String name) {
udt.setKeyspace(keyspace);
udt.setRefddl(refddl);
udt.setName(name);
Map<String, CqlType> ksTypes = this.types.computeIfAbsent(keyspace, ks -> new LinkedHashMap<>());
ksTypes.put(udt.getName(),udt);
@@ -277,11 +254,11 @@ public class CqlModel {
}
}
public void renameTable(String keyspaceName, String tableName, String newTableName) {
Map<String, CqlTable> tablesInKeyspace = tableDefs.get(keyspaceName);
CqlTable table = tablesInKeyspace.remove(tableName);
public void renameTable(CqlTable extant, String newTableName) {
Map<String, CqlTable> tablesInKs = tableDefs.get(extant.getKeySpace());
CqlTable table = tablesInKs.get(extant.getName());
table.setName(newTableName);
tablesInKeyspace.put(newTableName,table);
tablesInKs.put(table.getName(),table);
}
public void renameType(String keyspaceName, String typeName, String newTypeName) {
@@ -290,4 +267,24 @@ public class CqlModel {
cqlType.setName(newTypeName);
typesInKeyspace.put(newTypeName,cqlType);
}
public void setTableCompactStorage(boolean isCompactStorage) {
table.setCompactStorage(isCompactStorage);
}
public void setKeyspaceDurableWrites(String booleanLiteral) {
keyspace.setDurableWrites(Boolean.parseBoolean(booleanLiteral));
}
public void setReplicationData(String repldata) {
keyspace.setReplicationData(repldata);
}
public Map<String, Map<String, CqlType>> getTypesByKeyspaceAndName() {
return types;
}
public void addClusteringOrder(String colname, String order) {
table.addTableClusteringOrder(colname, order);
}
}

View File

@@ -26,6 +26,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.List;
import java.util.stream.IntStream;
public class CqlModelBuilder extends CqlParserBaseListener {
private final static Logger logger = LogManager.getLogger(CqlModelBuilder.class);
@@ -33,6 +34,7 @@ public class CqlModelBuilder extends CqlParserBaseListener {
private final CGErrorListener errorListener;
private final CqlModel model;
private long counted;
private int colindex;
public CqlModelBuilder(CGErrorListener errorListener) {
this.errorListener = errorListener;
@@ -41,7 +43,7 @@ public class CqlModelBuilder extends CqlParserBaseListener {
@Override
public void exitEveryRule(ParserRuleContext ctx) {
if ((counted++&0b11111111111111)==0b10000000000000) {
if ((counted++ & 0b11111111111111) == 0b10000000000000) {
logger.trace("parsed " + counted + " elements...");
}
}
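The mask arithmetic above throttles trace output: counted & 0b11111111111111 keeps the low 14 bits of the counter, and the test matches only when they equal 0b10000000000000 (8192); the message therefore fires at counts 8192, 24576, 40960, and so on, once every 16,384 parsed elements.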
@@ -51,8 +53,8 @@ public class CqlModelBuilder extends CqlParserBaseListener {
System.out.println("error parsing: " + node.toString());
ParseTree parent = node.getParent();
String errorNodeType = parent.getClass().getSimpleName();
// System.out.println("error type: " + errorNodeType);
// System.out.println("source interval: " + node.getSourceInterval());
logger.info("PARSE ERROR: " + errorNodeType + "\n"+ node.getSourceInterval());
super.visitErrorNode(node);
}
@@ -73,7 +75,7 @@ public class CqlModelBuilder extends CqlParserBaseListener {
@Override
public void exitReplicationList(CqlParser.ReplicationListContext ctx) {
String repldata = textOf(ctx);
model.setReplicationText(repldata);
model.setReplicationData(repldata);
}
@Override
@@ -102,7 +104,7 @@
}
}
} else if (ctx.compoundKey() != null) {
model.addClusteringColumn(ctx.compoundKey().partitionKey().column().getText());
model.addPartitionKey(ctx.compoundKey().partitionKey().getText());
for (CqlParser.ClusteringKeyContext ccol : ctx.compoundKey().clusteringKeyList().clusteringKey()) {
model.addClusteringColumn(ccol.column().getText());
}
@@ -119,8 +121,7 @@
public void exitCreateType(CqlParser.CreateTypeContext ctx) {
String keyspace = ctx.keyspace().getText();
String name = ctx.type_().getText();
String refddl = textOf(ctx);
model.saveType(keyspace, name, refddl);
model.saveType(keyspace, name);
}
@@ -132,8 +133,7 @@
for (int idx = 0; idx < columns.size(); idx++) {
model.addTypeField(
columns.get(idx).getText(),
dataTypes.get(idx).getText(),
textOf(dataTypes.get(idx))
dataTypes.get(idx).getText()
);
}
@@ -149,11 +149,51 @@
public void exitCreateTable(CqlParser.CreateTableContext ctx) {
model.saveTable(
ctx.keyspace().getText(),
ctx.table().getText(),
textOf(ctx)
ctx.table().getText()
);
}
@Override
public void exitOrderDirection(CqlParser.OrderDirectionContext ctx) {
}
@Override
public void exitTableOptionItem(CqlParser.TableOptionItemContext ctx) {
if (ctx.kwCompactStorage()!=null) {
model.setTableCompactStorage(true);
}
super.exitTableOptionItem(ctx);
}
@Override
public void exitDurableWrites(CqlParser.DurableWritesContext ctx) {
model.setKeyspaceDurableWrites(ctx.booleanLiteral().getText());
}
@Override
public void exitClusteringOrder(CqlParser.ClusteringOrderContext ctx) {
List<String> columns = ctx.children.stream()
.filter(c -> c instanceof CqlParser.ColumnContext)
.map(c -> c.getText())
.toList();
List<String> orders = ctx.children.stream()
.filter(c -> c instanceof CqlParser.OrderDirectionContext)
.map(c -> c.getText())
.toList();
IntStream.range(0, columns.size())
.forEach(i -> model.addClusteringOrder(columns.get(i), orders.get(i)));
}
// @Override
// public void exitColumn(CqlParser.ColumnContext ctx) {
// super.exitColumn(ctx);
// }
private String textOf(ParserRuleContext ctx) {
int startIndex = ctx.start.getStartIndex();
int stopIndex = ctx.stop.getStopIndex();
@@ -166,13 +206,18 @@ public class CqlModelBuilder extends CqlParserBaseListener {
public void enterColumnDefinition(CqlParser.ColumnDefinitionContext ctx) {
}
@Override
public void enterColumnDefinitionList(CqlParser.ColumnDefinitionListContext ctx) {
this.colindex = 0;
}
@Override
public void exitColumnDefinition(CqlParser.ColumnDefinitionContext ctx) {
model.saveColumnDefinition(
ctx.column().getText(),
textOf(ctx.dataType()),
ctx.primaryKeyColumn() != null,
textOf(ctx)
colindex++
);
}

View File

@@ -20,25 +20,27 @@ import io.nosqlbench.api.config.NBNamedElement;
import io.nosqlbench.api.labels.Labeled;
import io.nosqlbench.converters.cql.exporters.CGTableStats;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
public class CqlTable implements NBNamedElement, Labeled {
String name = "";
String keyspace = "";
List<CqlColumnDef> coldefs = new ArrayList<>();
CGTableStats tableAttributes = null;
List<String> partitionKeys = new ArrayList<>();
List<String> clusteringColumns = new ArrayList<>();
private String refddl;
int[] partitioning = new int[0];
int[] clustering = new int[0];
List<String> clusteringOrders = new ArrayList<>();
List<CqlColumnDef> coldefs = new ArrayList<>();
private boolean compactStorage;
public CqlTable() {
}
public boolean isCompactStorage() {
return compactStorage;
}
public CGTableStats getTableAttributes() {
return tableAttributes;
}
@@ -53,13 +55,10 @@ public class CqlTable implements NBNamedElement, Labeled {
public void setName(String tableName) {
this.name = tableName;
for (CqlColumnDef coldef : coldefs) {
coldef.setTable(tableName);
}
}
public void addcolumnDef(String colname, String typedef, String refColumnDdl) {
coldefs.add(new CqlColumnDef(colname, typedef, refColumnDdl));
public void addcolumnDef(String colname, String typedef, int position) {
coldefs.add(new CqlColumnDef(this, coldefs.size(), colname, typedef));
}
@Override
@@ -81,27 +80,12 @@ public class CqlTable implements NBNamedElement, Labeled {
public void setKeyspace(String newKsName) {
for (CqlColumnDef coldef : coldefs) {
coldef.setKeyspace(keyspace);
if (coldef.getDefinitionDdl()!=null) {
coldef.setDefinitionRefDdl(coldef.getDefinitionDdl().replaceAll(keyspace,newKsName));
}
}
if (this.refddl!=null) {
this.refddl = this.refddl.replaceAll(this.keyspace,newKsName);
coldef.setKeyspace(newKsName);
}
this.keyspace = newKsName;
}
public String getRefDdl() {
return this.refddl;
}
public void setRefDdl(String refddl) {
this.refddl = refddl;
}
public String getKeySpace() {
return this.keyspace;
}
@@ -110,24 +94,50 @@
public Map<String, String> getLabels() {
return Map.of(
"keyspace", this.keyspace,
"table", this.name
"name", this.name,
"type", "table"
);
}
public void addPartitionKey(String pkey) {
this.partitionKeys.add(pkey);
int[] newdefs = new int[partitioning.length + 1];
System.arraycopy(partitioning, 0, newdefs, 0, partitioning.length);
for (int i = 0; i < coldefs.size(); i++) {
if (coldefs.get(i).getName().equals(pkey)) {
newdefs[newdefs.length - 1] = i;
break;
}
}
this.partitioning = newdefs;
}
public void addClusteringColumn(String ccol) {
this.clusteringColumns.add(ccol);
int[] newdefs = new int[clustering.length + 1];
System.arraycopy(clustering, 0, newdefs, 0, clustering.length);
for (int i = 0; i < coldefs.size(); i++) {
if (coldefs.get(i).getName().equals(ccol)) {
newdefs[newdefs.length - 1] = i;
break;
}
}
this.clustering = newdefs;
}
public void addTableClusteringOrder(String colname, String order) {
clusteringOrders.add(order);
}
public List<String> getClusteringOrders() {
return clusteringOrders;
}
public List<String> getPartitionKeys() {
return this.partitionKeys;
return Arrays.stream(partitioning).mapToObj(i -> this.coldefs.get(i).getName()).toList();
}
public List<String> getClusteringColumns() {
return this.clusteringColumns;
return Arrays.stream(clustering).mapToObj(i -> this.coldefs.get(i).getName()).toList();
}
public CqlColumnDef getColumnDefForName(String colname) {
@@ -142,18 +152,43 @@
return def.orElseThrow();
}
public void renameColumns(Function<String,String> renamer) {
public void renameColumns(Function<String, String> renamer) {
for (CqlColumnDef coldef : coldefs) {
coldef.setName(renamer.apply(coldef.getName()));
}
}
public List<CqlColumnDef> getNonKeyColumnDefinitions() {
return coldefs.stream()
.filter(n -> !partitionKeys.contains(n.getName()))
.filter(n -> !clusteringColumns.contains(n.getName()))
.toList();
int last = partitioning[partitioning.length - 1];
last = (clustering.length > 0 ? clustering[clustering.length - 1] : last);
List<CqlColumnDef> nonkeys = new ArrayList<>();
for (int nonkey = last; nonkey < coldefs.size(); nonkey++) {
nonkeys.add(coldefs.get(nonkey));
}
return nonkeys;
}
public void setCompactStorage(boolean isCompactStorage) {
this.compactStorage = isCompactStorage;
}
public String getFullName() {
return (this.keyspace != null ? this.keyspace + "." : "") + this.name;
}
public boolean isPartitionKey(int position) {
return position < partitioning.length;
}
public boolean isLastPartitionKey(int position) {
return position == partitioning.length - 1;
}
public boolean isClusteringColumn(int position) {
return clustering.length > 0 && position < clustering[clustering.length - 1] && position >= clustering[0];
}
public boolean isLastClusteringColumn(int position) {
return clustering.length > 0 && position == clustering[clustering.length - 1];
}
}
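The partitioning and clustering arrays store column ordinals into coldefs rather than names, so the key-membership checks above reduce to integer comparisons. A small worked sketch with a hypothetical table t (partition keys id and bucket, clustering column ts):

CqlTable t = new CqlTable();
t.addcolumnDef("id", "text", 0);       // coldefs[0]
t.addcolumnDef("bucket", "int", 1);    // coldefs[1]
t.addcolumnDef("ts", "timestamp", 2);  // coldefs[2]
t.addcolumnDef("body", "text", 3);     // coldefs[3]
t.addPartitionKey("id");               // partitioning == {0}
t.addPartitionKey("bucket");           // partitioning == {0, 1}
t.addClusteringColumn("ts");           // clustering   == {2}
t.isPartitionKey(1);         // true: 1 < partitioning.length
t.isLastPartitionKey(1);     // true: 1 == partitioning.length - 1
t.isLastClusteringColumn(2); // true: 2 == clustering[clustering.length - 1]

Note that this assumes key columns occupy the leading positions of the column list, and that isClusteringColumn as written compares strictly against the last clustering ordinal, so it returns false for the final clustering column even when isLastClusteringColumn returns true.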

View File

@@ -17,15 +17,17 @@
package io.nosqlbench.converters.cql.cqlast;
import io.nosqlbench.api.config.NBNamedElement;
import io.nosqlbench.api.labels.Labeled;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Function;
public class CqlType implements NBNamedElement {
public class CqlType implements NBNamedElement, Labeled {
private String keyspace;
private String name;
private String refddl;
private final Map<String,String> fields = new LinkedHashMap<>();
private Map<String,String> fields = new LinkedHashMap<>();
public void setKeyspace(String newksname) {
this.keyspace = newksname;
@@ -37,10 +39,6 @@ public class CqlType implements NBNamedElement {
this.name = name;
}
public void setRefddl(String ddl) {
this.refddl = ddl;
}
public String getKeyspace() {
return keyspace;
}
@@ -49,7 +47,7 @@
return this.name;
}
public void addField(String name, String typedef, String typedefRefDdl) {
public void addField(String name, String typedef) {
this.fields.put(name, typedef);
}
@@ -57,7 +55,18 @@ public class CqlType implements NBNamedElement {
return fields;
}
public String getRefDdl() {
return this.refddl;
@Override
public Map<String, String> getLabels() {
return Map.of(
"keyspace", this.keyspace,
"type","udt",
"name",name
);
}
public void renameColumns(Function<String, String> renamer) {
Map<String,String> newColumns = new LinkedHashMap<>();
fields.forEach((k,v)->newColumns.put(renamer.apply(k),v));
this.fields = newColumns;
}
}

View File

@@ -18,10 +18,7 @@ package io.nosqlbench.converters.cql.exporters;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.nosqlbench.converters.cql.cqlast.CqlColumnDef;
import io.nosqlbench.converters.cql.cqlast.CqlKeyspace;
import io.nosqlbench.converters.cql.cqlast.CqlModel;
import io.nosqlbench.converters.cql.cqlast.CqlTable;
import io.nosqlbench.converters.cql.cqlast.*;
import io.nosqlbench.converters.cql.exporters.binders.*;
import io.nosqlbench.converters.cql.exporters.transformers.CGTransformersInit;
import io.nosqlbench.converters.cql.parser.CqlModelParser;
@@ -41,6 +38,7 @@ import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* The unit of generation is simply everything that is provided to the exporter together.
@@ -57,13 +55,25 @@ public class CGWorkloadExporter {
private NamingFolio namer;
private BindingsAccumulator bindings = new BindingsAccumulator(namer, List.of(defaultBindings));
private CqlModel model;
private final CqlModel model;
private final Map<String, String> bindingsMap = new LinkedHashMap<>();
private final int DEFAULT_RESOLUTION = 10000;
String replication;
String namingTemplate;
private List<String> includedKeyspaces;
private double partitionMultiplier;
private final Map<String, Double> timeouts = new HashMap<String, Double>(Map.of(
"create", 60.0,
"truncate", 900.0,
"drop", 900.0,
"scan", 30.0,
"select", 10.0,
"insert", 10.0,
"delete", 10.0,
"update", 10.0
));
private boolean elideUnusedTables;
private Map<String, List<String>> blockplan = Map.of();
public CGWorkloadExporter(CqlModel model, CGTransformersInit transformers) {
this.model = model;
@@ -137,6 +147,11 @@ public class CGWorkloadExporter {
String partition_multipler = cfgmap.get("partition_multiplier").toString();
exporter.setPartitionMultiplier(Double.parseDouble(partition_multipler));
exporter.configureTimeouts(cfgmap.get("timeouts"));
exporter.configureElideUnusedTables(cfgmap.get("elide_unused_tables"));
exporter.configureBlocks(cfgmap.get("blockplan"));
String workload = exporter.getWorkloadAsYaml();
try {
Files.writeString(
@@ -155,21 +170,7 @@
}
}
private void setPartitionMultiplier(double multipler) {
this.partitionMultiplier = multipler;
}
private void setIncludedKeyspaces(List<String> includeKeyspaces) {
this.includedKeyspaces = includeKeyspaces;
}
public void setNamingTemplate(String namingTemplate) {
this.namingTemplate = namingTemplate;
this.namer = new NamingFolio(namingTemplate);
this.bindings = new BindingsAccumulator(namer, List.of(defaultBindings));
}
public Map<String, Object> getWorkload() {
public Map<String, Object> generateBlocks() {
namer.informNamerOfAllKnownNames(model);
Map<String, Object> workload = new LinkedHashMap<>();
@@ -178,207 +179,219 @@
workload.put("bindings", bindingsMap);
Map<String, Object> blocks = new LinkedHashMap<>();
workload.put("blocks", blocks);
blocks.put("schema-keyspaces", genKeyspacesSchemaBlock(model));
blocks.put("schema-tables", genTablesSchemaBlock(model));
blocks.put("schema-types", genTypesSchemaBlock(model));
blocks.put("truncate", genTruncateBlock(model));
blocks.put("rampup", genRampupBlock(model));
blocks.put("main", genMainBlock(model));
for (Map.Entry<String, List<String>> blocknameAndComponents : blockplan.entrySet()) {
String blockname = blocknameAndComponents.getKey();
List<String> components = blocknameAndComponents.getValue();
LinkedHashMap<String, Object> block = new LinkedHashMap<>(
Map.of("params", new LinkedHashMap<String, Object>())
);
for (String component : components) {
Map<String, Object> additions = switch (component) {
case "schema-keyspaces" -> genCreateKeyspacesOpTemplates(model, blockname);
case "schema-tables" -> genCreateTablesOpTemplates(model, blockname);
case "schema-types" -> genCreateTypesOpTemplates(model, blockname);
case "drop-types" -> genDropTypesBlock(model, blockname);
case "drop-tables" -> genDropTablesBlock(model, blockname);
case "drop-keyspaces" -> genDropKeyspacesOpTemplates(model, blockname);
case "truncate-tables" -> genTruncateTablesOpTemplates(model, blockname);
case "insert" -> genInsertOpTemplates(model, blockname);
case "select" -> genSelectOpTemplates(model, blockname);
case "scan-10" -> genScanOpTemplates(model, blockname);
case "update" -> genUpdateOpTemplates(model, blockname);
default -> throw new RuntimeException("Unable to create block entries for " + component + ".");
};
block.putAll(additions);
}
simplifyTimeouts(block);
blocks.put(blockname, block);
}
bindingsMap.putAll(bindings.getAccumulatedBindings());
return workload;
}
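Block generation is now name-driven: each blockplan entry maps a block name to the component generators that populate it, and every op name is tagged with the block name through the namer. A minimal sketch of the mapping involved, using entries from the bundled config file shown later in this commit:

Map<String, List<String>> blockplan = Map.of(
    "rampup", List.of("insert"),
    "main-insert", List.of("insert"),
    "main-select", List.of("select"));
// each entry becomes one block; e.g. "main-insert" is filled by
// genInsertOpTemplates(model, "main-insert"), and the "main" scenario
// then selects all such blocks with tags=block:'main-.*'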
private Map<String, Object> genTypesSchemaBlock(CqlModel model) {
return Map.of();
}
private Map<String, Object> genScenarios(CqlModel model) {
return Map.of(
"default", Map.of(
"schema", "run driver=cql tags=block:'schema-.*' threads===UNDEF cycles===UNDEF",
"schema-keyspaces", "run driver=cql tags=block:'schema-keyspaces' threads===UNDEF cycles===UNDEF",
"schema-tables", "run driver=cql tags=block:'schema-tables' threads===UNDEF cycles===UNDEF",
"schema-types", "run driver=cql tags=block:'schema-types' threads===UNDEF cycles===UNDEF",
"rampup", "run driver=cql tags=block:rampup threads=auto cycles===TEMPLATE(rampup-cycles,10000)",
"main", "run driver=cql tags=block:main threads=auto cycles===TEMPLATE(main-cycles,10000)"
),
"truncate", "run driver=cql tags=block:truncate threads===UNDEF cycles===UNDEF"
);
}
private Map<String, Object> genMainBlock(CqlModel model) {
Map<String, Object> mainOpTemplates = new LinkedHashMap<>();
for (CqlTable table : model.getTableDefs()) {
if (!isCounterTable(table)) {
Optional<String> insertTemplate = this.genInsertTemplate(table);
if (insertTemplate.isPresent()) {
mainOpTemplates.put(namer.nameFor(table, "optype", "insert"),
Map.of(
"stmt", insertTemplate.get(),
"ratio", writeRatioFor(table)
)
);
} else {
throw new RuntimeException("Unable to generate main insert template for table '" + table + "'");
}
} else {
logger.info("skipped insert for counter table '" + table.getName() + "'");
}
Optional<String> updateTemplate = this.genUpdateTemplate(table);
if (updateTemplate.isPresent()) {
mainOpTemplates.put(namer.nameFor(table, "optype", "update"),
Map.of(
"stmt", updateTemplate.get(),
"ratio", writeRatioFor(table)
)
);
} else {
throw new RuntimeException("Unable to generate main insert template for table '" + table + "'");
}
Optional<String> selectTemplate = this.genSelectTemplate(table);
if (selectTemplate.isPresent()) {
mainOpTemplates.put(namer.nameFor(table, "optype", "select"),
Map.of(
"stmt", selectTemplate.get(),
"ratio", readRatioFor(table))
);
} else {
throw new RuntimeException("Unable to generate main select template for table '" + table + "'");
}
}
return Map.of("ops", mainOpTemplates);
}
private boolean isCounterTable(CqlTable table) {
return table.getColumnDefinitions().stream()
.anyMatch(cd -> cd.getTrimmedTypedef().equalsIgnoreCase("counter"));
}
private int readRatioFor(CqlTable table) {
if (table.getTableAttributes()==null ||table.getTableAttributes().size() == 0) {
return 1;
}
double weighted_reads = Double.parseDouble(table.getTableAttributes().getAttribute("weighted_reads"));
return (int) (weighted_reads * DEFAULT_RESOLUTION);
}
private int writeRatioFor(CqlTable table) {
if (table.getTableAttributes()==null ||table.getTableAttributes().size() == 0) {
return 1;
}
double weighted_writes = Double.parseDouble(table.getTableAttributes().getAttribute("weighted_writes"));
return (int) (weighted_writes * DEFAULT_RESOLUTION);
}
private Map<String, Object> genRampupBlock(CqlModel model) {
Map<String, String> rampupOpTemplates = new LinkedHashMap<>();
for (CqlTable table : model.getTableDefs()) {
if (!isCounterTable(table)) {
Optional<String> insert = genInsertTemplate(table);
if (insert.isPresent()) {
rampupOpTemplates.put(namer.nameFor(table, "optype", "insert"), insert.get());
} else {
throw new RuntimeException("Unable to create rampup template for table '" + table + "'");
}
}
}
return Map.of("ops", rampupOpTemplates);
}
private Optional<String> genSelectTemplate(CqlTable table) {
try {
return Optional.of("select * from " + table.getKeySpace() + "." + table.getName() +
"\n WHERE " + genPredicateTemplate(table));
} catch (UnresolvedBindingException ube) {
logger.error(ube);
return Optional.empty();
}
}
private String genPredicateTemplate(CqlTable table) {
StringBuilder sb = new StringBuilder();
List<CqlColumnDef> pkeys = new ArrayList<>();
for (String pkey : table.getPartitionKeys()) {
CqlColumnDef coldef = table.getColumnDefForName(pkey);
pkeys.add(coldef);
}
for (String ccol : table.getClusteringColumns()) {
CqlColumnDef coldef = table.getColumnDefForName(ccol);
pkeys.add(coldef);
}
pkeys.stream().map(this::genPredicatePart)
.forEach(p -> {
sb.append(p).append("\n AND ");
private void simplifyTimeouts(Map<String, Object> block) {
Map<Double, List<String>> byTimeout = new LinkedHashMap<>();
Map<String, Object> ops = (Map<String, Object>) block.get("ops");
ops.forEach((opname, opmap) -> {
double timeout = (double) (((Map<String, Object>) opmap).get("timeout"));
byTimeout.computeIfAbsent(timeout, d -> new ArrayList<>()).add(opname);
});
sb.setLength(sb.length() - "\n AND ".length());
return sb.toString();
List<Double> timeouts = byTimeout.keySet().stream().sorted(Double::compare).toList();
if (timeouts.size() == 1) {
((Map<String, Object>) block.computeIfAbsent("params", p -> new LinkedHashMap<>())).put("timeout", timeouts.get(0));
Set<String> opnames = ((Map<String, Object>) block.get("ops")).keySet();
for (String opname : opnames) {
Map<String, Object> opmap = (Map<String, Object>) ops.get(opname);
Map<String, Object> newOp = new LinkedHashMap<>(opmap);
newOp.remove("timeout");
ops.put(opname, newOp);
}
}
}
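When every op in a block declares the same timeout, the common value is hoisted into the block's params and the per-op copies are removed; blocks with mixed timeouts are left untouched. Roughly, in the generated YAML (op names hypothetical):

# before simplification
ops:
  insert-a: { prepared: "...", timeout: 10.0 }
  insert-b: { prepared: "...", timeout: 10.0 }

# after simplification
params:
  timeout: 10.0
ops:
  insert-a: { prepared: "..." }
  insert-b: { prepared: "..." }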
private String genPredicatePart(CqlColumnDef def) {
String typeName = def.getTrimmedTypedef();
Binding binding = bindings.forColumn(def);
return def.getName() + "={" + binding.name() + "}";
private void configureBlocks(Object generate_blocks_spec) {
if (generate_blocks_spec == null) {
throw new RuntimeException("Error with generate blocks, required parameter 'blockplan' is missing");
}
if (generate_blocks_spec instanceof Map blocksmap) {
Map<String, List<String>> planmap = new LinkedHashMap<>();
for (Map.Entry<String, String> blockplan : ((Map<String, String>) blocksmap).entrySet()) {
planmap.put(blockplan.getKey(), Arrays.stream(blockplan.getValue().split(", ")).toList());
}
this.blockplan = planmap;
} else {
throw new RuntimeException("Unrecognized type '" + generate_blocks_spec.getClass().getSimpleName() + "' for 'blockplan' config.");
}
}
private Optional<String> genUpdateTemplate(CqlTable table) {
try {
private void configureElideUnusedTables(Object elide_unused_tables) {
if (elide_unused_tables == null) {
this.elideUnusedTables = false;
} else {
this.elideUnusedTables = Boolean.parseBoolean(elide_unused_tables.toString());
}
}
return Optional.of("""
update KEYSPACE.TABLE
set ASSIGNMENTS
where PREDICATES;
public void configureTimeouts(Object spec) {
if (spec instanceof Map specmap) {
for (Object key : specmap.keySet()) {
if (this.timeouts.containsKey(key.toString())) {
Object value = specmap.get(key.toString());
if (value instanceof Number number) {
this.timeouts.put(key.toString(), number.doubleValue());
logger.info("configured '" + key + "' timeout as " + this.timeouts.get(key.toString()) + "S");
}
} else {
throw new RuntimeException("timeout type '" + key + "' unknown. Known types: " + this.timeouts.keySet());
}
}
}
}
private void setPartitionMultiplier(double multipler) {
this.partitionMultiplier = multipler;
}
public void setNamingTemplate(String namingTemplate) {
this.namingTemplate = namingTemplate;
this.namer = new NamingFolio(namingTemplate);
this.bindings = new BindingsAccumulator(namer, List.of(defaultBindings));
}
private LinkedHashMap<String, Object> genScenarios(CqlModel model) {
return new LinkedHashMap<>() {{
put("default",
new LinkedHashMap<>() {{
put("schema", "run driver=cql tags=block:'schema-.*' threads===UNDEF cycles===UNDEF");
put("rampup", "run driver=cql tags=block:'rampup-.*' threads=auto cycles===TEMPLATE(rampup-cycles,10000)");
put("main", "run driver=cql tags=block:'main-.*' threads=auto cycles===TEMPLATE(main-cycles,10000)");
}});
put("truncate", "run driver=cql tags=block:'truncate-.*' threads===UNDEF cycles===UNDEF");
put("schema-keyspaces", "run driver=cql tags=block:'schema-keyspaces' threads===UNDEF cycles===UNDEF");
put("schema-types", "run driver=cql tags=block:'schema-types' threads===UNDEF cycles===UNDEF");
put("schema-tables", "run driver=cql tags=block:'schema-tables' threads===UNDEF cycles===UNDEF");
put("drop", "run driver=cql tags=block:'drop-.*' threads===UNDEF cycles===UNDEF");
put("drop-tables", "run driver=cql tags=block:'drop-tables' threads===UNDEF cycles===UNDEF");
put("drop-types", "run driver=cql tags=block:'drop-types' threads===UNDEF cycles===UNDEF");
put("drop-keyspaces", "run driver=cql tags=block:'drop-keyspaces' threads===UNDEF cycles===UNDEF");
}};
}
private Map<String, Object> genScanOpTemplates(CqlModel model, String blockname) {
Map<String, Object> blockdata = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
blockdata.put("ops", ops);
for (CqlTable table : model.getTableDefs()) {
ops.put(
namer.nameFor(table, "optype", "scan", "blockname", blockname),
Map.of(
"prepared", genScanSyntax(table),
"timeout", timeouts.get("scan"),
"ratio", readRatioFor(table)
)
);
}
return blockdata;
}
private String genScanSyntax(CqlTable table) {
return """
select * from KEYSPACE.TABLE
PREDICATE
LIMIT;
"""
.replaceAll("KEYSPACE", table.getKeySpace())
.replaceAll("TABLE", table.getName())
.replaceAll("PREDICATES", genPredicateTemplate(table))
.replaceAll("ASSIGNMENTS", genAssignments(table)));
} catch (UnresolvedBindingException ube) {
return Optional.empty();
}
.replace("KEYSPACE", table.getKeySpace())
.replace("TABLE", table.getName())
.replace("PREDICATE", genPredicateTemplate(table, -1))
.replace("LIMIT", genLimitSyntax(table));
}
private String genAssignments(CqlTable table) {
StringBuilder sb = new StringBuilder();
for (CqlColumnDef coldef : table.getNonKeyColumnDefinitions()) {
if (coldef.isCounter()) {
sb.append(coldef.getName()).append("=")
.append(coldef.getName()).append("+").append("{").append(bindings.forColumn(coldef).name()).append("}")
.append(", ");
} else {
sb.append(coldef.getName()).append("=")
.append("{").append(bindings.forColumn(coldef).name()).append("}")
.append(", ");
private Map<String, Object> genSelectOpTemplates(CqlModel model, String blockname) {
Map<String, Object> blockdata = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
blockdata.put("ops", ops);
for (CqlTable table : model.getTableDefs()) {
ops.put(
namer.nameFor(table, "optype", "select", "blockname", blockname),
Map.of(
"prepared", genSelectSyntax(table),
"timeout", timeouts.get("select"),
"ratio", readRatioFor(table)
)
);
}
}
if (sb.length() > 0) {
sb.setLength(sb.length() - ", ".length());
} else {
logger.debug("no assignments in this?\n" + table.getRefDdl());
}
return sb.toString();
return blockdata;
}
private Optional<String> genInsertTemplate(CqlTable table) {
try {
return Optional.of("""
insert into KEYSPACE.TABLE\n
private String genSelectSyntax(CqlTable table) {
return """
select * from KEYSPACE.TABLE
PREDICATE
LIMIT;
"""
.replace("KEYSPACE", table.getKeySpace())
.replace("TABLE", table.getName())
.replace("PREDICATE", genPredicateTemplate(table, 0))
.replace("LIMIT", genLimitSyntax(table));
}
private String genLimitSyntax(CqlTable table) {
return " LIMIT 10";
}
private Map<String, Object> genInsertOpTemplates(CqlModel model, String blockname) {
Map<String, Object> blockdata = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
blockdata.put("ops", ops);
for (CqlTable table : model.getTableDefs()) {
ops.put(
namer.nameFor(table, "optype", "insert", "blockname", blockname),
Map.of(
"prepared", genInsertSyntax(table),
"timeout", timeouts.get("insert"),
"ratio", writeRatioFor(table)
)
);
}
return blockdata;
}
private String genInsertSyntax(CqlTable table) {
if (isCounterTable(table)) {
logger.warn("skipping insert on counter table '" + table.getFullName());
}
return """
insert into KEYSPACE.TABLE
( FIELDNAMES )
VALUES
( BINDINGS );
@@ -392,28 +405,186 @@
.replaceAll("BINDINGS",
String.join(", ",
table.getColumnDefinitions().stream()
.map(c -> "{" + bindings.forColumn(c).name() + "}").toList())));
.map(cdef -> {
if (cdef.isLastPartitionKey()) {
return dividedBinding(cdef, table);
} else {
return bindings.forColumn(cdef);
}
})
.map(c -> "{" + c.getName() + "}").toList()));
}
//
// List<CqlColumnDef> cdefs = table.getColumnDefinitions();
// return Optional.of("insert into " +
// table.getKeySpace() + "." + table.getName() + "\n" +
// " ( " + cdefs.stream().map(CqlColumnDef::getName)
// .collect(Collectors.joining(" , ")) +
// " )\n values\n (" +
// cdefs
// .stream()
// .map(cd -> {
// Binding binding = bindings.forColumn(cd);
// return "{" + binding.name() + "}";
// })
// .collect(Collectors.joining(","))
// + ");");
} catch (UnresolvedBindingException ube) {
return Optional.empty();
private Binding dividedBinding(CqlColumnDef columnDef, CqlTable tableDef) {
CGTableStats stats = tableDef.getTableAttributes();
if (stats==null) {
return bindings.forColumn(columnDef);
}
String partitionsSpec = stats.getAttribute("Number of partitions (estimate)");
if (partitionsSpec==null) {
}
double estimatedPartitions = Double.parseDouble(partitionsSpec);
long modulo = (long)(estimatedPartitions*=partitionMultiplier);
if (modulo==0) {
return bindings.forColumn(columnDef);
}
modulo = quantizeModuloByMagnitude(modulo,1);
logger.info("Set partition modulo for " + tableDef.getFullName() + " to " + modulo);
Binding binding = bindings.forColumn(columnDef, "Mod(" + modulo + "L); ");
return binding;
}
public static long quantizeModuloByMagnitude(long modulo, int significand) {
double initial = modulo;
double log10 = Math.log10(initial);
int zeroes = (int)log10;
zeroes = Math.max(1,zeroes-(significand-1));
long fractional = (long) Math.pow(10,zeroes);
long partial = ((long)initial/fractional) * fractional;
long nextPartial = partial+fractional;
if (Math.abs(initial-partial)<=Math.abs(initial-nextPartial)) {
return partial;
} else {
return nextPartial;
}
}
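A worked example of the quantization, following the method as defined above: for modulo 12345 with significand 1, log10(12345) is about 4.09, so zeroes = 4 and fractional = 10^4; the candidates are 10000 and 20000, and 12345 is nearer 10000. With significand 2, zeroes drops to 3 and the result snaps to the nearest multiple of 1000:

quantizeModuloByMagnitude(12345L, 1); // -> 10000
quantizeModuloByMagnitude(12345L, 2); // -> 12000
quantizeModuloByMagnitude(98765L, 1); // -> 100000 (rounds up across the magnitude boundary)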
private Map<String, Object> genUpdateOpTemplates(CqlModel model, String blockname) {
Map<String, Object> blockdata = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
blockdata.put("ops", ops);
for (CqlTable table : model.getTableDefs()) {
ops.put(
namer.nameFor(table, "optype", "update", "blockname", blockname),
Map.of(
"prepared", genUpdateSyntax(table),
"timeout", timeouts.get("update"),
"ratio", writeRatioFor(table)
)
);
}
return blockdata;
}
private boolean isCounterTable(CqlTable table) {
return table.getColumnDefinitions().stream()
.anyMatch(cd -> cd.getTrimmedTypedef().equalsIgnoreCase("counter"));
}
private int totalRatioFor(CqlTable table) {
if (table.getTableAttributes() == null || table.getTableAttributes().size() == 0) {
return 1;
}
return readRatioFor(table) + writeRatioFor(table);
}
private int readRatioFor(CqlTable table) {
if (table.getTableAttributes() == null || table.getTableAttributes().size() == 0) {
return 1;
}
double weighted_reads = Double.parseDouble(table.getTableAttributes().getAttribute("weighted_reads"));
return (int) (weighted_reads * DEFAULT_RESOLUTION);
}
private int writeRatioFor(CqlTable table) {
if (table.getTableAttributes() == null || table.getTableAttributes().size() == 0) {
return 1;
}
double weighted_writes = Double.parseDouble(table.getTableAttributes().getAttribute("weighted_writes"));
return (int) (weighted_writes * DEFAULT_RESOLUTION);
}
/**
* If keycount is 0, all key fields including partition and clustering fields
* are qualified with predicates.
* If keycount is positive, then only that many will be included.
* If keycount is negative, then that many keyfields will be removed from the
* predicate starting with the rightmost (innermost) fields first.
*
* @param table
* @param keycount
* @return
*/
private String genPredicateTemplate(CqlTable table, int keycount) {
StringBuilder sb = new StringBuilder();
LinkedList<CqlColumnDef> pkeys = new LinkedList<>();
for (String pkey : table.getPartitionKeys()) {
CqlColumnDef coldef = table.getColumnDefForName(pkey);
pkeys.push(coldef);
}
for (String ccol : table.getClusteringColumns()) {
CqlColumnDef coldef = table.getColumnDefForName(ccol);
pkeys.push(coldef);
}
if (keycount > 0) {
while (pkeys.size() > keycount) {
pkeys.pop();
}
} else if (keycount < 0) {
for (int i = 0; i > keycount; i--) {
pkeys.pop();
}
}
var lastcount = keycount;
keycount = Math.max(table.getPartitionKeys().size(), keycount);
if (keycount != lastcount) {
logger.debug("minimum keycount for " + table.getFullName() + " adjusted from " + lastcount + " to " + keycount);
}
// TODO; constraints on predicates based on valid constructions
pkeys.stream().map(this::genPredicatePart)
.forEach(p -> {
sb.append(p).append("\n AND ");
});
if (sb.length() > 0) {
sb.setLength(sb.length() - "\n AND ".length());
}
return sb.toString();
}
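As a worked example with a hypothetical table whose partition keys are (a, b) and whose clustering column is c: keycount 0 qualifies all of a, b, and c; keycount 2 pops fields from the innermost end until two remain, leaving a and b; keycount -1 pops exactly one innermost field, likewise leaving a and b. (Because pkeys is built by pushing onto the head of the list, the generated predicate lists the innermost surviving field first.)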
private String genPredicatePart(CqlColumnDef def) {
String typeName = def.getTrimmedTypedef();
Binding binding = bindings.forColumn(def);
return def.getName() + "={" + binding.getName() + "}";
}
private String genUpdateSyntax(CqlTable table) {
return """
update KEYSPACE.TABLE
set ASSIGNMENTS
where PREDICATES;
"""
.replaceAll("KEYSPACE", table.getKeySpace())
.replaceAll("TABLE", table.getName())
.replaceAll("PREDICATES", genPredicateTemplate(table, 0))
.replaceAll("ASSIGNMENTS", genAssignments(table));
}
private String genAssignments(CqlTable table) {
StringBuilder sb = new StringBuilder();
for (CqlColumnDef coldef : table.getNonKeyColumnDefinitions()) {
if (coldef.isCounter()) {
sb.append(coldef.getName()).append("=")
.append(coldef.getName()).append("+").append("{").append(bindings.forColumn(coldef).getName()).append("}")
.append(", ");
} else {
sb.append(coldef.getName()).append("=")
.append("{").append(bindings.forColumn(coldef).getName()).append("}")
.append(", ");
}
}
if (sb.length() > 0) {
sb.setLength(sb.length() - ", ".length());
}
return sb.toString();
}
public String getWorkloadAsYaml() {
DumpSettings dumpSettings = DumpSettings.builder()
.setDefaultFlowStyle(FlowStyle.BLOCK)
@@ -429,7 +600,7 @@
BaseRepresenter r;
Dump dump = new Dump(dumpSettings);
Map<String, Object> workload = getWorkload();
Map<String, Object> workload = generateBlocks();
return dump.dumpToString(workload);
}
@@ -440,47 +611,144 @@
public String getWorkoadAsJson() {
Gson gson = new GsonBuilder().setPrettyPrinting().create();
Map<String, Object> workload = getWorkload();
Map<String, Object> workload = generateBlocks();
return gson.toJson(workload);
}
private Object genTableCrudTemplates(CqlTable table) {
return Map.of();
private Map<String, Object> genDropTablesBlock(CqlModel model, String blockname) {
Map<String, Object> dropTablesBlock = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
dropTablesBlock.put("ops", ops);
for (CqlTable table : model.getTableDefs()) {
ops.put(
namer.nameFor(table, "optype", "drop", "blockname", blockname),
Map.of(
"simple", "drop table " + table.getFullName() + ";",
"timeout", timeouts.get("drop")
)
);
}
return dropTablesBlock;
}
private Map<String, Object> genTruncateBlock(CqlModel model) {
private Map<String, Object> genDropTypesBlock(CqlModel model, String blockname) {
Map<String, Object> dropTypesBlock = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
dropTypesBlock.put("ops", ops);
for (CqlType type : model.getTypes()) {
ops.put(
namer.nameFor(type, "optype", "drop-type", "blockname", blockname),
Map.of(
"simple", "drop type " + type.getKeyspace() + "." + type.getName() + ";",
"timeout", timeouts.get("drop")
)
);
}
return dropTypesBlock;
}
private Map<String, Object> genDropKeyspacesOpTemplates(CqlModel model, String blockname) {
Map<String, Object> dropTypesBlock = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
dropTypesBlock.put("ops", ops);
for (CqlType type : model.getTypes()) {
ops.put(
namer.nameFor(type, "optype", "drop-keyspace", "blockname", blockname),
Map.of(
"simple", "drop keyspace " + type.getKeyspace() + ";",
"timeout", timeouts.get("drop")
)
);
}
return dropTypesBlock;
}
private Map<String, Object> genTruncateTablesOpTemplates(CqlModel model, String blockname) {
Map<String, Object> truncateblock = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
truncateblock.put("ops", ops);
for (CqlTable table : model.getTableDefs()) {
ops.put(
namer.nameFor(table, "optype", "truncate"),
"truncate " + table.getKeySpace() + "." + table.getName() + ";"
namer.nameFor(table, "optype", "truncate", "blockname", blockname),
Map.of(
"simple", "truncate " + table.getFullName() + ";",
"timeout", timeouts.get("truncate")
)
);
}
return truncateblock;
}
private Map<String, Object> genKeyspacesSchemaBlock(CqlModel model) {
private Map<String, Object> genCreateKeyspacesOpTemplates(CqlModel model, String blockname) {
Map<String, Object> schemablock = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
for (CqlKeyspace ks : model.getKeyspacesByName().values()) {
ops.put("create-keyspace-" + ks.getName(), ks.getRefddl() + ";");
ops.put(
namer.nameFor(ks, "optype", "create", "blockname", blockname),
Map.of(
"simple", genKeyspaceDDL(ks),
"timeout", timeouts.get("create")
)
);
}
schemablock.put("ops", ops);
return schemablock;
}
private Map<String, Object> genTablesSchemaBlock(CqlModel model) {
private Map<String, Object> genCreateTypesOpTemplates(CqlModel model, String blockname) {
Map<String, Object> blockdata = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
blockdata.put("ops", ops);
for (String keyspace : model.getTypesByKeyspaceAndName().keySet()) {
for (CqlType type : model.getTypesByKeyspaceAndName().get(keyspace).values()) {
ops.put(
namer.nameFor(type, "optype", "create", "blockname", blockname),
Map.of(
"simple", genTypeDDL(type),
"timeout", timeouts.get("create")
)
);
}
}
return blockdata;
}
private String genKeyspaceDDL(CqlKeyspace keyspace) {
return """
create keyspace KEYSPACE
with replication = {REPLICATION}DURABLEWRITES?;
"""
.replace("KEYSPACE", keyspace.getName())
.replace("REPLICATION", keyspace.getReplicationData())
.replace("DURABLEWRITES?", keyspace.isDurableWrites() ? "" : "\n and durable writes = false")
;
}
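For a hypothetical keyspace baselines whose replication list parsed as 'class':'SimpleStrategy','replication_factor':'1', with durable_writes = true in the source DDL, this template renders roughly:

create keyspace baselines
 with replication = {'class':'SimpleStrategy','replication_factor':'1'};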
private Map<String, Object> genCreateTablesOpTemplates(CqlModel model, String blockname) {
Map<String, Object> schemablock = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
for (String ksname : model.getTablesByNameByKeyspace().keySet()) {
for (CqlTable cqltable : model.getTablesByNameByKeyspace().get(ksname).values()) {
ops.put("create-table-" + ksname + "." + cqltable.getName(), cqltable.getRefDdl() + ":");
if (elideUnusedTables && totalRatioFor(cqltable) == 0.0d) {
logger.info("eliding table " + ksname + "." + cqltable.getName() + " since its total op ratio was " + totalRatioFor(cqltable));
continue;
}
ops.put(
namer.nameFor(cqltable, "optype", "create", "blockname", blockname),
Map.of(
"simple", genTableDDL(cqltable),
"timeout", timeouts.get("create")
)
);
}
}
@@ -489,4 +757,74 @@
}
private String genTypeDDL(CqlType type) {
return """
create type KEYSPACE.TYPENAME (
TYPEDEF
);
"""
.replace("KEYSPACE", type.getKeyspace())
.replace("TYPENAME", type.getName())
.replace("TYPEDEF", type.getFields().entrySet().stream()
.map(entry -> entry.getKey() + " " + entry.getValue()).collect(Collectors.joining(",\n")));
}
private Object genTableDDL(CqlTable cqltable) {
if (cqltable.isCompactStorage()) {
logger.warn("COMPACT STORAGE is not supported, eliding this option for table '" + cqltable.getFullName() + "'");
}
return """
create table if not exists KEYSPACE.TABLE (
COLUMN_DEFS,
primary key (PRIMARYKEY)
)CLUSTERING;
"""
.replace("KEYSPACE", cqltable.getKeySpace())
.replace("TABLE", cqltable.getName())
.replace("COLUMN_DEFS", genTableColumnDDL(cqltable))
.replace("PRIMARYKEY", genPrimaryKeyDDL(cqltable))
.replace("CLUSTERING", genTableClusteringOrderDDL(cqltable));
}
private String genPrimaryKeyDDL(CqlTable cqltable) {
StringBuilder sb = new StringBuilder("(");
for (String partitionKey : cqltable.getPartitionKeys()) {
sb.append(partitionKey).append(", ");
}
sb.setLength(sb.length() - ", ".length());
sb.append(")");
for (String clusteringColumn : cqltable.getClusteringColumns()) {
sb.append(", ").append(clusteringColumn);
}
return sb.toString();
}
private String genTableClusteringOrderDDL(CqlTable cqltable) {
if (cqltable.getClusteringOrders().size() == 0) {
return "";
} else {
StringBuilder sb = new StringBuilder(" with clustering order by (\n");
for (int i = 0; i < cqltable.getClusteringOrders().size(); i++) {
sb.append(cqltable.getClusteringColumns().get(i));
sb.append(" ");
sb.append(cqltable.getClusteringOrders().get(i));
sb.append(",\n");
}
if (sb.length() > 0) {
sb.setLength(sb.length() - ",\n".length());
}
sb.append(")");
return sb.toString();
}
}
private String genTableColumnDDL(CqlTable cqltable) {
return cqltable.getColumnDefinitions().stream()
.map(cd -> cd.getName() + " " + cd.getTrimmedTypedef())
.collect(Collectors.joining(",\n"));
}
}
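Taken together, for the hypothetical table sketched earlier (partition keys id and bucket, clustering column ts with a desc ordering), these generators render roughly:

create table if not exists ks.t (
 id text,
 bucket int,
 ts timestamp,
 body text,
 primary key ((id, bucket), ts)
) with clustering order by (
 ts desc);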

View File

@@ -16,6 +16,37 @@
package io.nosqlbench.converters.cql.exporters.binders;
public record Binding(String name, String recipe) {
public class Binding {
String name;
String recipe;
public Binding(String name, String recipe) {
this.name = name;
this.recipe = recipe;
}
public Binding withPreFunctions(String... prefixes) {
StringBuilder sb = new StringBuilder();
for (String prefix : prefixes) {
String toAdd=prefix.trim();
if (!toAdd.endsWith(";")) {
toAdd+=";";
}
sb.append(toAdd);
}
sb.append(recipe);
return new Binding(getName(),sb.toString());
}
public String getRecipe() {
return recipe;
}
public String getName() {
return name;
}
public Binding withNameIncrement(long l) {
return new Binding(name+ l,recipe);
}
}
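A usage sketch of the prefix composition that dividedBinding relies on (recipe names hypothetical):

Binding base = new Binding("userid", "ToHashedUUID()");
Binding divided = base.withPreFunctions("Mod(30000L)"); // trailing ';' added if missing
divided.getRecipe();                    // "Mod(30000L);ToHashedUUID()"
divided.getName();                      // "userid"
divided.withNameIncrement(7).getName(); // "userid7"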

View File

@@ -31,34 +31,51 @@ public class BindingsAccumulator {
private final NamingFolio namer;
private final List<BindingsLibrary> libraries;
private final Map<String, String> accumulated = new LinkedHashMap<>();
private final Map<String,String> accumulatedByRecipe = new LinkedHashMap<>();
private final NamingStyle namingStyle = NamingStyle.SymbolicType;
LinkedHashMap<String,Integer> counts = new LinkedHashMap<>();
private final LinkedHashMap<String,Integer> counts = new LinkedHashMap<>();
private long enumeration=0L;
public BindingsAccumulator(NamingFolio namer, List<BindingsLibrary> libraries) {
this.namer = namer;
this.libraries = libraries;
}
public Binding forColumn(CqlColumnDef def, String... extra) {
public Binding forColumn(CqlColumnDef def, String... prefixes) {
return forColumn(def,Map.of(), prefixes);
}
public Binding forColumn(CqlColumnDef def, Map<String,String> extra, String... prefixes) {
for (BindingsLibrary library : libraries) {
Optional<Binding> binding = switch (namingStyle) {
Optional<Binding> optionalBinding = switch (namingStyle) {
case FullyQualified -> this.resolveFullyQualifiedBinding(def, extra);
case SymbolicType -> this.resolveSymbolicBinding(def, extra);
case CondensedKeyspace -> this.resolvedCondensedBinding(def, extra);
};
if (binding.isPresent()) {
registerBinding(binding.get());
return binding.get();
if (optionalBinding.isPresent()) {
Binding binding = optionalBinding.get();
if (prefixes.length>0) {
binding = binding.withPreFunctions(prefixes).withNameIncrement(++enumeration);
String extant = accumulatedByRecipe.get(binding.getRecipe());
if (extant!=null) {
binding= new Binding(extant,accumulated.get(extant));
}
}
registerBinding(binding);
return binding;
}
}
throw new UnresolvedBindingException(def);
}
private Optional<Binding> resolvedCondensedBinding(CqlColumnDef def, String[] extra) {
private Optional<Binding> resolvedCondensedBinding(CqlColumnDef def, Map<String,String> extra) {
throw new RuntimeException("Implement me!");
}
private Optional<Binding> resolveSymbolicBinding(CqlColumnDef def, String[] extra) {
private Optional<Binding> resolveSymbolicBinding(CqlColumnDef def, Map<String,String> extra) {
for (BindingsLibrary library : libraries) {
Optional<Binding> binding = library.resolveBindingsFor(def);
if (binding.isPresent()) {
@@ -69,13 +86,13 @@ public class BindingsAccumulator {
}
private Optional<Binding> resolveFullyQualifiedBinding(CqlColumnDef def, String[] extra) {
private Optional<Binding> resolveFullyQualifiedBinding(CqlColumnDef def, Map<String,String> extra) {
for (BindingsLibrary library : libraries) {
Optional<Binding> bindingRecipe = library.resolveBindingsFor(def);
if (bindingRecipe.isPresent()) {
Binding found = bindingRecipe.get();
String name = namer.nameFor(def, extra);
Binding renamedBinding = new Binding(name,found.recipe());
Binding renamedBinding = new Binding(name,found.getRecipe());
return Optional.of(renamedBinding);
}
}
@@ -83,8 +100,9 @@
}
private void registerBinding(Binding newBinding) {
String name = newBinding.name();
accumulated.put(name, newBinding.recipe());
String name = newBinding.getName();
accumulated.put(name, newBinding.getRecipe());
accumulatedByRecipe.put(newBinding.getRecipe(), name);
counts.put(name, counts.get(name)==null? 1 : counts.get(name)+1);
}
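This pair of maps is what enables binding re-use for identical definitions: accumulated indexes recipes by name, while accumulatedByRecipe indexes names by recipe. In forColumn, a prefixed binding first gets a fresh enumerated name, but if its full recipe was registered before, the extant name is returned instead, so identical definitions collapse to a single bindings entry in the generated workload. A sketch, with a hypothetical column:

Binding first = accumulator.forColumn(tsColumn, "Mod(10000L)"); // registers a new enumerated name
Binding again = accumulator.forColumn(tsColumn, "Mod(10000L)"); // same recipe: returns the extant binding
// first.getName().equals(again.getName()) -> true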

View File

@@ -42,7 +42,7 @@ public class NamingFolio {
private final Map<String, Labeled> graph = new LinkedHashMap<>();
private final CGElementNamer namer;
public final static String DEFAULT_NAMER_SPEC = "[COLUMN]-[TYPEDEF-][TABLE][-KEYSPACE]";
public final static String DEFAULT_NAMER_SPEC = "[BLOCKNAME-][OPTYPE-][COLUMN]-[TYPEDEF-][TABLE][-KEYSPACE]";
NamingStyle namingStyle = NamingStyle.SymbolicType;
public NamingFolio(String namerspec) {
@@ -75,6 +75,12 @@
return name;
}
public String nameFor(Labeled labeled, Map<String,String> fields) {
Map<String, String> labelsPlus = labeled.getLabelsAnd(fields);
String name = namer.apply(labelsPlus);
return name;
}
public void informNamerOfAllKnownNames(CqlModel model) {
for (CqlTable table : model.getTableDefs()) {

View File

@ -1,8 +1,9 @@
text_transformers:
- class: CGRegexReplacer
config:
replacers:
- /a/b/
# unimplemented
#text_transformers:
# - class: CGRegexReplacer
# config:
# replacers:
# - /a/b/
model_transformers:
@ -30,8 +31,9 @@ model_transformers:
# # Removes Keyspace DDL statements
# - class: CGKeySpaceDDLRemover
# Adds IF NOT EXIST to all DDL
- class: CGIfNotExistsInjector
# This is now a generator behavior that is done automatically
# # Adds IF NOT EXIST to all DDL
# - class: CGIfNotExistsInjector
# Replaces UDTs with blobs until we have full UDT generation capability
- class: CGUdtReplacer
@ -44,15 +46,59 @@ model_transformers:
# Uses nodetool histogram stats to weight reads and writes over all ops
- class: CGRatioCalculator
# This needs support of the AST, without relying on the refddl text images
# In other words, it doesn't work reliably with the substitution methods
# Don't enable it until the next revision
# # replaces names of keyspaces, tables, and columns with generated values
# - class: CGNameObfuscator
# replaces names of keyspaces, tables, and columns with generated values
- class: CGNameObfuscator
naming_template: "[OPTYPE-][COLUMN-][TYPEDEF-][TABLE!]-[KEYSPACE]"
naming_template: "[OPTYPE-][KEYSPACE-][TYPE-][NAME]"
partition_multiplier: 30
# for more distinction in metric names if needed:
#naming_template: "[BLOCKNAME-][OPTYPE-][KEYSPACE-][TYPE-][NAME]"
# How many partitions the primary key is limited to, relative to the value
# found in the nodetool stats. If you have the actual value from complete
# statistics, this can be 1.0, but stats taken from a single node are
# generally not representative. This allows for sizing up those estimates
# according to anecdotal information.
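# For example (hypothetical numbers): if nodetool stats from one node show
# 1,000,000 partitions for a table, a multiplier of 30.0 sizes the bindings
# for 30,000,000 distinct partition keys.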
partition_multiplier: 30.0
# Timeouts for each operation category, specified in seconds; fractional
# values are allowed. If not specified, all operations default to 10 seconds.
# Internally, these are configured as milliseconds.
timeouts:
create: 60.0
truncate: 900.0
drop: 900.0
scan: 30.0
select: 10.0
insert: 10.0
update: 10.0
delete: 10.0
# If this is set and the total ratio for a table is 0.0, then that table
# will not be included in the generated DDL.
elide_unused_tables: false
# future use, not active right now
blockplan:
# not needed when tags=block:'schema.*'
# schema: schema-keyspaces, schema-tables, schema-types
schema-keyspaces: schema-keyspaces
schema-tables: schema-tables
schema-types: schema-types
truncate-tables: truncate-tables
drop-types: drop-types
drop-tables: drop-tables
drop-keyspaces: drop-keyspaces
# not needed when tags=block:'drop.*'
# drop: drop-types, drop-tables, drop-keyspaces
rampup: insert
main-insert: insert
main-select: select
main-scan: scan-10
main-update: update
# not needed when tags=block:'main.*'
# main: insert, select, scan-10, update
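# Once this becomes active, a named phase would be selected with a tag filter
# on the command line, along these lines (hypothetical invocation; the driver
# and workload names are illustrative only):
#
#   nb5 run driver=cqld4 workload=myworkload tags=block:'main.*'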

View File

@ -17,7 +17,7 @@
package io.nosqlbench.converters.cql.exporters.transformers;
import io.nosqlbench.api.config.NBNamedElement;
import io.nosqlbench.converters.cql.cqlast.CqlTable;
import io.nosqlbench.api.labels.Labeled;
import io.nosqlbench.virtdata.library.basics.shared.from_long.to_string.Combinations;
import java.util.HashMap;
@ -37,25 +37,24 @@ public class CGCachingNameRemapper {
this.namefunc = function;
}
public synchronized String nameForType(String type, String originalName) {
String canonical = type+"__"+originalName;
return getOrCreateName(canonical);
public synchronized String nameForType(String type, String originalName, String prefix) {
String canonical = type+"_"+originalName;
return getOrCreateName(canonical, prefix);
}
public synchronized String nameFor(NBNamedElement element) {
String canonical = element.getClass().getSimpleName()+"--"+element.getName();
return getOrCreateName(canonical);
public synchronized String nameFor(NBNamedElement element, String prefix) {
String canonical = element.getClass().getSimpleName()+"-"+element.getName();
return getOrCreateName(canonical, prefix);
}
private String getOrCreateName(String canonical) {
private String getOrCreateName(String canonical, String prefix) {
if (!remapped.containsKey(canonical)) {
String newname = namefunc.apply(index++);
String newname = prefix+namefunc.apply(index++);
remapped.put(canonical,newname);
}
return remapped.get(canonical);
}
public Function<String, String> mapperForType(CqlTable cqlTable) {
return in -> this.nameForType(cqlTable.getClass().getSimpleName(),in);
public Function<String, String> mapperForType(Labeled cqlTable, String prefix) {
return in -> this.nameForType(cqlTable.getClass().getSimpleName(),in, prefix);
}
}

View File

@ -16,37 +16,37 @@
package io.nosqlbench.converters.cql.exporters.transformers;
import io.nosqlbench.converters.cql.cqlast.CqlKeyspace;
import io.nosqlbench.converters.cql.cqlast.CqlModel;
import io.nosqlbench.converters.cql.cqlast.CqlTable;
import io.nosqlbench.converters.cql.cqlast.CqlType;
/**
* @deprecated Superseded by direct rendering from AST in generator
*/
public class CGIfNotExistsInjector implements CGModelTransformer {
@Override
public CqlModel apply(CqlModel model) {
for (CqlKeyspace keyspace : model.getKeyspaceDefs()) {
keyspace.setRefDdl(keyspace.getRefddl().replaceAll(
"(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)",
"$1IF NOT EXISTS "
));
}
for (CqlTable table : model.getTableDefs()) {
String refddl = table.getRefDdl();
String replaced = refddl.replaceAll(
"(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)",
"$1IF NOT EXISTS "
);
table.setRefDdl(replaced);
}
for (CqlType type : model.getTypes()) {
type.setRefddl(type.getRefDdl().replaceAll(
"(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)",
"$1IF NOT EXISTS "
));
}
// for (CqlKeyspace keyspace : model.getKeyspaceDefs()) {
// keyspace.setRefDdl(keyspace.getRefddl().replaceAll(
// "(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)",
// "$1IF NOT EXISTS "
// ));
// }
// for (CqlTable table : model.getTableDefs()) {
// String refddl = table.getRefDdl();
// String replaced = refddl.replaceAll(
// "(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)",
// "$1IF NOT EXISTS "
// );
//
// table.setRefDdl(replaced);
// }
// for (CqlType type : model.getTypes()) {
// type.setRefddl(type.getRefDdl().replaceAll(
// "(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)",
// "$1IF NOT EXISTS "
// ));
//
// }
return model;
}
}

View File

@ -38,19 +38,22 @@ public class CGNameObfuscator implements CGModelTransformer {
public CqlModel apply(CqlModel model) {
for (String keyspaceName : model.getAllKnownKeyspaceNames()) {
String newKeyspaceName = remapper.nameForType("keyspace",keyspaceName);
String newKeyspaceName = remapper.nameForType("keyspace",keyspaceName,"ks_");
model.renamekeyspace(keyspaceName,newKeyspaceName);
}
for (CqlTable cqlTable : model.getTableDefs()) {
String tablename = cqlTable.getName();
String newTableName = remapper.nameFor(cqlTable);
model.renameTable(cqlTable.getKeySpace(), tablename, newTableName);
cqlTable.renameColumns(remapper.mapperForType(cqlTable));
String newTableName = remapper.nameFor(cqlTable,"tbl_");
model.renameTable(cqlTable, newTableName);
cqlTable.renameColumns(remapper.mapperForType(cqlTable, "col_"));
}
for (CqlType type : model.getTypes()) {
String typeName = type.getName();
String newTypeName = remapper.nameFor(type);
String newTypeName = remapper.nameFor(type,"typ_");
model.renameType(type.getKeyspace(),typeName,newTypeName);
type.renameColumns(remapper.mapperForType(type, "typ"));
}

View File

@ -27,7 +27,7 @@ public class CGReplicationSettingInjector implements CGModelTransformer, CGTrans
@Override
public CqlModel apply(CqlModel model) {
for (CqlKeyspace keyspace : model.getKeyspaceDefs()) {
keyspace.setRefReplDdl(this.replicationFields);
keyspace.setReplicationData(this.replicationFields);
}
return model;
}

View File

@ -29,14 +29,10 @@ public class CGUdtReplacer implements CGModelTransformer {
List<String> toReplace = model.getTypes().stream().map(t -> t.getKeyspace() + "." + t.getName()).toList();
for (CqlTable table : model.getTableDefs()) {
for (CqlColumnDef coldef : table.getColumnDefinitions()) {
String coldefDdl = coldef.getDefinitionDdl();
String typedef = coldef.getTrimmedTypedef();
for (String searchFor : toReplace) {
if (coldefDdl.contains(searchFor)) {
String typedef = coldef.getType();
coldef.setType("blob");
String replaced = coldef.getDefinitionDdl().replace(typedef, "blob");
coldef.setDefinitionRefDdl(replaced);
table.setRefDdl(table.getRefDdl().replace(typedef,"blob"));
if (typedef.contains(searchFor)) {
coldef.setTypeDef("blob");
}
}
}

View File

@ -131,6 +131,97 @@ names for the classic form have not changed.
gremlin: >-
g.V().hasLabel("device").has("deviceid", UUID.fromString('{deviceid}'))
## CQL Op Template - Optional Fields
If any of these are provided as op template fields, op params, or activity
params, then they will have the described effect. The calls to set these
parameters on an individual statement are incurred only if they are provided;
otherwise, driver defaults are used. These options can be applied to any of
the statement forms above. A combined example follows this reference listing.
```yaml
params:
# Set the consistency level for this statement
# For Astra, use only LOCAL_QUORUM
# Otherwise, one of
# ALL|EACH_QUORUM|QUORUM|LOCAL_QUORUM|ONE|TWO|THREE|LOCAL_ONE|ANY
cl: LOCAL_QUORUM
# or consistency_level: ...
# Set the serial consistency level for this statement.
# Note, you probably don't need this unless you are using LWTs
# SERIAL ~ QUORUM, LOCAL_SERIAL ~ LOCAL_QUORUM
scl: LOCAL_SERIAL
# or serial_consistency_level: ...
# Set a statement as idempotent. This is important for determining
# when ops can be trivially retried with no concern for unexpected
# mutation in the event that they succeed multiple times.
# true or false
idempotent: false
# Set the timeout for the operation, from the driver's perspective,
# in seconds. "2 seconds" is the default, but DDL, truncate, or drop
# statements will generally need more. If you want milliseconds, just use
# fractional seconds, like 0.500
timeout: 2.0
## The following options are meant for advanced testing scenarios only,
## and are not generally meant to be used in typical application-level,
## data model, performance, or scale testing. They expose properties
## which should not be set for general use, but which allow very specific
## scenarios to be constructed for core system-level testing.
## Some of them will only work with specially provided bindings which
## can provide the correct instantiated object type.
# replace the payload with a map of String->ByteBuffer for this operation
# type: Map<String, ByteBuffer>
custom_payload: ...
# set an instantiated ExecutionProfile to be used for this operation
# type: com.datastax.oss.driver.api.core.config.DriverExecutionProfile
execution_profile: ...
# set a named execution profile to be used for this operation
# type: String
execution_profile_name: ...
# set a resolved target node to be used for this operation
# type: com.datastax.oss.driver.api.core.metadata.Node
node: ...
# set the timestamp to be used as the "now" reference for this operation
# type: int
now_in_seconds: ...
# set the page size for this operation
# type: int
page_size: ...
# set the query timestamp for this operation (~ USING TIMESTAMP)
# type: long
query_timestamp: ...
# set the routing key for this operation, as a single ByteBuffer
# type: ByteBuffer
routing_key: ...
# set the routing key for this operation as an array of ByteBuffers
# type: ByteBuffer[]
routing_keys: ...
# set the routing token for this operation
# type: com.datastax.oss.driver.api.core.metadata.token.Token
routing_token: ...
# enable (or disable) tracing for this operation
# This should be used with great care, as tracing imposes overhead
# far beyond that of most point queries or writes. Use it sparingly,
# and only for functional investigation.
# type: boolean
tracing: ...
```
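As a combined example, here is a hypothetical op template applying several of
these fields to a single prepared statement. The op name, keyspace, table, and
binding reference are illustrative only:
```yaml
ops:
  read-user:
    prepared: |
      select * from examples.users where userid={userid}
    cl: LOCAL_QUORUM
    idempotent: true
    timeout: 0.5      # 500 milliseconds
    page_size: 1000
```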
## Driver Cache
@ -140,6 +231,8 @@ simply set a `space` parameter in your op templates, with a dynamic value.
__WARNING__: If you use the driver cache feature, be aware that creating a large
number of driver instances will be very expensive. Generally driver instances are meant
to be initialized and then shared throughout the life-cycle of an application process.
Thus, if you are doing multi-instance driver testing, it is best to use binding
functions for the `space` parameter which have a bounded cardinality per host, as
sketched below.
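For example, here is a hypothetical sketch which bounds the number of driver
instances to four, assuming the `Mod` and `ToString` binding functions from
the standard binding function library:
```yaml
bindings:
  # cycles map onto at most 4 distinct space names: "0".."3"
  driverid: Mod(4L); ToString();
params:
  space: "{driverid}"
```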

View File

@ -0,0 +1,33 @@
/*
* Copyright (c) 2022 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.converters.cql.exporters;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class CGWorkloadExporterTest {
@Test
public void testQuantizer() {
assertThat(CGWorkloadExporter.quantizeModuloByMagnitude(23L,1)).isEqualTo(20L);
assertThat(CGWorkloadExporter.quantizeModuloByMagnitude(234234343L,2)).isEqualTo(230000000L);
assertThat(CGWorkloadExporter.quantizeModuloByMagnitude(275234343L,3)).isEqualTo(275000000L);
assertThat(CGWorkloadExporter.quantizeModuloByMagnitude(275734343L,3)).isEqualTo(276000000L);
}
}
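For reference, the rounding behavior asserted by these cases can be sketched
as below. This is a plausible reconstruction inferred from the assertions, not
necessarily the committed implementation: the value is rounded at its leading
`significand` digits, and the remaining digits are zeroed.
```java
// A plausible reconstruction inferred from the asserted cases above; the
// committed implementation may differ. Assumes positive input values.
public static long quantizeModuloByMagnitude(long value, int significand) {
    int digits = (int) Math.ceil(Math.log10(value + 1));       // digit count of value
    double scale = Math.pow(10, Math.max(digits - significand, 0));
    return (long) (Math.round(value / scale) * scale);         // round to leading digits
}
```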

View File

@ -25,10 +25,15 @@ public interface Labeled {
default Map<String, String> getLabelsAnd(String... keyvalues) {
LinkedHashMap<String, String> map = new LinkedHashMap<>(getLabels());
for (int idx = 0; idx < keyvalues.length; idx+=2) {
map.put(keyvalues[0],keyvalues[1]);
map.put(keyvalues[idx],keyvalues[idx+1]);
}
return map;
}
default Map<String, String> getLabelsAnd(Map<String,String> extra) {
LinkedHashMap<String,String> map = new LinkedHashMap<>(getLabels());
map.putAll(extra);
return map;
}
static MapLabels forMap(Map<String,String> labels) {
return new MapLabels(labels);

View File

@ -78,7 +78,7 @@
</includes>
</fileset>
</filesets>
<followSymlinks>false</followSymlinks>
<followSymLinks>false</followSymLinks>
</configuration>
</plugin>