diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/BaseCqlStmtDispenser.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/BaseCqlStmtDispenser.java index 43c057b91..4b8c72ad0 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/BaseCqlStmtDispenser.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/BaseCqlStmtDispenser.java @@ -18,13 +18,19 @@ package io.nosqlbench.adapter.cqld4.opdispensers; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.token.Token; import io.nosqlbench.adapter.cqld4.Cqld4OpMetrics; import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlOp; import io.nosqlbench.engine.api.activityimpl.BaseOpDispenser; import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter; import io.nosqlbench.engine.api.templating.ParsedOp; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Map; import java.util.function.LongFunction; public abstract class BaseCqlStmtDispenser extends BaseOpDispenser { @@ -67,8 +73,23 @@ public abstract class BaseCqlStmtDispenser extends BaseOpDispenser { protected LongFunction getEnhancedStmtFunc(LongFunction basefunc, ParsedOp op) { LongFunction partial = basefunc; partial = op.enhanceEnum(partial, "cl", DefaultConsistencyLevel.class, Statement::setConsistencyLevel); + partial = op.enhanceEnum(partial, "consistency_level", DefaultConsistencyLevel.class, Statement::setConsistencyLevel); partial = op.enhanceEnum(partial, "scl", DefaultConsistencyLevel.class, Statement::setSerialConsistencyLevel); + partial = op.enhanceEnum(partial, "serial_consistency_level", DefaultConsistencyLevel.class, Statement::setSerialConsistencyLevel); partial = op.enhanceFuncOptionally(partial, "idempotent", Boolean.class, Statement::setIdempotent); + partial = op.enhanceFuncOptionally(partial, "timeout", double.class, (statement, l) -> statement.setTimeout(Duration.ofMillis((long)(l*1000L)))); + partial = op.enhanceFuncOptionally(partial, "custom_payload", Map.class, Statement::setCustomPayload); + partial = op.enhanceFuncOptionally(partial, "execution_profile", DriverExecutionProfile.class, Statement::setExecutionProfile); + partial = op.enhanceFuncOptionally(partial, "execution_profile_name", String.class, Statement::setExecutionProfileName); + partial = op.enhanceFuncOptionally(partial, "node", Node.class, Statement::setNode); + partial = op.enhanceFuncOptionally(partial, "now_in_seconds", int.class, Statement::setNowInSeconds); + partial = op.enhanceFuncOptionally(partial, "page_size", int.class, Statement::setPageSize); + partial = op.enhanceFuncOptionally(partial, "query_timestamp", long.class, Statement::setQueryTimestamp); + partial = op.enhanceFuncOptionally(partial, "routing_key", ByteBuffer.class, Statement::setRoutingKey); + partial = op.enhanceFuncOptionally(partial, "routing_keys", ByteBuffer[].class, Statement::setRoutingKey); + partial = op.enhanceFuncOptionally(partial, "routing_token", Token.class, Statement::setRoutingToken); + partial = op.enhanceFuncOptionally(partial, "tracing", boolean.class, Statement::setTracing); + return partial; } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlColumnDef.java 
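The added enhanceEnum/enhanceFuncOptionally calls above layer each optional op field onto the statement-producing function as another wrapper. As a rough sketch of that composition pattern only (the helper name and Optional-based signature below are illustrative, not the actual ParsedOp API), the timeout field could be folded in like this, converting seconds to milliseconds as the lambda above does:

import com.datastax.oss.driver.api.core.cql.Statement;
import java.time.Duration;
import java.util.Optional;
import java.util.function.LongFunction;

final class StatementEnhancementSketch {
    // Hypothetical helper, not part of ParsedOp: if a 'timeout' value (in seconds) was
    // provided on the op template, wrap the base statement function so every generated
    // statement gets the converted millisecond timeout; otherwise pass the base through.
    static LongFunction<Statement<?>> withTimeout(LongFunction<Statement<?>> base, Optional<Double> seconds) {
        return seconds
            .<LongFunction<Statement<?>>>map(secs -> cycle ->
                base.apply(cycle).setTimeout(Duration.ofMillis((long) (secs * 1000L))))
            .orElse(base);
    }
}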
b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlColumnDef.java index 700289571..9158b7420 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlColumnDef.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlColumnDef.java @@ -22,33 +22,20 @@ import io.nosqlbench.api.labels.Labeled; import java.util.Map; public class CqlColumnDef implements NBNamedElement, Labeled { - private String refDefinitionDdl; - private String refTypeDefddl; - private String table; + private CqlTable table; private String keyspace; private String name; private String type; private int position; private ColType coltype; - public CqlColumnDef(String colname, String typedef, String refColumnDdl) { + public CqlColumnDef(CqlTable table, int position, String colname, String typedef) { + this.table = table; + this.position = position; this.type = typedef; this.name = colname; - this.refDefinitionDdl = refColumnDdl; } - public String getDefinitionDdl() { - return refDefinitionDdl; - } - - public void setTypedfRefDdl(String textOfTypeDefOnly) { - this.refTypeDefddl = textOfTypeDefOnly; - } - public void setDefinitionRefDdl(String textOfWholeDefinition) { - this.refDefinitionDdl = textOfWholeDefinition; - } - - public void setType(String type) { + public void setTypeDef(String type) { this.type = type; } @@ -64,8 +51,8 @@ public class CqlColumnDef implements NBNamedElement, Labeled { return type.replaceAll(" ",""); } - public String getTable() { - return table; + public String getTableName() { + return table.getName(); } public String getKeyspace() { @@ -80,10 +67,11 @@ public class CqlColumnDef implements NBNamedElement, Labeled { @Override public Map getLabels() { return Map.of( - "column", name, + "name", name, "typedef", type, - "table", table, - "keyspace", keyspace + "table", table.getName(), + "keyspace", keyspace, + "type", "column" ); } @@ -91,7 +79,7 @@ public class CqlColumnDef implements NBNamedElement, Labeled { this.keyspace = keyspace; } - public void setTable(String table) { + public void setTable(CqlTable table) { this.table = table; } @@ -102,4 +90,21 @@ public class CqlColumnDef implements NBNamedElement, Labeled { public void setName(String name) { this.name = name; } + + public String getSyntax() { + return getName()+" "+getTrimmedTypedef(); + } + + public boolean isPartitionKey() { + return table.isPartitionKey(position); + } + public boolean isLastPartitionKey() { + return table.isLastPartitionKey(position); + } + public boolean isClusteringColumn() { + return table.isClusteringColumn(position); + } + public boolean isLastClusteringColumn() { + return table.isLastClusteringColumn(position); + } } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlKeyspace.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlKeyspace.java index d10ebea82..01f107367 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlKeyspace.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlKeyspace.java @@ -24,17 +24,14 @@ import java.util.Map; public class CqlKeyspace implements NBNamedElement, Labeled { String keyspaceName= ""; - String refddl; - private String refReplDdl; CGKeyspaceStats stats; + private boolean isDurableWrites; + private String replicationData; public CqlKeyspace() { } public void setKeyspaceName(String newname) { - if (this.refddl!=null) { - this.refddl = refddl.replaceAll(this.keyspaceName, newname); - } this.keyspaceName=newname; } @@ -42,46 +39,42 @@ public class
CqlKeyspace implements NBNamedElement, Labeled { return this.keyspaceName; } - public void setRefDdl(String refddl) { - this.refddl = refddl; - } @Override public String toString() { return "CqlKeyspace{" + "keyspaceName='" + keyspaceName + '\'' + - ", refddl='" + refddl + '\'' + + ", stats=" + stats + + ", isDurableWrites=" + isDurableWrites + + ", replicationData='" + replicationData + '\'' + '}'; } - public String getRefddl() { - return refddl; - } - @Override public Map getLabels() { return Map.of( - "keyspace", keyspaceName + "name", keyspaceName, + "type","keyspace" ); } - public void setRefReplDdl(String newRefReplDdl) { - if (this.refddl!=null) { - this.refddl=this.refddl.replaceAll(this.refReplDdl,newRefReplDdl); - } - this.refReplDdl=newRefReplDdl; - } - - public String getRefDdlWithReplFields(String replFields) { - refddl.replace(refReplDdl,replFields); - return refddl; - } - - public String getReplRefDdl() { - return this.refReplDdl; - } - public void setStats(CGKeyspaceStats ksstats) { this.stats=ksstats; } + + public boolean isDurableWrites() { + return isDurableWrites; + } + + public void setDurableWrites(boolean isDurableWrites) { + this.isDurableWrites = isDurableWrites; + } + + public void setReplicationData(String repldata) { + this.replicationData = repldata; + } + + public String getReplicationData() { + return this.replicationData; + } } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlModel.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlModel.java index 9723b797c..1b4d9d136 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlModel.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlModel.java @@ -77,22 +77,6 @@ public class CqlModel { } } } -// schemaStats.getKeyspaces().forEach((ksname, ksstats) -> { -// CqlKeyspace modelKs = getKeyspacesByName().get(ksname); -// if (modelKs!=null) { -// modelKs.setStats(ksstats); -// ksstats.getKeyspaceTables().forEach((tbname, tbstats) -> { -// Map tabledefs = tableDefs.get(ksname); -// if (tabledefs!=null) { -// for (CqlTable tabledef : tabledefs.values()) { -// tabledef.setTableAttributes(tbstats); -// } -// } -// }); -// -// } -// }); - } transient CqlKeyspace keyspace = null; @@ -117,7 +101,6 @@ public class CqlModel { public void saveKeyspace(String text,String refddl) { keyspace.setKeyspaceName(text); - keyspace.setRefDdl(refddl); this.keyspaceDefs.put(text, keyspace); keyspace=null; } @@ -126,16 +109,15 @@ public class CqlModel { table = new CqlTable(); } - public void saveTable(String keyspace, String text, String refddl) { + public void saveTable(String keyspace, String text) { table.setKeyspace(keyspace); table.setName(text); - table.setRefDdl(refddl); this.tableDefs.computeIfAbsent(keyspace, ks->new LinkedHashMap<>()).put(text, table); table = null; } - public void saveColumnDefinition(String colname, String coltype, boolean isPrimaryKey, String refddl) { - this.table.addcolumnDef(colname, coltype, refddl); + public void saveColumnDefinition(String colname, String typedef, boolean isPrimaryKey, int position) { + this.table.addcolumnDef(colname, typedef, position); if (isPrimaryKey) { this.table.addPartitionKey(colname); } @@ -202,21 +184,16 @@ public class CqlModel { table.addClusteringColumn(ccolumn); } - public void setReplicationText(String repldata) { - keyspace.setRefReplDdl(repldata); - } - public void newType() { udt = new CqlType(); } - public void addTypeField(String name, String typedef, String 
typedefRefDdl) { - udt.addField(name, typedef, typedefRefDdl); + public void addTypeField(String name, String typedef) { + udt.addField(name, typedef); } - public void saveType(String keyspace, String name, String refddl) { + public void saveType(String keyspace, String name) { udt.setKeyspace(keyspace); - udt.setRefddl(refddl); udt.setName(name); Map ksTypes = this.types.computeIfAbsent(keyspace, ks -> new LinkedHashMap<>()); ksTypes.put(udt.getName(),udt); @@ -277,11 +254,11 @@ public class CqlModel { } } - public void renameTable(String keyspaceName, String tableName, String newTableName) { - Map tablesInKeyspace = tableDefs.get(keyspaceName); - CqlTable table = tablesInKeyspace.remove(tableName); + public void renameTable(CqlTable extant, String newTableName) { + Map tablesInKs = tableDefs.get(extant.getKeySpace()); + CqlTable table = tablesInKs.get(extant.getName()); table.setName(newTableName); - tablesInKeyspace.put(newTableName,table); + tablesInKs.put(table.getName(),table); } public void renameType(String keyspaceName, String typeName, String newTypeName) { @@ -290,4 +267,24 @@ public class CqlModel { cqlType.setName(newTypeName); typesInKeyspace.put(newTypeName,cqlType); } + + public void setTableCompactStorage(boolean isCompactStorage) { + table.setCompactStorage(isCompactStorage); + } + + public void setKeyspaceDurableWrites(String booleanLiteral) { + keyspace.setDurableWrites(Boolean.parseBoolean(booleanLiteral)); + } + + public void setReplicationData(String repldata) { + keyspace.setReplicationData(repldata); + } + + public Map> getTypesByKeyspaceAndName() { + return types; + } + + public void addClusteringOrder(String colname, String order) { + table.addTableClusteringOrder(colname, order); + } } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlModelBuilder.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlModelBuilder.java index c0513c0f3..ad23b275b 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlModelBuilder.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlModelBuilder.java @@ -26,6 +26,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.util.List; +import java.util.stream.IntStream; public class CqlModelBuilder extends CqlParserBaseListener { private final static Logger logger = LogManager.getLogger(CqlModelBuilder.class); @@ -33,6 +34,7 @@ public class CqlModelBuilder extends CqlParserBaseListener { private final CGErrorListener errorListener; private final CqlModel model; private long counted; + private int colindex; public CqlModelBuilder(CGErrorListener errorListener) { this.errorListener = errorListener; @@ -41,7 +43,7 @@ public class CqlModelBuilder extends CqlParserBaseListener { @Override public void exitEveryRule(ParserRuleContext ctx) { - if ((counted++&0b11111111111111)==0b10000000000000) { + if ((counted++ & 0b11111111111111) == 0b10000000000000) { logger.trace("parsed " + counted + " elements..."); } } @@ -51,8 +53,8 @@ public class CqlModelBuilder extends CqlParserBaseListener { System.out.println("error parsing: " + node.toString()); ParseTree parent = node.getParent(); String errorNodeType = parent.getClass().getSimpleName(); -// System.out.println("error type: " + errorNodeType); -// System.out.println("source interval: " + node.getSourceInterval()); + + logger.info("PARSE ERROR: " + errorNodeType + "\n"+ node.getSourceInterval()); super.visitErrorNode(node); } @@ -73,7 +75,7 @@ public class 
CqlModelBuilder extends CqlParserBaseListener { @Override public void exitReplicationList(CqlParser.ReplicationListContext ctx) { String repldata = textOf(ctx); - model.setReplicationText(repldata); + model.setReplicationData(repldata); } @Override @@ -102,7 +104,7 @@ public class CqlModelBuilder extends CqlParserBaseListener { } } } else if (ctx.compoundKey() != null) { - model.addClusteringColumn(ctx.compoundKey().partitionKey().column().getText()); + model.addPartitionKey(ctx.compoundKey().partitionKey().getText()); for (CqlParser.ClusteringKeyContext ccol : ctx.compoundKey().clusteringKeyList().clusteringKey()) { model.addClusteringColumn(ccol.column().getText()); } @@ -119,8 +121,7 @@ public class CqlModelBuilder extends CqlParserBaseListener { public void exitCreateType(CqlParser.CreateTypeContext ctx) { String keyspace = ctx.keyspace().getText(); String name = ctx.type_().getText(); - String refddl = textOf(ctx); - model.saveType(keyspace, name, refddl); + model.saveType(keyspace, name); } @@ -132,8 +133,7 @@ public class CqlModelBuilder extends CqlParserBaseListener { for (int idx = 0; idx < columns.size(); idx++) { model.addTypeField( columns.get(idx).getText(), - dataTypes.get(idx).getText(), - textOf(dataTypes.get(idx)) + dataTypes.get(idx).getText() ); } @@ -149,11 +149,51 @@ public class CqlModelBuilder extends CqlParserBaseListener { public void exitCreateTable(CqlParser.CreateTableContext ctx) { model.saveTable( ctx.keyspace().getText(), - ctx.table().getText(), - textOf(ctx) + ctx.table().getText() ); } + @Override + public void exitOrderDirection(CqlParser.OrderDirectionContext ctx) { + } + + @Override + public void exitTableOptionItem(CqlParser.TableOptionItemContext ctx) { + if (ctx.kwCompactStorage()!=null) { + model.setTableCompactStorage(true); + } + super.exitTableOptionItem(ctx); + } + + @Override + public void exitDurableWrites(CqlParser.DurableWritesContext ctx) { + model.setKeyspaceDurableWrites(ctx.booleanLiteral().getText()); + + + } + + @Override + public void exitClusteringOrder(CqlParser.ClusteringOrderContext ctx) { + + List columns = ctx.children.stream() + .filter(c -> c instanceof CqlParser.ColumnContext) + .map(c -> c.getText()) + .toList(); + + List orders = ctx.children.stream() + .filter(c -> c instanceof CqlParser.OrderDirectionContext) + .map(c -> c.getText()) + .toList(); + + IntStream.range(0, columns.size()) + .forEach(i -> model.addClusteringOrder(columns.get(i), orders.get(i))); + } + +// @Override +// public void exitColumn(CqlParser.ColumnContext ctx) { +// super.exitColumn(ctx); +// } + private String textOf(ParserRuleContext ctx) { int startIndex = ctx.start.getStartIndex(); int stopIndex = ctx.stop.getStopIndex(); @@ -166,13 +206,18 @@ public class CqlModelBuilder extends CqlParserBaseListener { public void enterColumnDefinition(CqlParser.ColumnDefinitionContext ctx) { } + @Override + public void enterColumnDefinitionList(CqlParser.ColumnDefinitionListContext ctx) { + this.colindex = 0; + } + @Override public void exitColumnDefinition(CqlParser.ColumnDefinitionContext ctx) { model.saveColumnDefinition( ctx.column().getText(), textOf(ctx.dataType()), ctx.primaryKeyColumn() != null, - textOf(ctx) + colindex++ ); } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlTable.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlTable.java index 98f4a7c17..2b313697c 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlTable.java +++ 
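exitClusteringOrder above pairs column and direction children positionally. A tiny standalone illustration of the same IntStream pairing idiom, using made-up sample values rather than real parser contexts:

import java.util.List;
import java.util.stream.IntStream;

final class ClusteringOrderPairingDemo {
    public static void main(String[] args) {
        // Stand-ins for the text of the ColumnContext and OrderDirectionContext children
        // of a clause like: WITH CLUSTERING ORDER BY (ts DESC, seq ASC)
        List<String> columns = List.of("ts", "seq");
        List<String> orders = List.of("DESC", "ASC");
        IntStream.range(0, columns.size())
            .forEach(i -> System.out.println(columns.get(i) + " -> " + orders.get(i)));
        // prints "ts -> DESC" then "seq -> ASC", mirroring model.addClusteringOrder(col, order)
    }
}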
b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlTable.java @@ -20,25 +20,27 @@ import io.nosqlbench.api.config.NBNamedElement; import io.nosqlbench.api.labels.Labeled; import io.nosqlbench.converters.cql.exporters.CGTableStats; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; +import java.util.*; import java.util.function.Function; import java.util.stream.Collectors; public class CqlTable implements NBNamedElement, Labeled { String name = ""; String keyspace = ""; - List coldefs = new ArrayList<>(); CGTableStats tableAttributes = null; - List partitionKeys = new ArrayList<>(); - List clusteringColumns = new ArrayList<>(); - private String refddl; + int[] partitioning = new int[0]; + int[] clustering = new int[0]; + List clusteringOrders = new ArrayList<>(); + List coldefs = new ArrayList<>(); + private boolean compactStorage; public CqlTable() { } + public boolean isCompactStorage() { + return compactStorage; + } + public CGTableStats getTableAttributes() { return tableAttributes; } @@ -53,13 +55,10 @@ public class CqlTable implements NBNamedElement, Labeled { public void setName(String tableName) { this.name = tableName; - for (CqlColumnDef coldef : coldefs) { - coldef.setTable(tableName); - } } - public void addcolumnDef(String colname, String typedef, String refColumnDdl) { - coldefs.add(new CqlColumnDef(colname, typedef, refColumnDdl)); + public void addcolumnDef(String colname, String typedef, int position) { + coldefs.add(new CqlColumnDef(this, coldefs.size(), colname, typedef)); } @Override @@ -81,27 +80,12 @@ public class CqlTable implements NBNamedElement, Labeled { public void setKeyspace(String newKsName) { for (CqlColumnDef coldef : coldefs) { - coldef.setKeyspace(keyspace); - if (coldef.getDefinitionDdl()!=null) { - coldef.setDefinitionRefDdl(coldef.getDefinitionDdl().replaceAll(keyspace,newKsName)); - } - } - if (this.refddl!=null) { - this.refddl = this.refddl.replaceAll(this.keyspace,newKsName); + coldef.setKeyspace(newKsName); } this.keyspace = newKsName; } - public String getRefDdl() { - return this.refddl; - } - - public void setRefDdl(String refddl) { - this.refddl = refddl; - } - - public String getKeySpace() { return this.keyspace; } @@ -110,24 +94,50 @@ public class CqlTable implements NBNamedElement, Labeled { public Map getLabels() { return Map.of( "keyspace", this.keyspace, - "table", this.name + "name", this.name, + "type", "table" ); } public void addPartitionKey(String pkey) { - this.partitionKeys.add(pkey); + int[] newdefs = new int[partitioning.length + 1]; + System.arraycopy(partitioning, 0, newdefs, 0, partitioning.length); + for (int i = 0; i < coldefs.size(); i++) { + if (coldefs.get(i).getName().equals(pkey)) { + newdefs[newdefs.length - 1] = i; + break; + } + } + this.partitioning = newdefs; } public void addClusteringColumn(String ccol) { - this.clusteringColumns.add(ccol); + int[] newdefs = new int[clustering.length + 1]; + System.arraycopy(clustering, 0, newdefs, 0, clustering.length); + for (int i = 0; i < coldefs.size(); i++) { + if (coldefs.get(i).getName().equals(ccol)) { + newdefs[newdefs.length - 1] = i; + break; + } + } + this.clustering = newdefs; } + public void addTableClusteringOrder(String colname, String order) { + clusteringOrders.add(order); + } + + public List getClusteringOrders() { + return clusteringOrders; + } + + public List getPartitionKeys() { - return this.partitionKeys; + return Arrays.stream(partitioning).mapToObj(i -> 
this.coldefs.get(i).getName()).toList(); } public List getClusteringColumns() { - return this.clusteringColumns; + return Arrays.stream(clustering).mapToObj(i -> this.coldefs.get(i).getName()).toList(); } public CqlColumnDef getColumnDefForName(String colname) { @@ -142,18 +152,43 @@ public class CqlTable implements NBNamedElement, Labeled { return def.orElseThrow(); } - public void renameColumns(Function renamer) { + public void renameColumns(Function renamer) { for (CqlColumnDef coldef : coldefs) { coldef.setName(renamer.apply(coldef.getName())); } - } public List getNonKeyColumnDefinitions() { - return coldefs.stream() - .filter(n -> !partitionKeys.contains(n.getName())) - .filter(n -> !clusteringColumns.contains(n.getName())) - .toList(); + int last = partitioning[partitioning.length - 1]; + last = (clustering.length > 0 ? clustering[clustering.length - 1] : last); + List nonkeys = new ArrayList<>(); + for (int nonkey = last; nonkey < coldefs.size(); nonkey++) { + nonkeys.add(coldefs.get(nonkey)); + } + return nonkeys; } + public void setCompactStorage(boolean isCompactStorage) { + this.compactStorage = isCompactStorage; + } + + public String getFullName() { + return (this.keyspace != null ? this.keyspace + "." : "") + this.name; + } + + public boolean isPartitionKey(int position) { + return position < partitioning.length; + } + + public boolean isLastPartitionKey(int position) { + return position == partitioning.length - 1; + } + + public boolean isClusteringColumn(int position) { + return clustering.length > 0 && position < clustering[clustering.length - 1] && position >= clustering[0]; + } + + public boolean isLastClusteringColumn(int position) { + return clustering.length > 0 && position == clustering[clustering.length - 1]; + } } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlType.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlType.java index b804fc744..3822168b3 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlType.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/cqlast/CqlType.java @@ -17,15 +17,17 @@ package io.nosqlbench.converters.cql.cqlast; import io.nosqlbench.api.config.NBNamedElement; +import io.nosqlbench.api.labels.Labeled; import java.util.LinkedHashMap; import java.util.Map; +import java.util.function.Function; -public class CqlType implements NBNamedElement { +public class CqlType implements NBNamedElement, Labeled { private String keyspace; private String name; private String refddl; - private final Map fields = new LinkedHashMap<>(); + private Map fields = new LinkedHashMap<>(); public void setKeyspace(String newksname) { this.keyspace = newksname; @@ -37,10 +39,6 @@ public class CqlType implements NBNamedElement { this.name = name; } - public void setRefddl(String ddl) { - this.refddl = ddl; - } - public String getKeyspace() { return keyspace; } @@ -49,7 +47,7 @@ public class CqlType implements NBNamedElement { return this.name; } - public void addField(String name, String typedef, String typedefRefDdl) { + public void addField(String name, String typedef) { this.fields.put(name, typedef); } @@ -57,7 +55,18 @@ public class CqlType implements NBNamedElement { return fields; } - public String getRefDdl() { - return this.refddl; + @Override + public Map getLabels() { + return Map.of( + "keyspace", this.keyspace, + "type","udt", + "name",name + ); + } + + public void renameColumns(Function renamer) { + Map newColumns = new LinkedHashMap<>(); + 
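Since keys are now tracked as column indexes into coldefs, the position-based predicates read off directly from a small example. A sketch using the API shown above, with a hypothetical ks1.events table, assuming column definitions are registered in DDL order before the key callbacks arrive:

import io.nosqlbench.converters.cql.cqlast.CqlTable;

final class CqlTableIndexingExample {
    public static void main(String[] args) {
        CqlTable t = new CqlTable();
        t.setKeyspace("ks1");
        t.setName("events");                        // hypothetical table
        t.addcolumnDef("id", "uuid", 0);            // index 0
        t.addcolumnDef("bucket", "int", 1);         // index 1
        t.addcolumnDef("ts", "timestamp", 2);       // index 2
        t.addcolumnDef("body", "text", 3);          // index 3
        t.addPartitionKey("id");                    // partitioning = [0]
        t.addPartitionKey("bucket");                // partitioning = [0, 1]
        t.addClusteringColumn("ts");                // clustering  = [2]

        System.out.println(t.getPartitionKeys());        // [id, bucket]
        System.out.println(t.getClusteringColumns());    // [ts]
        System.out.println(t.isLastPartitionKey(1));     // true
        System.out.println(t.isLastClusteringColumn(2)); // true
    }
}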
fields.forEach((k,v)->newColumns.put(renamer.apply(k),v)); + this.fields = newColumns; } } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/CGWorkloadExporter.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/CGWorkloadExporter.java index ee7119eeb..7434f2de4 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/CGWorkloadExporter.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/CGWorkloadExporter.java @@ -18,10 +18,7 @@ package io.nosqlbench.converters.cql.exporters; import com.google.gson.Gson; import com.google.gson.GsonBuilder; -import io.nosqlbench.converters.cql.cqlast.CqlColumnDef; -import io.nosqlbench.converters.cql.cqlast.CqlKeyspace; -import io.nosqlbench.converters.cql.cqlast.CqlModel; -import io.nosqlbench.converters.cql.cqlast.CqlTable; +import io.nosqlbench.converters.cql.cqlast.*; import io.nosqlbench.converters.cql.exporters.binders.*; import io.nosqlbench.converters.cql.exporters.transformers.CGTransformersInit; import io.nosqlbench.converters.cql.parser.CqlModelParser; @@ -41,6 +38,7 @@ import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.*; import java.util.function.Function; +import java.util.stream.Collectors; /** * The unit of generation is simply everything that is provided to the exporter together. @@ -57,13 +55,25 @@ public class CGWorkloadExporter { private NamingFolio namer; private BindingsAccumulator bindings = new BindingsAccumulator(namer, List.of(defaultBindings)); - private CqlModel model; + private final CqlModel model; private final Map bindingsMap = new LinkedHashMap<>(); private final int DEFAULT_RESOLUTION = 10000; String replication; String namingTemplate; private List includedKeyspaces; private double partitionMultiplier; + private final Map timeouts = new HashMap(Map.of( + "create", 60.0, + "truncate", 900.0, + "drop", 900.0, + "scan", 30.0, + "select", 10.0, + "insert", 10.0, + "delete", 10.0, + "update", 10.0 + )); + private boolean elideUnusedTables; + private Map> blockplan = Map.of(); public CGWorkloadExporter(CqlModel model, CGTransformersInit transformers) { this.model = model; @@ -137,6 +147,11 @@ public class CGWorkloadExporter { String partition_multipler = cfgmap.get("partition_multiplier").toString(); exporter.setPartitionMultiplier(Double.parseDouble(partition_multipler)); + exporter.configureTimeouts(cfgmap.get("timeouts")); + exporter.configureElideUnusedTables(cfgmap.get("elide_unused_tables")); + + exporter.configureBlocks(cfgmap.get("blockplan")); + String workload = exporter.getWorkloadAsYaml(); try { Files.writeString( @@ -155,21 +170,7 @@ public class CGWorkloadExporter { } } - private void setPartitionMultiplier(double multipler) { - this.partitionMultiplier = multipler; - } - - private void setIncludedKeyspaces(List includeKeyspaces) { - this.includedKeyspaces = includeKeyspaces; - } - - public void setNamingTemplate(String namingTemplate) { - this.namingTemplate = namingTemplate; - this.namer = new NamingFolio(namingTemplate); - this.bindings = new BindingsAccumulator(namer, List.of(defaultBindings)); - } - - public Map getWorkload() { + public Map generateBlocks() { namer.informNamerOfAllKnownNames(model); Map workload = new LinkedHashMap<>(); @@ -178,93 +179,309 @@ public class CGWorkloadExporter { workload.put("bindings", bindingsMap); Map blocks = new LinkedHashMap<>(); workload.put("blocks", blocks); - blocks.put("schema-keyspaces", genKeyspacesSchemaBlock(model)); - 
blocks.put("schema-tables", genTablesSchemaBlock(model)); - blocks.put("schema-types", genTypesSchemaBlock(model)); - blocks.put("truncate", genTruncateBlock(model)); - blocks.put("rampup", genRampupBlock(model)); - blocks.put("main", genMainBlock(model)); + + for (Map.Entry> blocknameAndComponents : blockplan.entrySet()) { + String blockname = blocknameAndComponents.getKey(); + List components = blocknameAndComponents.getValue(); + + LinkedHashMap block = new LinkedHashMap<>( + Map.of("params", new LinkedHashMap()) + ); + for (String component : components) { + Map additions = switch (component) { + case "schema-keyspaces" -> genCreateKeyspacesOpTemplates(model, blockname); + case "schema-tables" -> genCreateTablesOpTemplates(model, blockname); + case "schema-types" -> genCreateTypesOpTemplates(model, blockname); + case "drop-types" -> genDropTypesBlock(model, blockname); + case "drop-tables" -> genDropTablesBlock(model, blockname); + case "drop-keyspaces" -> genDropKeyspacesOpTemplates(model, blockname); + case "truncate-tables" -> genTruncateTablesOpTemplates(model, blockname); + case "insert" -> genInsertOpTemplates(model, blockname); + case "select" -> genSelectOpTemplates(model, blockname); + case "scan-10" -> genScanOpTemplates(model, blockname); + case "update" -> genUpdateOpTemplates(model, blockname); + default -> throw new RuntimeException("Unable to create block entries for " + component + "."); + }; + block.putAll(additions); + } + simplifyTimeouts(block); + blocks.put(blockname, block); + } bindingsMap.putAll(bindings.getAccumulatedBindings()); return workload; } - private Map genTypesSchemaBlock(CqlModel model) { - return Map.of(); + private void simplifyTimeouts(Map block) { + Map> byTimeout = new LinkedHashMap<>(); + Map ops = (Map) block.get("ops"); + ops.forEach((opname, opmap) -> { + double timeout = (double) (((Map) opmap).get("timeout")); + byTimeout.computeIfAbsent(timeout, d -> new ArrayList<>()).add(opname); + }); + List timeouts = byTimeout.keySet().stream().sorted(Double::compare).toList(); + if (timeouts.size() == 1) { + ((Map) block.computeIfAbsent("params", p -> new LinkedHashMap<>())).put("timeout", timeouts.get(0)); + Set opnames = ((Map) block.get("ops")).keySet(); + for (String opname : opnames) { + + Map opmap = (Map) ops.get(opname); + Map newOp = new LinkedHashMap<>(opmap); + newOp.remove("timeout"); + ops.put(opname, newOp); + } + } } - private Map genScenarios(CqlModel model) { - return Map.of( - "default", Map.of( - "schema", "run driver=cql tags=block:'schema-.*' threads===UNDEF cycles===UNDEF", - "schema-keyspaces", "run driver=cql tags=block:'schema-keyspaces' threads===UNDEF cycles===UNDEF", - "schema-tables", "run driver=cql tags=block:'schema-tables' threads===UNDEF cycles===UNDEF", - "schema-types", "run driver=cql tags=block:'schema-types' threads===UNDEF cycles===UNDEF", - "rampup", "run driver=cql tags=block:rampup threads=auto cycles===TEMPLATE(rampup-cycles,10000)", - "main", "run driver=cql tags=block:main threads=auto cycles===TEMPLATE(main-cycles,10000)" - ), - "truncate", "run driver=cql tags=block:truncate threads===UNDEF cycles===UNDEF" - ); + private void configureBlocks(Object generate_blocks_spec) { + if (generate_blocks_spec == null) { + throw new RuntimeException("Error with generate blocks, required parameter 'blockplan' is missing"); + } + if (generate_blocks_spec instanceof Map blocksmap) { + Map> planmap = new LinkedHashMap<>(); + for (Map.Entry blockplan : ((Map) blocksmap).entrySet()) { + planmap.put(blockplan.getKey(), 
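simplifyTimeouts above hoists a timeout shared by every op in a block into the block's params map. A standalone sketch of that behavior on made-up op entries (not the real generated block structure):

import java.util.LinkedHashMap;
import java.util.Map;

final class TimeoutHoistingSketch {
    public static void main(String[] args) {
        Map<String, Object> op1 = new LinkedHashMap<>(Map.of("prepared", "select ...", "timeout", 10.0));
        Map<String, Object> op2 = new LinkedHashMap<>(Map.of("prepared", "insert ...", "timeout", 10.0));
        Map<String, Map<String, Object>> ops = new LinkedHashMap<>(Map.of("select_op", op1, "insert_op", op2));

        // Only when every op carries the same timeout is it promoted to block params
        // and removed from the individual ops.
        long distinct = ops.values().stream().map(op -> op.get("timeout")).distinct().count();
        Map<String, Object> params = new LinkedHashMap<>();
        if (distinct == 1) {
            params.put("timeout", ops.values().iterator().next().get("timeout"));
            ops.values().forEach(op -> op.remove("timeout"));
        }

        System.out.println(params); // {timeout=10.0}
        System.out.println(ops);    // per-op timeout entries removed
    }
}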
Arrays.stream(blockplan.getValue().split(", ")).toList()); + } + this.blockplan = planmap; + } else { + throw new RuntimeException("Unrecognized type '" + generate_blocks_spec.getClass().getSimpleName() + "' for 'blockplan' config."); + } } - private Map genMainBlock(CqlModel model) { - Map mainOpTemplates = new LinkedHashMap<>(); + private void configureElideUnusedTables(Object elide_unused_tables) { + if (elide_unused_tables == null) { + this.elideUnusedTables = false; + } else { + this.elideUnusedTables = Boolean.parseBoolean(elide_unused_tables.toString()); + } + } - for (CqlTable table : model.getTableDefs()) { + public void configureTimeouts(Object spec) { + if (spec instanceof Map specmap) { + for (Object key : specmap.keySet()) { + if (this.timeouts.containsKey(key.toString())) { + Object value = specmap.get(key.toString()); + if (value instanceof Number number) { + this.timeouts.put(key.toString(), number.doubleValue()); + logger.info("configured '" + key + "' timeout as " + this.timeouts.get(key.toString()) + "S"); + } - if (!isCounterTable(table)) { - - Optional insertTemplate = this.genInsertTemplate(table); - if (insertTemplate.isPresent()) { - mainOpTemplates.put(namer.nameFor(table, "optype", "insert"), - Map.of( - "stmt", insertTemplate.get(), - "ratio", writeRatioFor(table) - ) - ); } else { - throw new RuntimeException("Unable to generate main insert template for table '" + table + "'"); + throw new RuntimeException("timeout type '" + key + "' unknown. Known types: " + this.timeouts.keySet()); } - } else { - logger.info("skipped insert for counter table '" + table.getName() + "'"); + } - - Optional updateTemplate = this.genUpdateTemplate(table); - if (updateTemplate.isPresent()) { - mainOpTemplates.put(namer.nameFor(table, "optype", "update"), - Map.of( - "stmt", updateTemplate.get(), - "ratio", writeRatioFor(table) - ) - ); - } else { - throw new RuntimeException("Unable to generate main insert template for table '" + table + "'"); - } - - - Optional selectTemplate = this.genSelectTemplate(table); - if (selectTemplate.isPresent()) { - mainOpTemplates.put(namer.nameFor(table, "optype", "select"), - Map.of( - "stmt", selectTemplate.get(), - "ratio", readRatioFor(table)) - ); - } else { - throw new RuntimeException("Unable to generate main select template for table '" + table + "'"); - } - } - return Map.of("ops", mainOpTemplates); } + private void setPartitionMultiplier(double multipler) { + this.partitionMultiplier = multipler; + } + + public void setNamingTemplate(String namingTemplate) { + this.namingTemplate = namingTemplate; + this.namer = new NamingFolio(namingTemplate); + this.bindings = new BindingsAccumulator(namer, List.of(defaultBindings)); + } + + private LinkedHashMap genScenarios(CqlModel model) { + return new LinkedHashMap<>() {{ + put("default", + new LinkedHashMap<>() {{ + put("schema", "run driver=cql tags=block:'schema-.*' threads===UNDEF cycles===UNDEF"); + put("rampup", "run driver=cql tags=block:'rampup-.*' threads=auto cycles===TEMPLATE(rampup-cycles,10000)"); + put("main", "run driver=cql tags=block:'main-.*' threads=auto cycles===TEMPLATE(main-cycles,10000)"); + + }}); + put("truncate", "run driver=cql tags=block:'truncate-.*' threads===UNDEF cycles===UNDEF"); + put("schema-keyspaces", "run driver=cql tags=block:'schema-keyspaces' threads===UNDEF cycles===UNDEF"); + put("schema-types", "run driver=cql tags=block:'schema-types' threads===UNDEF cycles===UNDEF"); + put("schema-tables", "run driver=cql tags=block:'schema-tables' threads===UNDEF 
cycles===UNDEF"); + put("drop", "run driver=cql tags=block:'drop-.*' threads===UNDEF cycles===UNDEF"); + put("drop-tables", "run driver=cql tags=block:'drop-tables' threads===UNDEF cycles===UNDEF"); + put("drop-types", "run driver=cql tags=block:'drop-types' threads===UNDEF cycles===UNDEF"); + put("drop-keyspaces", "run driver=cql tags=block:'drop-keyspaces' threads===UNDEF cycles===UNDEF"); + + }}; + } + + private Map genScanOpTemplates(CqlModel model, String blockname) { + Map blockdata = new LinkedHashMap<>(); + Map ops = new LinkedHashMap<>(); + blockdata.put("ops", ops); + for (CqlTable table : model.getTableDefs()) { + ops.put( + namer.nameFor(table, "optype", "scan", "blockname", blockname), + Map.of( + "prepared", genScanSyntax(table), + "timeout", timeouts.get("scan"), + "ratio", readRatioFor(table) + ) + ); + } + return blockdata; + } + + private String genScanSyntax(CqlTable table) { + return """ + select * from KEYSPACE.TABLE + PREDICATE + LIMIT; + """ + .replace("KEYSPACE", table.getKeySpace()) + .replace("TABLE", table.getName()) + .replace("PREDICATE", genPredicateTemplate(table, -1)) + .replace("LIMIT", genLimitSyntax(table)); + } + + + private Map genSelectOpTemplates(CqlModel model, String blockname) { + Map blockdata = new LinkedHashMap<>(); + Map ops = new LinkedHashMap<>(); + blockdata.put("ops", ops); + for (CqlTable table : model.getTableDefs()) { + ops.put( + namer.nameFor(table, "optype", "select", "blockname", blockname), + Map.of( + "prepared", genSelectSyntax(table), + "timeout", timeouts.get("select"), + "ratio", readRatioFor(table) + ) + ); + } + return blockdata; + } + + private String genSelectSyntax(CqlTable table) { + return """ + select * from KEYSPACE.TABLE + PREDICATE + LIMIT; + """ + .replace("KEYSPACE", table.getKeySpace()) + .replace("TABLE", table.getName()) + .replace("PREDICATE", genPredicateTemplate(table, 0)) + .replace("LIMIT", genLimitSyntax(table)); + } + + private String genLimitSyntax(CqlTable table) { + return " LIMIT 10"; + } + + private Map genInsertOpTemplates(CqlModel model, String blockname) { + Map blockdata = new LinkedHashMap<>(); + Map ops = new LinkedHashMap<>(); + blockdata.put("ops", ops); + for (CqlTable table : model.getTableDefs()) { + ops.put( + namer.nameFor(table, "optype", "insert", "blockname", blockname), + Map.of( + "prepared", genInsertSyntax(table), + "timeout", timeouts.get("insert"), + "ratio", writeRatioFor(table) + ) + ); + } + return blockdata; + } + + private String genInsertSyntax(CqlTable table) { + if (isCounterTable(table)) { + logger.warn("skipping insert on counter table '" + table.getFullName()); + } + + return """ + insert into KEYSPACE.TABLE + ( FIELDNAMES ) + VALUES + ( BINDINGS ); + """ + .replace("KEYSPACE", table.getKeySpace()) + .replace("TABLE", table.getName()) + .replace("FIELDNAMES", + String.join(", ", + table.getColumnDefinitions().stream() + .map(CqlColumnDef::getName).toList())) + .replaceAll("BINDINGS", + String.join(", ", + table.getColumnDefinitions().stream() + .map(cdef -> { + if (cdef.isLastPartitionKey()) { + return dividedBinding(cdef, table); + } else { + return bindings.forColumn(cdef); + } + }) + .map(c -> "{" + c.getName() + "}").toList())); + } + + private Binding dividedBinding(CqlColumnDef columnDef, CqlTable tableDef) { + CGTableStats stats = tableDef.getTableAttributes(); + if (stats==null) { + return bindings.forColumn(columnDef); + } + String partitionsSpec = stats.getAttribute("Number of partitions (estimate)"); + if (partitionsSpec==null) { + } + double 
estimatedPartitions = Double.parseDouble(partitionsSpec); + long modulo = (long)(estimatedPartitions*=partitionMultiplier); + if (modulo==0) { + return bindings.forColumn(columnDef); + } + modulo = quantizeModuloByMagnitude(modulo,1); + logger.info("Set partition modulo for " + tableDef.getFullName() + " to " + modulo); + Binding binding = bindings.forColumn(columnDef, "Mod(" + modulo + "L); "); + return binding; + } + + public static long quantizeModuloByMagnitude(long modulo, int significand) { + double initial = modulo; + double log10 = Math.log10(initial); + int zeroes = (int)log10; + zeroes = Math.max(1,zeroes-(significand-1)); + long fractional = (long) Math.pow(10,zeroes); + long partial = ((long)initial/fractional) * fractional; + long nextPartial = partial+fractional; + if (Math.abs(initial-partial)<=Math.abs(initial-nextPartial)) { + return partial; + } else { + return nextPartial; + } + } + + private Map genUpdateOpTemplates(CqlModel model, String blockname) { + Map blockdata = new LinkedHashMap<>(); + Map ops = new LinkedHashMap<>(); + blockdata.put("ops", ops); + for (CqlTable table : model.getTableDefs()) { + ops.put( + namer.nameFor(table, "optype", "update", "blockname", blockname), + Map.of( + "prepared", genUpdateSyntax(table), + "timeout", timeouts.get("update"), + "ratio", writeRatioFor(table) + ) + ); + } + return blockdata; + } + + private boolean isCounterTable(CqlTable table) { return table.getColumnDefinitions().stream() .anyMatch(cd -> cd.getTrimmedTypedef().equalsIgnoreCase("counter")); } + private int totalRatioFor(CqlTable table) { + if (table.getTableAttributes() == null || table.getTableAttributes().size() == 0) { + return 1; + } + return readRatioFor(table) + writeRatioFor(table); + + } private int readRatioFor(CqlTable table) { - if (table.getTableAttributes()==null ||table.getTableAttributes().size() == 0) { + if (table.getTableAttributes() == null || table.getTableAttributes().size() == 0) { return 1; } double weighted_reads = Double.parseDouble(table.getTableAttributes().getAttribute("weighted_reads")); @@ -272,60 +489,60 @@ public class CGWorkloadExporter { } private int writeRatioFor(CqlTable table) { - if (table.getTableAttributes()==null ||table.getTableAttributes().size() == 0) { + if (table.getTableAttributes() == null || table.getTableAttributes().size() == 0) { return 1; } double weighted_writes = Double.parseDouble(table.getTableAttributes().getAttribute("weighted_writes")); return (int) (weighted_writes * DEFAULT_RESOLUTION); } + /** + * If keycount is 0, all key fields including partition and clustering fields + * are qualfied with predicates. + * If keycount is positive, then only that many will be included. + * If keycount is negative, then that many keyfields will be removed from the + * predicate starting with the rightmost (innermost) fields first. 
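For reference, quantizeModuloByMagnitude above rounds a modulo value to its leading digits; a few worked calls, with results computed from the method as written:

import io.nosqlbench.converters.cql.exporters.CGWorkloadExporter;

final class QuantizeExample {
    public static void main(String[] args) {
        // Keep one significant digit: 12345 -> 10000, 987 -> 1000 (rounds to the nearest step)
        System.out.println(CGWorkloadExporter.quantizeModuloByMagnitude(12345L, 1)); // 10000
        System.out.println(CGWorkloadExporter.quantizeModuloByMagnitude(987L, 1));   // 1000
        // Keep two significant digits: 12345 -> 12000
        System.out.println(CGWorkloadExporter.quantizeModuloByMagnitude(12345L, 2)); // 12000
    }
}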
+ * + * @param table + * @param keycount + * @return + */ + private String genPredicateTemplate(CqlTable table, int keycount) { - private Map genRampupBlock(CqlModel model) { - Map rampupOpTemplates = new LinkedHashMap<>(); - - - for (CqlTable table : model.getTableDefs()) { - if (!isCounterTable(table)) { - Optional insert = genInsertTemplate(table); - if (insert.isPresent()) { - rampupOpTemplates.put(namer.nameFor(table, "optype", "insert"), insert.get()); - } else { - throw new RuntimeException("Unable to create rampup template for table '" + table + "'"); - } - } - } - - return Map.of("ops", rampupOpTemplates); - } - - private Optional genSelectTemplate(CqlTable table) { - - try { - return Optional.of("select * from " + table.getKeySpace() + "." + table.getName() + - "\n WHERE " + genPredicateTemplate(table)); - } catch (UnresolvedBindingException ube) { - logger.error(ube); - return Optional.empty(); - } - } - - private String genPredicateTemplate(CqlTable table) { StringBuilder sb = new StringBuilder(); - List pkeys = new ArrayList<>(); + LinkedList pkeys = new LinkedList<>(); for (String pkey : table.getPartitionKeys()) { CqlColumnDef coldef = table.getColumnDefForName(pkey); - pkeys.add(coldef); + pkeys.push(coldef); } for (String ccol : table.getClusteringColumns()) { CqlColumnDef coldef = table.getColumnDefForName(ccol); - pkeys.add(coldef); + pkeys.push(coldef); } + if (keycount > 0) { + while (pkeys.size() > keycount) { + pkeys.pop(); + } + } else if (keycount < 0) { + for (int i = 0; i > keycount; i--) { + pkeys.pop(); + } + } + var lastcount = keycount; + keycount = Math.max(table.getPartitionKeys().size(), keycount); + if (keycount != lastcount) { + logger.debug("minimum keycount for " + table.getFullName() + " adjusted from " + lastcount + " to " + keycount); + } + + // TODO; constraints on predicates based on valid constructions pkeys.stream().map(this::genPredicatePart) .forEach(p -> { sb.append(p).append("\n AND "); }); - sb.setLength(sb.length() - "\n AND ".length()); + if (sb.length() > 0) { + sb.setLength(sb.length() - "\n AND ".length()); + } return sb.toString(); } @@ -333,25 +550,19 @@ public class CGWorkloadExporter { String typeName = def.getTrimmedTypedef(); Binding binding = bindings.forColumn(def); - return def.getName() + "={" + binding.name() + "}"; + return def.getName() + "={" + binding.getName() + "}"; } - private Optional genUpdateTemplate(CqlTable table) { - try { - - return Optional.of(""" - update KEYSPACE.TABLE - set ASSIGNMENTS - where PREDICATES; - """ - .replaceAll("KEYSPACE", table.getKeySpace()) - .replaceAll("TABLE", table.getName()) - .replaceAll("PREDICATES", genPredicateTemplate(table)) - .replaceAll("ASSIGNMENTS", genAssignments(table))); - - } catch (UnresolvedBindingException ube) { - return Optional.empty(); - } + private String genUpdateSyntax(CqlTable table) { + return """ + update KEYSPACE.TABLE + set ASSIGNMENTS + where PREDICATES; + """ + .replaceAll("KEYSPACE", table.getKeySpace()) + .replaceAll("TABLE", table.getName()) + .replaceAll("PREDICATES", genPredicateTemplate(table, 0)) + .replaceAll("ASSIGNMENTS", genAssignments(table)); } private String genAssignments(CqlTable table) { @@ -359,60 +570,20 @@ public class CGWorkloadExporter { for (CqlColumnDef coldef : table.getNonKeyColumnDefinitions()) { if (coldef.isCounter()) { sb.append(coldef.getName()).append("=") - .append(coldef.getName()).append("+").append("{").append(bindings.forColumn(coldef).name()).append("}") + 
.append(coldef.getName()).append("+").append("{").append(bindings.forColumn(coldef).getName()).append("}") .append(", "); } else { sb.append(coldef.getName()).append("=") - .append("{").append(bindings.forColumn(coldef).name()).append("}") + .append("{").append(bindings.forColumn(coldef).getName()).append("}") .append(", "); } } if (sb.length() > 0) { sb.setLength(sb.length() - ", ".length()); - } else { - logger.debug("no assignments in this?\n" + table.getRefDdl()); } return sb.toString(); } - private Optional genInsertTemplate(CqlTable table) { - try { - return Optional.of(""" - insert into KEYSPACE.TABLE\n - ( FIELDNAMES ) - VALUES - ( BINDINGS ); - """ - .replace("KEYSPACE", table.getKeySpace()) - .replace("TABLE", table.getName()) - .replace("FIELDNAMES", - String.join(", ", - table.getColumnDefinitions().stream() - .map(CqlColumnDef::getName).toList())) - .replaceAll("BINDINGS", - String.join(", ", - table.getColumnDefinitions().stream() - .map(c -> "{" + bindings.forColumn(c).name() + "}").toList()))); - -// -// List cdefs = table.getColumnDefinitions(); -// return Optional.of("insert into " + -// table.getKeySpace() + "." + table.getName() + "\n" + -// " ( " + cdefs.stream().map(CqlColumnDef::getName) -// .collect(Collectors.joining(" , ")) + -// " )\n values\n (" + -// cdefs -// .stream() -// .map(cd -> { -// Binding binding = bindings.forColumn(cd); -// return "{" + binding.name() + "}"; -// }) -// .collect(Collectors.joining(",")) -// + ");"); - } catch (UnresolvedBindingException ube) { - return Optional.empty(); - } - } public String getWorkloadAsYaml() { DumpSettings dumpSettings = DumpSettings.builder() @@ -429,7 +600,7 @@ public class CGWorkloadExporter { BaseRepresenter r; Dump dump = new Dump(dumpSettings); - Map workload = getWorkload(); + Map workload = generateBlocks(); return dump.dumpToString(workload); } @@ -440,47 +611,144 @@ public class CGWorkloadExporter { public String getWorkoadAsJson() { Gson gson = new GsonBuilder().setPrettyPrinting().create(); - Map workload = getWorkload(); + Map workload = generateBlocks(); return gson.toJson(workload); } - private Object genTableCrudTemplates(CqlTable table) { - return Map.of(); + + private Map genDropTablesBlock(CqlModel model, String blockname) { + Map dropTablesBlock = new LinkedHashMap<>(); + Map ops = new LinkedHashMap<>(); + dropTablesBlock.put("ops", ops); + for (CqlTable table : model.getTableDefs()) { + ops.put( + namer.nameFor(table, "optype", "drop", "blockname", blockname), + Map.of( + "simple", "drop table " + table.getFullName() + ";", + "timeout", timeouts.get("drop") + ) + ); + } + return dropTablesBlock; } - private Map genTruncateBlock(CqlModel model) { + private Map genDropTypesBlock(CqlModel model, String blockname) { + Map dropTypesBlock = new LinkedHashMap<>(); + Map ops = new LinkedHashMap<>(); + dropTypesBlock.put("ops", ops); + for (CqlType type : model.getTypes()) { + ops.put( + namer.nameFor(type, "optype", "drop-type", "blockname", blockname), + Map.of( + "simple", "drop type " + type.getKeyspace() + "." 
+ type.getName() + ";", + "timeout", timeouts.get("drop") + ) + ); + } + return dropTypesBlock; + } + + private Map genDropKeyspacesOpTemplates(CqlModel model, String blockname) { + Map dropTypesBlock = new LinkedHashMap<>(); + Map ops = new LinkedHashMap<>(); + dropTypesBlock.put("ops", ops); + for (CqlType type : model.getTypes()) { + ops.put( + namer.nameFor(type, "optype", "drop-keyspace", "blockname", blockname), + Map.of( + "simple", "drop keyspace " + type.getKeyspace() + ";", + "timeout", timeouts.get("drop") + ) + ); + } + return dropTypesBlock; + } + + + private Map genTruncateTablesOpTemplates(CqlModel model, String blockname) { Map truncateblock = new LinkedHashMap<>(); Map ops = new LinkedHashMap<>(); truncateblock.put("ops", ops); for (CqlTable table : model.getTableDefs()) { ops.put( - namer.nameFor(table, "optype", "truncate"), - "truncate " + table.getKeySpace() + "." + table.getName() + ";" + namer.nameFor(table, "optype", "truncate", "blockname", blockname), + Map.of( + "simple", "truncate " + table.getFullName() + ";", + "timeout", timeouts.get("truncate") + ) + ); } return truncateblock; } - private Map genKeyspacesSchemaBlock(CqlModel model) { + private Map genCreateKeyspacesOpTemplates(CqlModel model, String blockname) { Map schemablock = new LinkedHashMap<>(); Map ops = new LinkedHashMap<>(); for (CqlKeyspace ks : model.getKeyspacesByName().values()) { - ops.put("create-keyspace-" + ks.getName(), ks.getRefddl() + ";"); + ops.put( + namer.nameFor(ks, "optype", "create", "blockname", blockname), + Map.of( + "simple", genKeyspaceDDL(ks), + "timeout", timeouts.get("create") + ) + ); } schemablock.put("ops", ops); return schemablock; } - private Map genTablesSchemaBlock(CqlModel model) { + private Map genCreateTypesOpTemplates(CqlModel model, String blockname) { + Map blockdata = new LinkedHashMap<>(); + Map ops = new LinkedHashMap<>(); + blockdata.put("ops", ops); + + for (String keyspace : model.getTypesByKeyspaceAndName().keySet()) { + for (CqlType type : model.getTypesByKeyspaceAndName().get(keyspace).values()) { + ops.put( + namer.nameFor(type, "optype", "create", "blockname", blockname), + Map.of( + "simple", genTypeDDL(type), + "timeout", timeouts.get("create") + ) + ); + } + } + return blockdata; + + } + + private String genKeyspaceDDL(CqlKeyspace keyspace) { + return """ + create keyspace KEYSPACE + with replication = {REPLICATION}DURABLEWRITES?; + """ + .replace("KEYSPACE", keyspace.getName()) + .replace("REPLICATION", keyspace.getReplicationData()) + .replace("DURABLEWRITES?", keyspace.isDurableWrites() ? "" : "\n and durable writes = false") + ; + } + + private Map genCreateTablesOpTemplates(CqlModel model, String blockname) { Map schemablock = new LinkedHashMap<>(); Map ops = new LinkedHashMap<>(); for (String ksname : model.getTablesByNameByKeyspace().keySet()) { for (CqlTable cqltable : model.getTablesByNameByKeyspace().get(ksname).values()) { - ops.put("create-table-" + ksname + "." + cqltable.getName(), cqltable.getRefDdl() + ":"); + if (elideUnusedTables && totalRatioFor(cqltable) == 0.0d) { + logger.info("eliding table " + ksname + "." 
+ cqltable.getName() + " since its total op ratio was " + totalRatioFor(cqltable)); + continue; + } + ops.put( + namer.nameFor(cqltable, "optype", "create", "blockname", blockname), + Map.of( + "simple", genTableDDL(cqltable), + "timeout", timeouts.get("create") + ) + ); } } @@ -489,4 +757,74 @@ public class CGWorkloadExporter { } + private String genTypeDDL(CqlType type) { + return """ + create type KEYSPACE.TYPENAME ( + TYPEDEF + ); + """ + .replace("KEYSPACE", type.getKeyspace()) + .replace("TYPENAME", type.getName()) + .replace("TYPEDEF", type.getFields().entrySet().stream() + .map(entry -> entry.getKey() + " " + entry.getValue()).collect(Collectors.joining(",\n"))); + } + + private Object genTableDDL(CqlTable cqltable) { + if (cqltable.isCompactStorage()) { + logger.warn("COMPACT STORAGE is not supported, eliding this option for table '" + cqltable.getFullName() + "'"); + } + + return """ + create table if not exists KEYSPACE.TABLE ( + COLUMN_DEFS, + primary key (PRIMARYKEY) + )CLUSTERING; + """ + .replace("KEYSPACE", cqltable.getKeySpace()) + .replace("TABLE", cqltable.getName()) + .replace("COLUMN_DEFS", genTableColumnDDL(cqltable)) + .replace("PRIMARYKEY", genPrimaryKeyDDL(cqltable)) + .replace("CLUSTERING", genTableClusteringOrderDDL(cqltable)); + + } + + private String genPrimaryKeyDDL(CqlTable cqltable) { + StringBuilder sb = new StringBuilder("("); + for (String partitionKey : cqltable.getPartitionKeys()) { + sb.append(partitionKey).append(", "); + } + sb.setLength(sb.length() - ", ".length()); + sb.append(")"); + for (String clusteringColumn : cqltable.getClusteringColumns()) { + sb.append(", ").append(clusteringColumn); + } + return sb.toString(); + } + + private String genTableClusteringOrderDDL(CqlTable cqltable) { + if (cqltable.getClusteringOrders().size() == 0) { + return ""; + } else { + StringBuilder sb = new StringBuilder(" with clustering order by (\n"); + for (int i = 0; i < cqltable.getClusteringOrders().size(); i++) { + sb.append(cqltable.getClusteringColumns().get(i)); + sb.append(" "); + sb.append(cqltable.getClusteringOrders().get(i)); + sb.append(",\n"); + } + if (sb.length() > 0) { + sb.setLength(sb.length() - ",\n".length()); + } + sb.append(")"); + return sb.toString(); + } + } + + private String genTableColumnDDL(CqlTable cqltable) { + return cqltable.getColumnDefinitions().stream() + .map(cd -> cd.getName() + " " + cd.getTrimmedTypedef()) + .collect(Collectors.joining(",\n")); + } + + } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/Binding.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/Binding.java index 70658849d..19b6a6992 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/Binding.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/Binding.java @@ -16,6 +16,37 @@ package io.nosqlbench.converters.cql.exporters.binders; -public record Binding(String name, String recipe) { +public class Binding { + String name; + String recipe; + public Binding(String name, String recipe) { + this.name = name; + this.recipe = recipe; + } + + public Binding withPreFunctions(String... 
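Tracing genTableDDL, genPrimaryKeyDDL and genTableClusteringOrderDDL above, the hypothetical ks1.events table used earlier would come out roughly as the statement below (whitespace approximated; not verbatim generator output):

final class TableDdlShape {
    // Approximate shape of the DDL emitted by genTableDDL for a table with partition key
    // (id, bucket), clustering column ts ordered DESC, and one payload column.
    static final String EXPECTED = """
        create table if not exists ks1.events (
        id uuid,
        bucket int,
        ts timestamp,
        body text,
        primary key ((id, bucket), ts)
        ) with clustering order by (
        ts DESC);
        """;
}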
prefixes) { + StringBuilder sb = new StringBuilder(); + for (String prefix : prefixes) { + String toAdd=prefix.trim(); + if (!toAdd.endsWith(";")) { + toAdd+=";"; + } + sb.append(toAdd); + } + sb.append(recipe); + return new Binding(getName(),sb.toString()); + } + + public String getRecipe() { + return recipe; + } + + public String getName() { + return name; + } + + public Binding withNameIncrement(long l) { + return new Binding(name+ l,recipe); + } } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/BindingsAccumulator.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/BindingsAccumulator.java index 877a6ee7b..0ef8a172f 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/BindingsAccumulator.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/BindingsAccumulator.java @@ -31,34 +31,51 @@ public class BindingsAccumulator { private final NamingFolio namer; private final List libraries; private final Map accumulated = new LinkedHashMap<>(); + private final Map accumulatedByRecipe = new LinkedHashMap<>(); private final NamingStyle namingStyle = NamingStyle.SymbolicType; - LinkedHashMap counts = new LinkedHashMap<>(); + private final LinkedHashMap counts = new LinkedHashMap<>(); + private long enumeration=0L; public BindingsAccumulator(NamingFolio namer, List libraries) { this.namer = namer; this.libraries = libraries; } - public Binding forColumn(CqlColumnDef def, String... extra) { + public Binding forColumn(CqlColumnDef def, String... prefixes) { + return forColumn(def,Map.of(), prefixes); + } + public Binding forColumn(CqlColumnDef def, Map extra, String... prefixes) { for (BindingsLibrary library : libraries) { - Optional binding = switch (namingStyle) { + Optional optionalBinding = switch (namingStyle) { case FullyQualified -> this.resolveFullyQualifiedBinding(def, extra); case SymbolicType -> this.resolveSymbolicBinding(def, extra); case CondensedKeyspace -> this.resolvedCondensedBinding(def, extra); }; - if (binding.isPresent()) { - registerBinding(binding.get()); - return binding.get(); + + if (optionalBinding.isPresent()) { + Binding binding = optionalBinding.get(); + + if (prefixes.length>0) { + binding = binding.withPreFunctions(prefixes).withNameIncrement(++enumeration); + String extant = accumulatedByRecipe.get(binding.getRecipe()); + if (extant!=null) { + binding= new Binding(extant,accumulated.get(extant)); + } + } + + registerBinding(binding); + return binding; } } throw new UnresolvedBindingException(def); } - private Optional resolvedCondensedBinding(CqlColumnDef def, String[] extra) { + + private Optional resolvedCondensedBinding(CqlColumnDef def, Map extra) { throw new RuntimeException("Implement me!"); } - private Optional resolveSymbolicBinding(CqlColumnDef def, String[] extra) { + private Optional resolveSymbolicBinding(CqlColumnDef def, Map extra) { for (BindingsLibrary library : libraries) { Optional binding = library.resolveBindingsFor(def); if (binding.isPresent()) { @@ -69,13 +86,13 @@ public class BindingsAccumulator { } - private Optional resolveFullyQualifiedBinding(CqlColumnDef def, String[] extra) { + private Optional resolveFullyQualifiedBinding(CqlColumnDef def, Map extra) { for (BindingsLibrary library : libraries) { Optional bindingRecipe = library.resolveBindingsFor(def); if (bindingRecipe.isPresent()) { Binding found = bindingRecipe.get(); String name = namer.nameFor(def, extra); - Binding renamedBinding = new 
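Binding.withPreFunctions and withNameIncrement above compose recipes textually, which is how dividedBinding prepends its Mod(...) step. A small usage example with a placeholder recipe string:

import io.nosqlbench.converters.cql.exporters.binders.Binding;

final class BindingCompositionExample {
    public static void main(String[] args) {
        Binding base = new Binding("user_id", "SampleRecipe()");   // placeholder recipe text
        Binding divided = base.withPreFunctions("Mod(10000L)")     // recipe: "Mod(10000L);SampleRecipe()"
                              .withNameIncrement(3);               // name:   "user_id3"
        System.out.println(divided.getName());   // user_id3
        System.out.println(divided.getRecipe()); // Mod(10000L);SampleRecipe()
    }
}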
Binding(name,found.recipe()); + Binding renamedBinding = new Binding(name,found.getRecipe()); return Optional.of(renamedBinding); } } @@ -83,8 +100,9 @@ public class BindingsAccumulator { } private void registerBinding(Binding newBinding) { - String name = newBinding.name(); - accumulated.put(name, newBinding.recipe()); + String name = newBinding.getName(); + accumulated.put(name, newBinding.getRecipe()); + accumulatedByRecipe.put(newBinding.getRecipe(), name); counts.put(name, counts.get(name)==null? 1 : counts.get(name)+1); } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/NamingFolio.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/NamingFolio.java index 9bbcb4f50..6dc0921a7 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/NamingFolio.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/binders/NamingFolio.java @@ -42,7 +42,7 @@ public class NamingFolio { private final Map graph = new LinkedHashMap<>(); private final CGElementNamer namer; - public final static String DEFAULT_NAMER_SPEC = "[COLUMN]-[TYPEDEF-][TABLE][-KEYSPACE]"; + public final static String DEFAULT_NAMER_SPEC = "[BLOCKNAME-][OPTYPE-][COLUMN]-[TYPEDEF-][TABLE][-KEYSPACE]"; NamingStyle namingStyle = NamingStyle.SymbolicType; public NamingFolio(String namerspec) { @@ -75,6 +75,12 @@ public class NamingFolio { return name; } + public String nameFor(Labeled labeled, Map fields) { + Map labelsPlus = labeled.getLabelsAnd(fields); + String name = namer.apply(labelsPlus); + return name; + + } public void informNamerOfAllKnownNames(CqlModel model) { for (CqlTable table : model.getTableDefs()) { diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/exporter.yaml b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/exporter.yaml index 7b8ed2e5c..06fe48551 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/exporter.yaml +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/exporter.yaml @@ -1,8 +1,9 @@ -text_transformers: - - class: CGRegexReplacer - config: - replacers: - - /a/b/ +# unimplemented +#text_transformers: +# - class: CGRegexReplacer +# config: +# replacers: +# - /a/b/ model_transformers: @@ -30,8 +31,9 @@ model_transformers: # # Removes Keyspace DDL statements # - class: CGKeySpaceDDLRemover - # Adds IF NOT EXIST to all DDL - - class: CGIfNotExistsInjector +# This is now a generator behavior that is done automatically +# # Adds IF NOT EXIST to all DDL +# - class: CGIfNotExistsInjector # Replaces UDTs with blobs until we have full UDT generation capability - class: CGUdtReplacer @@ -44,15 +46,59 @@ model_transformers: # Uses nodetool histogram stats to weight reads and writes over all ops - class: CGRatioCalculator -# This needs support of the AST, without relying on the refddl text images -# In other words, it doesn't work reliably with the substitution methods -# Don't enable it until the next revision -# # replaces names of keyspaces, tables, and columns with generated values -# - class: CGNameObfuscator + # replaces names of keyspaces, tables, and columns with generated values + - class: CGNameObfuscator -naming_template: "[OPTYPE-][COLUMN-][TYPEDEF-][TABLE!]-[KEYSPACE]" +naming_template: "[OPTYPE-][KEYSPACE-][TYPE-][NAME]" -partition_multiplier: 30 +# for more distinction in metric names if needed: +#naming_template: "[BLOCKNAME-][OPTYPE-][KEYSPACE-][TYPE-][NAME]" + +# how many partitions the 
primary key is limited to, compared to the value +# found in the node tool stats. If you have the actual value from complete +# statistics, this can be 1.0, but it is generally not accurate to take +# stats from one node. This allows for sizing up those estimates according +# to anecdotal information. + +partition_multiplier: 30.0 + +# Timeouts for each operation category. These are specified in terms of seconds. +# fractional values are allowed. If not specified, all operations will default to +# using 10 seconds. Internally, these are configured as milliseconds. +timeouts: + create: 60.0 + truncate: 900.0 + drop: 900.0 + scan: 30.0 + select: 10.0 + insert: 10.0 + update: 10.0 + delete: 10.0 + +# if this is set, and the total ratio for a table is 0.0D, then it will +# not be added to the table DDL +elide_unused_tables: false + +# future use, not active right now +blockplan: + # not needed when tags=block:'schema.*' + # schema: schema-keyspaces, schema-tables, schema-types + schema-keyspaces: schema-keyspaces + schema-tables: schema-tables + schema-types: schema-types + truncate-tables: truncate-tables + drop-types: drop-types + drop-tables: drop-tables + drop-keyspaces: drop-keyspaces + # not needed when tags=block:'drop.*' + # drop: drop-types, drop-tables, drop-keyspaces + rampup: insert + main-insert: insert + main-select: select + main-scan: scan-10 + main-update: update + # not needed when tags=block:'main.*' + # main: insert, select, scan-10, update diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGCachingNameRemapper.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGCachingNameRemapper.java index 5d7f55b5d..0234a4280 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGCachingNameRemapper.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGCachingNameRemapper.java @@ -17,7 +17,7 @@ package io.nosqlbench.converters.cql.exporters.transformers; import io.nosqlbench.api.config.NBNamedElement; -import io.nosqlbench.converters.cql.cqlast.CqlTable; +import io.nosqlbench.api.labels.Labeled; import io.nosqlbench.virtdata.library.basics.shared.from_long.to_string.Combinations; import java.util.HashMap; @@ -37,25 +37,24 @@ public class CGCachingNameRemapper { this.namefunc = function; } - public synchronized String nameForType(String type, String originalName) { - String canonical = type+"__"+originalName; - return getOrCreateName(canonical); + public synchronized String nameForType(String type, String originalName, String prefix) { + String canonical = type+"_"+originalName; + return getOrCreateName(canonical, prefix); } - public synchronized String nameFor(NBNamedElement element) { - String canonical = element.getClass().getSimpleName()+"--"+element.getName(); - return getOrCreateName(canonical); + public synchronized String nameFor(NBNamedElement element, String prefix) { + String canonical = element.getClass().getSimpleName()+"-"+element.getName(); + return getOrCreateName(canonical, prefix); } - private String getOrCreateName(String canonical) { + private String getOrCreateName(String canonical, String prefix) { if (!remapped.containsKey(canonical)) { - String newname = namefunc.apply(index++); + String newname = prefix+namefunc.apply(index++); remapped.put(canonical,newname); } return remapped.get(canonical); } - - public Function mapperForType(CqlTable cqlTable) { - return in -> 
this.nameForType(cqlTable.getClass().getSimpleName(),in); + public Function mapperForType(Labeled cqlTable, String prefix) { + return in -> this.nameForType(cqlTable.getClass().getSimpleName(),in, prefix); } } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGIfNotExistsInjector.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGIfNotExistsInjector.java index e0be8df8f..3ea40b407 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGIfNotExistsInjector.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGIfNotExistsInjector.java @@ -16,37 +16,37 @@ package io.nosqlbench.converters.cql.exporters.transformers; -import io.nosqlbench.converters.cql.cqlast.CqlKeyspace; import io.nosqlbench.converters.cql.cqlast.CqlModel; -import io.nosqlbench.converters.cql.cqlast.CqlTable; -import io.nosqlbench.converters.cql.cqlast.CqlType; +/** + * @deprecated Superseded by direct rendering from AST in generator + */ public class CGIfNotExistsInjector implements CGModelTransformer { @Override public CqlModel apply(CqlModel model) { - for (CqlKeyspace keyspace : model.getKeyspaceDefs()) { - keyspace.setRefDdl(keyspace.getRefddl().replaceAll( - "(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)", - "$1IF NOT EXISTS " - )); - } - for (CqlTable table : model.getTableDefs()) { - String refddl = table.getRefDdl(); - String replaced = refddl.replaceAll( - "(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)", - "$1IF NOT EXISTS " - ); - - table.setRefDdl(replaced); - } - for (CqlType type : model.getTypes()) { - type.setRefddl(type.getRefDdl().replaceAll( - "(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)", - "$1IF NOT EXISTS " - )); - - } +// for (CqlKeyspace keyspace : model.getKeyspaceDefs()) { +// keyspace.setRefDdl(keyspace.getRefddl().replaceAll( +// "(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)", +// "$1IF NOT EXISTS " +// )); +// } +// for (CqlTable table : model.getTableDefs()) { +// String refddl = table.getRefDdl(); +// String replaced = refddl.replaceAll( +// "(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)", +// "$1IF NOT EXISTS " +// ); +// +// table.setRefDdl(replaced); +// } +// for (CqlType type : model.getTypes()) { +// type.setRefddl(type.getRefDdl().replaceAll( +// "(?m)(?s)(?i)(\\s*CREATE (TABLE|KEYSPACE|TYPE) +)(?!IF NOT EXISTS)", +// "$1IF NOT EXISTS " +// )); +// +// } return model; } } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGNameObfuscator.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGNameObfuscator.java index 9ffd1ce4f..5d8f6509f 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGNameObfuscator.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGNameObfuscator.java @@ -38,19 +38,22 @@ public class CGNameObfuscator implements CGModelTransformer { public CqlModel apply(CqlModel model) { for (String keyspaceName : model.getAllKnownKeyspaceNames()) { - String newKeyspaceName = remapper.nameForType("keyspace",keyspaceName); + String newKeyspaceName = remapper.nameForType("keyspace",keyspaceName,"ks_"); model.renamekeyspace(keyspaceName,newKeyspaceName); } + for (CqlTable cqlTable : model.getTableDefs()) { String tablename = cqlTable.getName(); - String newTableName = 
remapper.nameFor(cqlTable); - model.renameTable(cqlTable.getKeySpace(), tablename, newTableName); - cqlTable.renameColumns(remapper.mapperForType(cqlTable)); + String newTableName = remapper.nameFor(cqlTable,"tbl_"); + model.renameTable(cqlTable, newTableName); + cqlTable.renameColumns(remapper.mapperForType(cqlTable, "col_")); } + for (CqlType type : model.getTypes()) { String typeName = type.getName(); - String newTypeName = remapper.nameFor(type); + String newTypeName = remapper.nameFor(type,"typ_"); model.renameType(type.getKeyspace(),typeName,newTypeName); + type.renameColumns(remapper.mapperForType(type, "typ")); } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGReplicationSettingInjector.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGReplicationSettingInjector.java index 8cb54177a..091067432 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGReplicationSettingInjector.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGReplicationSettingInjector.java @@ -27,7 +27,7 @@ public class CGReplicationSettingInjector implements CGModelTransformer, CGTrans @Override public CqlModel apply(CqlModel model) { for (CqlKeyspace keyspace : model.getKeyspaceDefs()) { - keyspace.setRefReplDdl(this.replicationFields); + keyspace.setReplicationData(this.replicationFields); } return model; } diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGUdtReplacer.java b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGUdtReplacer.java index d7cb502cc..5d4ebbbfb 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGUdtReplacer.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/converters/cql/exporters/transformers/CGUdtReplacer.java @@ -29,14 +29,10 @@ public class CGUdtReplacer implements CGModelTransformer { List toReplace = model.getTypes().stream().map(t -> t.getKeyspace() + "." + t.getName()).toList(); for (CqlTable table : model.getTableDefs()) { for (CqlColumnDef coldef : table.getColumnDefinitions()) { - String coldefDdl = coldef.getDefinitionDdl(); + String typedef = coldef.getTrimmedTypedef(); for (String searchFor : toReplace) { - if (coldefDdl.contains(searchFor)) { - String typedef = coldef.getType(); - coldef.setType("blob"); - String replaced = coldef.getDefinitionDdl().replace(typedef, "blob"); - coldef.setDefinitionRefDdl(replaced); - table.setRefDdl(table.getRefDdl().replace(typedef,"blob")); + if (typedef.contains(searchFor)) { + coldef.setTypeDef("blob"); } } } diff --git a/adapter-cqld4/src/main/resources/cqld4.md b/adapter-cqld4/src/main/resources/cqld4.md index f134dead2..138c931b7 100644 --- a/adapter-cqld4/src/main/resources/cqld4.md +++ b/adapter-cqld4/src/main/resources/cqld4.md @@ -131,6 +131,97 @@ names for the classic form have not changed. gremlin: >- g.V().hasLabel("device").has("deviceid", UUID.fromString('{deviceid})') +## CQL Op Template - Optional Fields + +If any of these are provided as op template fields or as op params, or +as activity params, then they will have the described effect. The calls +to set these parameters on an individual statement are only incurred if +they are provided. Otherwise, defaults are used. These options can be +applied to any of the statement forms above. 
+
+```yaml
+params:
+
+  # Set the consistency level for this statement
+  # For Astra, use only LOCAL_QUORUM
+  # Otherwise, one of
+  # ALL|EACH_QUORUM|QUORUM|LOCAL_QUORUM|ONE|TWO|THREE|LOCAL_ONE|ANY
+  cl: LOCAL_QUORUM
+  # or consistency_level: ...
+
+  # Set the serial consistency level for this statement.
+  # Note, you probably don't need this unless you are using LWTs
+  # SERIAL ~ QUORUM, LOCAL_SERIAL ~ LOCAL_QUORUM
+  scl: LOCAL_SERIAL
+  # or serial_consistency_level: ...
+
+  # Set a statement as idempotent. This is important for determining
+  # when ops can be trivially retried with no concern for unexpected
+  # mutation in the event that they succeed multiple times.
+  # true or false
+  idempotent: false
+
+  # Set the timeout for the operation, from the driver's perspective,
+  # in seconds. "2 seconds" is the default, but DDL, truncate, or drop
+  # statements will generally need more. If you want milliseconds, just use
+  # fractional seconds, like 0.500
+  timeout: 2.0
+
+  ## The following options are meant for advanced testing scenarios only,
+  ## and are not generally meant to be used in typical application-level,
+  ## data model, performance, or scale testing. These expose properties
+  ## which should not be set for general use. They allow for very specific
+  ## scenarios to be constructed for core system-level testing.
+  ## Some of them will only work with specially provided bindings which
+  ## can provide the correct instantiated object type.
+
+  # replace the payload with a map of String->ByteBuffer for this operation
+  # type: Map<String,ByteBuffer>
+  custom_payload: ...
+
+  # set an instantiated ExecutionProfile to be used for this operation
+  # type: com.datastax.oss.driver.api.core.config.DriverExecutionProfile
+  execution_profile: ...
+
+  # set a named execution profile to be used for this operation
+  # type: String
+  execution_profile_name: ...
+
+  # set a resolved target node to be used for this operation
+  # type: com.datastax.oss.driver.api.core.metadata.Node
+  node: ...
+
+  # set the timestamp to be used as the "now" reference for this operation
+  # type: int
+  now_in_seconds: ...
+
+  # set the page size for this operation
+  # type: int
+  page_size: ...
+
+  # set the query timestamp for this operation (~ USING TIMESTAMP)
+  # type: long
+  query_timestamp: ...
+
+  # set the routing key for this operation, as a single ByteBuffer
+  # type: ByteBuffer
+  routing_key: ...
+
+  # set the routing key for this operation, as an array of ByteBuffers
+  # type: ByteBuffer[]
+  routing_keys: ...
+
+  # set the routing token for this operation
+  # type: com.datastax.oss.driver.api.core.metadata.token.Token
+  routing_token: ...
+
+  # enable (or disable) tracing for this operation.
+  # This should be used with great care, as tracing imposes overhead
+  # far beyond that of most point queries or writes. Use it sparingly,
+  # or only for functional investigation.
+  # type: boolean
+  tracing: ...
+```

## Driver Cache

@@ -140,6 +231,8 @@ simply set a `space` parameter in your op templates, with a dynamic value.
__WARNING__: If you use the driver cache feature, be aware that creating a large number of driver instances will be very expensive. Generally driver instances are meant to be initialized and then shared throughout the life-cycle of an application process.
+Thus, if you are doing multi-instance driver testing, it is best to use binding
+functions for the `space` parameter which have bounded cardinality per host.
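As a concrete sketch of the advice above (the names `space_selector`, `rw_key`, and `example_keyspace.example_table` are illustrative, not part of this changeset), a `space` parameter driven by a modulo-bounded binding keeps the number of distinct driver sessions per host fixed:

```yaml
bindings:
  rw_key: ToString()
  # Mod(4L) bounds the value to 0..3, so at most four driver instances
  # (spaces) are ever created on a host, regardless of cycle count.
  space_selector: Mod(4L); ToString();

ops:
  example-select:
    prepared: |
      select * from example_keyspace.example_table where key={rw_key}
    space: '{space_selector}'
```

With a bounded function such as `Mod(4L)`, the space name cycles through a small set of values, which exercises the multi-instance code path without paying driver initialization costs on every cycle.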
diff --git a/adapter-cqld4/src/test/java/io/nosqlbench/converters/cql/exporters/CGWorkloadExporterTest.java b/adapter-cqld4/src/test/java/io/nosqlbench/converters/cql/exporters/CGWorkloadExporterTest.java new file mode 100644 index 000000000..52f291f9b --- /dev/null +++ b/adapter-cqld4/src/test/java/io/nosqlbench/converters/cql/exporters/CGWorkloadExporterTest.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.converters.cql.exporters; + +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +public class CGWorkloadExporterTest { + + @Test + public void testQuantizer() { + assertThat(CGWorkloadExporter.quantizeModuloByMagnitude(23L,1)).isEqualTo(20L); + assertThat(CGWorkloadExporter.quantizeModuloByMagnitude(234234343L,2)).isEqualTo(230000000L); + assertThat(CGWorkloadExporter.quantizeModuloByMagnitude(275234343L,3)).isEqualTo(275000000L); + assertThat(CGWorkloadExporter.quantizeModuloByMagnitude(275734343L,3)).isEqualTo(276000000L); + } + +} diff --git a/nb-api/src/main/java/io/nosqlbench/api/labels/Labeled.java b/nb-api/src/main/java/io/nosqlbench/api/labels/Labeled.java index eac9dc113..e4ca0b446 100644 --- a/nb-api/src/main/java/io/nosqlbench/api/labels/Labeled.java +++ b/nb-api/src/main/java/io/nosqlbench/api/labels/Labeled.java @@ -25,10 +25,15 @@ public interface Labeled { default Map getLabelsAnd(String... keyvalues) { LinkedHashMap map = new LinkedHashMap<>(getLabels()); for (int idx = 0; idx < keyvalues.length; idx+=2) { - map.put(keyvalues[0],keyvalues[1]); + map.put(keyvalues[idx],keyvalues[idx+1]); } return map; } + default Map getLabelsAnd(Map extra) { + LinkedHashMap map = new LinkedHashMap<>(getLabels()); + map.putAll(extra); + return map; + } static MapLabels forMap(Map labels) { return new MapLabels(labels); diff --git a/virtdata-lang/pom.xml b/virtdata-lang/pom.xml index d8edda6ee..34d859c3d 100644 --- a/virtdata-lang/pom.xml +++ b/virtdata-lang/pom.xml @@ -78,7 +78,7 @@ - false + false
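The expectations in CGWorkloadExporterTest above amount to rounding a value to a given number of significant digits, rounding half-up at the first dropped digit. A minimal sketch that satisfies those assertions (the class name is hypothetical, and this is not necessarily the actual CGWorkloadExporter implementation) could look like:

```java
public final class QuantizeSketch {

    // Sketch only: keep 'significand' significant digits of 'value', e.g.
    // (23, 1) -> 20, (234234343, 2) -> 230000000, (275734343, 3) -> 276000000.
    public static long quantizeModuloByMagnitude(long value, int significand) {
        double digits = Math.ceil(Math.log10(value));        // approximate digit count of value
        double scale = Math.pow(10, digits - significand);   // magnitude of the digits to drop
        return (long) (Math.round(value / scale) * scale);   // round half-up at that magnitude
    }

    public static void main(String[] args) {
        System.out.println(quantizeModuloByMagnitude(275734343L, 3)); // 276000000
    }
}
```

The rounding direction is what distinguishes the last two assertions: 275234343 quantizes down to 275000000, while 275734343 rounds up to 276000000.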