updates to cql workload generator

Jonathan Shook 2022-07-08 10:20:33 -05:00
parent ce070d2c19
commit e4f7e7e771
51 changed files with 2801 additions and 118 deletions

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

View File

@ -0,0 +1,338 @@
LR_BRACKET=1
RR_BRACKET=2
LC_BRACKET=3
RC_BRACKET=4
LS_BRACKET=5
RS_BRACKET=6
COMMA=7
SEMI=8
COLON=9
DOT=10
STAR=11
DIVIDE=12
MODULE=13
PLUS=14
MINUSMINUS=15
MINUS=16
DQUOTE=17
SQUOTE=18
OPERATOR_EQ=19
OPERATOR_LT=20
OPERATOR_GT=21
OPERATOR_LTE=22
OPERATOR_GTE=23
K_ADD=24
K_AGGREGATE=25
K_ALL=26
K_ALLOW=27
K_ALTER=28
K_AND=29
K_ANY=30
K_APPLY=31
K_AS=32
K_ASC=33
K_AUTHORIZE=34
K_BATCH=35
K_BEGIN=36
K_BY=37
K_CALLED=38
K_CLUSTERING=39
K_COLUMNFAMILY=40
K_COMPACT=41
K_CONSISTENCY=42
K_CONTAINS=43
K_CREATE=44
K_CUSTOM=45
K_DELETE=46
K_DESC=47
K_DESCRIBE=48
K_DISTINCT=49
K_DROP=50
K_DURABLE_WRITES=51
K_EACH_QUORUM=52
K_ENTRIES=53
K_EXECUTE=54
K_EXISTS=55
K_FALSE=56
K_FILTERING=57
K_FINALFUNC=58
K_FROM=59
K_FULL=60
K_FUNCTION=61
K_FUNCTIONS=62
K_GRANT=63
K_IF=64
K_IN=65
K_INDEX=66
K_INFINITY=67
K_INITCOND=68
K_INPUT=69
K_INSERT=70
K_INTO=71
K_IS=72
K_JSON=73
K_KEY=74
K_KEYS=75
K_KEYSPACE=76
K_KEYSPACES=77
K_LANGUAGE=78
K_LEVEL=79
K_LIMIT=80
K_LOCAL_ONE=81
K_LOCAL_QUORUM=82
K_LOGGED=83
K_LOGIN=84
K_MATERIALIZED=85
K_MODIFY=86
K_NAN=87
K_NORECURSIVE=88
K_NOSUPERUSER=89
K_NOT=90
K_NULL=91
K_OF=92
K_ON=93
K_ONE=94
K_OPTIONS=95
K_OR=96
K_ORDER=97
K_PARTITION=98
K_PASSWORD=99
K_PER=100
K_PERMISSION=101
K_PERMISSIONS=102
K_PRIMARY=103
K_QUORUM=104
K_RENAME=105
K_REPLACE=106
K_REPLICATION=107
K_RETURNS=108
K_REVOKE=109
K_ROLE=110
K_ROLES=111
K_SCHEMA=112
K_SELECT=113
K_SET=114
K_SFUNC=115
K_STATIC=116
K_STORAGE=117
K_STYPE=118
K_SUPERUSER=119
K_TABLE=120
K_THREE=121
K_TIMESTAMP=122
K_TO=123
K_TOKEN=124
K_TRIGGER=125
K_TRUE=126
K_TRUNCATE=127
K_TTL=128
K_TWO=129
K_TYPE=130
K_UNLOGGED=131
K_UPDATE=132
K_USE=133
K_USER=134
K_USING=135
K_UUID=136
K_VALUES=137
K_VIEW=138
K_WHERE=139
K_WITH=140
K_WRITETIME=141
K_ASCII=142
K_BIGINT=143
K_BLOB=144
K_BOOLEAN=145
K_COUNTER=146
K_DATE=147
K_DECIMAL=148
K_DOUBLE=149
K_FLOAT=150
K_FROZEN=151
K_INET=152
K_INT=153
K_LIST=154
K_MAP=155
K_SMALLINT=156
K_TEXT=157
K_TIMEUUID=158
K_TIME=159
K_TINYINT=160
K_TUPLE=161
K_VARCHAR=162
K_VARINT=163
CODE_BLOCK=164
STRING_LITERAL=165
DECIMAL_LITERAL=166
FLOAT_LITERAL=167
HEXADECIMAL_LITERAL=168
REAL_LITERAL=169
OBJECT_NAME=170
UUID=171
SPACE=172
SPEC_MYSQL_COMMENT=173
COMMENT_INPUT=174
LINE_COMMENT=175
'('=1
')'=2
'{'=3
'}'=4
'['=5
']'=6
','=7
';'=8
':'=9
'.'=10
'*'=11
'/'=12
'%'=13
'+'=14
'--'=15
'-'=16
'"'=17
'\''=18
'='=19
'<'=20
'>'=21
'<='=22
'>='=23
'ADD'=24
'AGGREGATE'=25
'ALL'=26
'ALLOW'=27
'ALTER'=28
'AND'=29
'ANY'=30
'APPLY'=31
'AS'=32
'ASC'=33
'AUTHORIZE'=34
'BATCH'=35
'BEGIN'=36
'BY'=37
'CALLED'=38
'CLUSTERING'=39
'COLUMNFAMILY'=40
'COMPACT'=41
'CONSISTENCY'=42
'CONTAINS'=43
'CREATE'=44
'CUSTOM'=45
'DELETE'=46
'DESC'=47
'DESCRIBE'=48
'DISTINCT'=49
'DROP'=50
'DURABLE_WRITES'=51
'EACH_QUORUM'=52
'ENTRIES'=53
'EXECUTE'=54
'EXISTS'=55
'FALSE'=56
'FILTERING'=57
'FINALFUNC'=58
'FROM'=59
'FULL'=60
'FUNCTION'=61
'FUNCTIONS'=62
'GRANT'=63
'IF'=64
'IN'=65
'INDEX'=66
'INFINITY'=67
'INITCOND'=68
'INPUT'=69
'INSERT'=70
'INTO'=71
'IS'=72
'JSON'=73
'KEY'=74
'KEYS'=75
'KEYSPACE'=76
'KEYSPACES'=77
'LANGUAGE'=78
'LEVEL'=79
'LIMIT'=80
'LOCAL_ONE'=81
'LOCAL_QUORUM'=82
'LOGGED'=83
'LOGIN'=84
'MATERIALIZED'=85
'MODIFY'=86
'NAN'=87
'NORECURSIVE'=88
'NOSUPERUSER'=89
'NOT'=90
'NULL'=91
'OF'=92
'ON'=93
'ONE'=94
'OPTIONS'=95
'OR'=96
'ORDER'=97
'PARTITION'=98
'PASSWORD'=99
'PER'=100
'PERMISSION'=101
'PERMISSIONS'=102
'PRIMARY'=103
'QUORUM'=104
'RENAME'=105
'REPLACE'=106
'REPLICATION'=107
'RETURNS'=108
'REVOKE'=109
'ROLE'=110
'ROLES'=111
'SCHEMA'=112
'SELECT'=113
'SET'=114
'SFUNC'=115
'STATIC'=116
'STORAGE'=117
'STYPE'=118
'SUPERUSER'=119
'TABLE'=120
'THREE'=121
'TIMESTAMP'=122
'TO'=123
'TOKEN'=124
'TRIGGER'=125
'TRUE'=126
'TRUNCATE'=127
'TTL'=128
'TWO'=129
'TYPE'=130
'UNLOGGED'=131
'UPDATE'=132
'USE'=133
'USER'=134
'USING'=135
'UUID'=136
'VALUES'=137
'VIEW'=138
'WHERE'=139
'WITH'=140
'WRITETIME'=141
'ASCII'=142
'BIGINT'=143
'BLOB'=144
'BOOLEAN'=145
'COUNTER'=146
'DATE'=147
'DECIMAL'=148
'DOUBLE'=149
'FLOAT'=150
'FROZEN'=151
'INET'=152
'INT'=153
'LIST'=154
'MAP'=155
'SMALLINT'=156
'TEXT'=157
'TIMEUUID'=158
'TIME'=159
'TINYINT'=160
'TUPLE'=161
'VARCHAR'=162
'VARINT'=163

View File

@ -77,6 +77,12 @@
<version>4.14.1</version>
</dependency>
<dependency>
<groupId>org.snakeyaml</groupId>
<artifactId>snakeyaml-engine</artifactId>
<version>2.3</version>
</dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>

View File

@ -16,20 +16,25 @@
package io.nosqlbench.converters.cql.cql.cqlast;
public class CqlField {
public class CqlColumnDef {
String name;
String type;
public CqlField(String type, String name) {
public CqlColumnDef(String type, String name) {
this.type = type;
this.name = name;
}
public String getName() {
return name;
}
public String getType() {
return type;
}
@Override
public String toString() {
return "CqlField{" +
"name='" + name + '\'' +
", type='" + type + '\'' +
'}';
return " " + this.name + " " + this.type + ",";
}
}

View File

@ -16,14 +16,9 @@
package io.nosqlbench.converters.cql.cql.cqlast;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
public class CqlKeyspace {
String keyspaceName= "";
List<CqlTable> tables = new ArrayList<>();
CqlTable lastAddedTable = null;
String refddl;
public CqlKeyspace() {
}
@ -32,29 +27,23 @@ public class CqlKeyspace {
this.keyspaceName=name;
}
public void addTable() {
lastAddedTable = new CqlTable();
tables.add(lastAddedTable);
public String getKeyspaceName() {
return this.keyspaceName;
}
public void addColumnDef(String type, String name) {
lastAddedTable.addcolumnDef(new CqlField(type, name));
}
public void setTableName(String tableName) {
lastAddedTable.setName(tableName);
}
public void addTableColumn(String type, String fieldName) {
lastAddedTable.addcolumnDef(type,fieldName);
public void setRefDdl(String refddl) {
this.refddl = refddl;
}
@Override
public String toString() {
return "keyspace:" + keyspaceName+"\n"+
" tables:\n"+
(tables.stream().map(Object::toString)
.map(s -> " "+s)
.collect(Collectors.joining("\n")));
return "CqlKeyspace{" +
"keyspaceName='" + keyspaceName + '\'' +
", refddl='" + refddl + '\'' +
'}';
}
public String getRefddl() {
return refddl;
}
}

View File

@ -0,0 +1,73 @@
package io.nosqlbench.converters.cql.cql.cqlast;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class CqlModel {
Map<String, CqlKeyspace> keyspaces = new LinkedHashMap<>();
Map<String, Map<String, CqlTable>> tables = new LinkedHashMap<>();
transient
CqlKeyspace keyspace = null;
transient
CqlTable table;
public void newKeyspace() {
keyspace = new CqlKeyspace();
}
public void saveKeyspace(String text,String refddl) {
keyspace.setKeyspaceName(text);
keyspace.setRefDdl(refddl);
this.keyspaces.put(text, keyspace);
keyspace=null;
}
public void newTable() {
table = new CqlTable();
}
public void saveTable(String keyspace, String text, String refddl) {
table.setKeyspace(keyspace);
table.setName(text);
table.setRefDdl(refddl);
this.tables.computeIfAbsent(keyspace, ks->new LinkedHashMap<>()).put(text, table);
table = null;
}
public void saveColumnDefinition(String coltype, String colname) {
table.addcolumnDef(coltype, colname);
}
public Map<String, CqlKeyspace> getKeyspaces() {
return keyspaces;
}
public Map<String, Map<String, CqlTable>> getTablesByKeyspace() {
return tables;
}
public List<CqlTable> getAllTables() {
return tables.values().stream().flatMap(m->m.values().stream()).toList();
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
for (String ks : keyspaces.keySet()) {
CqlKeyspace keyspace = keyspaces.get(ks);
sb.append("keyspace '").append(keyspace.getKeyspaceName()).append("':\n");
sb.append(keyspace.toString()).append("\n");
tables.getOrDefault(ks,Map.of()).values().stream()
.forEach(table -> {
sb.append("table '").append(table.getTableName()).append("':\n");
sb.append(table.toString());
});
}
return sb.toString();
}
}

View File

@ -18,55 +18,63 @@ package io.nosqlbench.converters.cql.cql.cqlast;
import io.nosqlbench.converters.cql.generated.CqlParser;
import io.nosqlbench.converters.cql.generated.CqlParserBaseListener;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.misc.Interval;
import java.util.ArrayList;
import java.util.List;
public class CqlAstBuilder extends CqlParserBaseListener {
List<CqlKeyspace> keyspaces = new ArrayList<>();
CqlKeyspace lastKeyspace = null;
public class CqlModelBuilder extends CqlParserBaseListener {
CqlModel model = new CqlModel();
@Override
public void enterKeyspace(CqlParser.KeyspaceContext ctx) {
lastKeyspace = new CqlKeyspace();
this.keyspaces.add(lastKeyspace);
public void enterCreateKeyspace(CqlParser.CreateKeyspaceContext ctx) {
model.newKeyspace();
}
@Override
public void exitKeyspace(CqlParser.KeyspaceContext ctx) {
lastKeyspace.setKeyspaceName(ctx.OBJECT_NAME().getSymbol().getText());
}
@Override
public void enterCreateTable(CqlParser.CreateTableContext ctx) {
lastKeyspace.addTable();
}
@Override
public void exitCreateTable(CqlParser.CreateTableContext ctx) {
lastKeyspace.setTableName(ctx.table().OBJECT_NAME().getSymbol().getText());
}
@Override
public void enterColumnDefinition(CqlParser.ColumnDefinitionContext ctx) {
System.out.println("here");
}
@Override
public void exitColumnDefinition(CqlParser.ColumnDefinitionContext ctx) {
System.out.println("here");
lastKeyspace.addTableColumn(
ctx.dataType().dataTypeName().getText(),
ctx.column().OBJECT_NAME().getSymbol().getText()
public void exitCreateKeyspace(CqlParser.CreateKeyspaceContext ctx) {
model.saveKeyspace(
ctx.keyspace().getText(),
textOf(ctx)
);
}
@Override
public void enterCreateTable(CqlParser.CreateTableContext ctx) {
model.newTable();
}
@Override
public void exitCreateTable(CqlParser.CreateTableContext ctx) {
model.saveTable(
ctx.keyspace().OBJECT_NAME().getText(),
ctx.table().OBJECT_NAME().getText(),
textOf(ctx)
);
}
private String textOf(ParserRuleContext ctx) {
int startIndex = ctx.start.getStartIndex();
int stopIndex = ctx.stop.getStopIndex();
Interval interval = Interval.of(startIndex, stopIndex);
String text = ctx.start.getInputStream().getText(interval);
return text;
}
@Override
public void enterColumnDefinition(CqlParser.ColumnDefinitionContext ctx) {
}
@Override
public void exitColumnDefinition(CqlParser.ColumnDefinitionContext ctx) {
model.saveColumnDefinition(ctx.dataType().getText(),ctx.column().getText());
}
@Override
public String toString() {
return "CqlAstBuilder{" +
"keyspaces=" + keyspaces +
", lastKeyspace=" + lastKeyspace +
'}';
return model.toString();
}
public CqlModel getModel() {
return model;
}
}

View File

@ -18,35 +18,64 @@ package io.nosqlbench.converters.cql.cql.cqlast;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
public class CqlTable {
String name = "";
List<CqlField> fields = new ArrayList();
String keyspace = "";
List<CqlColumnDef> coldefs = new ArrayList();
String refddl;
public CqlTable() {
}
public CqlTable(String tableName) {
this.name = tableName;
}
public void addcolumnDef(CqlField cqlField) {
this.fields.add(cqlField);
public void addcolumnDef(CqlColumnDef cqlField) {
this.coldefs.add(cqlField);
}
public void setName(String tableName) {
this.name = name;
this.name = tableName;
}
public void addcolumnDef(String type, String fieldName) {
fields.add(new CqlField(type, fieldName));
coldefs.add(new CqlColumnDef(type, fieldName));
}
@Override
public String toString() {
return "CqlTable{" +
"name='" + name + '\'' +
", fields=" + fields +
'}';
return "cql table: '" + this.name + "':\n"
+ this.coldefs.stream()
.map(Object::toString)
.map(s -> " " +s)
.collect(Collectors.joining("\n"));
}
public List<CqlColumnDef> getColumnDefinitions() {
return this.coldefs;
}
public String getTableName() {
return this.name;
}
public void setKeyspace(String keyspace) {
this.keyspace=keyspace;
}
public String getRefDdl() {
return this.refddl;
}
public void setRefDdl(String refddl) {
this.refddl=refddl;
}
public String getRefddl() {
return refddl;
}
public String getKeySpace() {
return this.keyspace;
}
}

View File

@ -0,0 +1,17 @@
package io.nosqlbench.converters.cql.cql.exporters;
import java.util.function.Function;
public enum CqlLiteralFormat {
text(v -> "\""+v+"\""),
UNKNOWN(v -> v);
private final Function<String, String> literalFormat;
CqlLiteralFormat(Function<String,String> modifier) {
this.literalFormat = modifier;
}
public String format(String value) {
return this.literalFormat.apply(value);
}
}
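For quick reference, a tiny sketch (not part of this diff; the example class name is made up) of how the enum above behaves: the text constant wraps a binding reference in double quotes, while types without a dedicated constant are handled via UNKNOWN, which passes the value through unchanged.
import io.nosqlbench.converters.cql.cql.exporters.CqlLiteralFormat;

public class CqlLiteralFormatExample {
    public static void main(String[] args) {
        // text columns get their binding reference wrapped in double quotes
        System.out.println(CqlLiteralFormat.text.format("{lastname}")); // prints "{lastname}"
        // unrecognized types fall back to UNKNOWN and are passed through as-is
        System.out.println(CqlLiteralFormat.UNKNOWN.format("{age}"));   // prints {age}
    }
}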

View File

@ -0,0 +1,185 @@
package io.nosqlbench.converters.cql.cql.exporters;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.nosqlbench.converters.cql.cql.cqlast.CqlColumnDef;
import io.nosqlbench.converters.cql.cql.cqlast.CqlKeyspace;
import io.nosqlbench.converters.cql.cql.cqlast.CqlModel;
import io.nosqlbench.converters.cql.cql.cqlast.CqlTable;
import io.nosqlbench.converters.cql.cql.parser.CqlModelParser;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.snakeyaml.engine.v2.api.Dump;
import org.snakeyaml.engine.v2.api.DumpSettings;
import org.snakeyaml.engine.v2.common.FlowStyle;
import org.snakeyaml.engine.v2.common.NonPrintableStyle;
import org.snakeyaml.engine.v2.common.ScalarStyle;
import org.snakeyaml.engine.v2.representer.BaseRepresenter;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
public class CqlWorkloadExporter {
private final static Logger logger = LogManager.getLogger(CqlWorkloadExporter.class);
private final CqlModel model;
public CqlWorkloadExporter(CqlModel model) {
this.model = model;
}
public CqlWorkloadExporter(String ddl) {
this.model = CqlModelParser.parse(ddl);
}
public CqlWorkloadExporter(Path path) {
this.model = CqlModelParser.parse(path);
}
public Map<String, Object> getWorkload() {
Map<String, Object> workload = new LinkedHashMap<>();
for (CqlKeyspace ks : model.getKeyspaces().values()) {
workload.put("bindings", getDefaultCqlBindings());
Map<String, Object> blocks = new LinkedHashMap<>();
workload.put("blocks", blocks);
blocks.put("schema", genSchemaBlock(model));
blocks.put("rampup", genRampupBlock(model));
blocks.put("main", genMainBlock(model));
}
return workload;
}
private Map<String, Object> genMainBlock(CqlModel model) {
Map<String, String> mainOpTemplates = new LinkedHashMap<>();
mainOpTemplates.putAll(
model.getAllTables()
.stream()
.collect(Collectors.toMap(
t -> "insert-" + t.getTableName(),
this::genUpsertTemplate)
)
);
mainOpTemplates.putAll(
model.getAllTables()
.stream()
.collect(Collectors.toMap(
t -> "select-" + t.getTableName(),
this::genSelectTemplate)
));
return Map.of("ops", mainOpTemplates);
}
private Map<String, Object> genRampupBlock(CqlModel model) {
Map<String, String> rampupOpTemplates = model.getAllTables()
.stream()
.collect(Collectors.toMap(
t -> "insert-" + t.getTableName(),
this::genUpsertTemplate)
);
return Map.of("ops", rampupOpTemplates);
}
private String genSelectTemplate(CqlTable table) {
List<CqlColumnDef> cdefs = table.getColumnDefinitions();
return "select * from " + table.getKeySpace() + "." + table.getTableName() +
"\n WHERE " + genPredicateTemplate(table);
}
private String genPredicateTemplate(CqlTable table) {
return table.getColumnDefinitions()
.stream()
.map(this::genPredicatePart)
.collect(Collectors.joining("\n AND "))
+ ";";
}
private String genPredicatePart(CqlColumnDef def) {
String typeName = def.getType();
CqlLiteralFormat cqlLiteralFormat = null;
try {
cqlLiteralFormat = CqlLiteralFormat.valueOf(typeName);
} catch (IllegalArgumentException iae) {
cqlLiteralFormat = CqlLiteralFormat.UNKNOWN;
logger.warn("Unknown literal format for " + typeName);
}
return def.getName() + "=" + cqlLiteralFormat.format("{" + def.getName() + "}");
}
private String genUpsertTemplate(CqlTable table) {
List<CqlColumnDef> cdefs = table.getColumnDefinitions();
return "insert into " + table.getKeySpace() + "." + table.getTableName() + "\n ( "
+ cdefs.stream().map(cd -> cd.getName()).collect(Collectors.joining(" , ")) +
" )\n values\n (" + cdefs.stream().map(cd -> cd.getName()).collect(Collectors.joining("},{", "{", "}"))
+ ");";
}
public String getWorkloadAsYaml() {
DumpSettings dumpSettings = DumpSettings.builder()
.setDefaultFlowStyle(FlowStyle.BLOCK)
.setIndent(2)
.setDefaultScalarStyle(ScalarStyle.PLAIN)
.setMaxSimpleKeyLength(100)
.setSplitLines(true)
.setIndentWithIndicator(true)
.setMultiLineFlow(true)
.setNonPrintableStyle(NonPrintableStyle.ESCAPE)
.build();
BaseRepresenter r;
Dump dump = new Dump(dumpSettings);
Map<String, Object> workload = getWorkload();
return dump.dumpToString(workload);
}
public String getModelAsJson() {
Gson gson = new GsonBuilder().setPrettyPrinting().create();
return gson.toJson(model);
}
public String getWorkoadAsJson() {
Gson gson = new GsonBuilder().setPrettyPrinting().create();
Map<String, Object> workload = getWorkload();
return gson.toJson(workload);
}
private Object genTableCrudTemplates(CqlTable table) {
return Map.of();
}
private Map<String, Object> genSchemaBlock(CqlModel model) {
Map<String, Object> schemablock = new LinkedHashMap<>();
Map<String, Object> ops = new LinkedHashMap<>();
for (CqlKeyspace ks : model.getKeyspaces().values()) {
ops.put("create-keyspace-" + ks.getKeyspaceName(), ks.getRefddl());
}
for (String ksname : model.getTablesByKeyspace().keySet()) {
for (CqlTable cqltable : model.getTablesByKeyspace().get(ksname).values()) {
ops.put("create-table-" + ksname + "." + cqltable.getTableName(), cqltable.getRefDdl());
}
}
schemablock.put("ops", ops);
return schemablock;
}
private Map<String, Object> getDefaultCqlBindings() {
return Map.of(
"text", "NumberNameToString()",
"timestamp", "yaddayaddayadda"
);
}
}
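As a usage illustration only (the driver class and the sample DDL below are assumptions, not part of this commit), the exporter can be handed a DDL string and asked for the generated workload YAML through the CqlWorkloadExporter(String) constructor and getWorkloadAsYaml() defined above:
import io.nosqlbench.converters.cql.cql.exporters.CqlWorkloadExporter;

public class ExportExample {
    public static void main(String[] args) {
        // Sample DDL; any keyspace and table definitions the parser accepts should work here.
        String ddl = """
            CREATE KEYSPACE cycling
              WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 };
            CREATE TABLE cycling.race_winners (
              race_name text,
              race_position int,
              PRIMARY KEY (race_name, race_position));
            """;
        CqlWorkloadExporter exporter = new CqlWorkloadExporter(ddl);
        // Emits the default bindings plus schema, rampup, and main blocks as YAML.
        System.out.println(exporter.getWorkloadAsYaml());
    }
}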

View File

@ -16,7 +16,8 @@
package io.nosqlbench.converters.cql.cql.parser;
import io.nosqlbench.converters.cql.cql.cqlast.CqlAstBuilder;
import io.nosqlbench.converters.cql.cql.cqlast.CqlModel;
import io.nosqlbench.converters.cql.cql.cqlast.CqlModelBuilder;
import io.nosqlbench.converters.cql.generated.CqlLexer;
import io.nosqlbench.converters.cql.generated.CqlParser;
import org.antlr.v4.runtime.CharStreams;
@ -25,11 +26,24 @@ import org.antlr.v4.runtime.CommonTokenStream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
public class CqlParserHarness {
private final static Logger logger = LogManager.getLogger(CqlParserHarness.class);
public void parse(String input) {
public class CqlModelParser {
private final static Logger logger = LogManager.getLogger(CqlModelParser.class);
public static CqlModel parse(Path path) {
try {
String ddl = Files.readString(path);
return parse(ddl);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public static CqlModel parse(String input) {
try {
CodePointCharStream cstream = CharStreams.fromString(input);
@ -39,36 +53,12 @@ public class CqlParserHarness {
CqlParser parser = new CqlParser(tokens);
CqlAstBuilder astListener = new CqlAstBuilder();
CqlModelBuilder astListener = new CqlModelBuilder();
parser.addParseListener(astListener);
CqlParser.RootContext keyspaceParser = parser.root();
String tree = keyspaceParser.toStringTree();
// System.out.println("parsetree:\n" + tree);
System.out.println(astListener.toString());
// VirtDataParser.VirtdataFlowContext virtdataFlowContext = parser.virtdataFlow();
// logger.trace("parse tree: " + virtdataFlowContext.toStringTree(parser));
// if (astListener.hasErrors()) {
// System.out.println(astListener.getErrorNodes());
// }
// VirtDataAST ast = astListener.getModel();
// List<VirtDataFlow> flows = ast.getFlows();
// if (flows.size() > 1) {
// throw new RuntimeException("Only one flow expected here.");
// }
//
// if (astListener.hasErrors()) {
// throw new RuntimeException("Error parsing input '" + input + "'");
// }
//
// return new ParseResult(flows.get(0));
return astListener.getModel();
} catch (Exception e) {
logger.warn("Error while parsing flow:" + e.getMessage());

View File

@ -0,0 +1,8 @@
package io.nosqlbench.converters.cql.cql.traverser;
import java.nio.file.Path;
public class CqlDDlDirectoryTraverser {
public void buildWorkloads(Path sourcePath, Path targetPath) {
}
}
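buildWorkloads is still an empty stub. One possible shape, offered purely as a sketch under assumptions (none of the code below is in this commit), is to walk sourcePath for .cql files and emit one workload YAML per file through the CqlWorkloadExporter(Path) constructor shown earlier:
import io.nosqlbench.converters.cql.cql.exporters.CqlWorkloadExporter;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.stream.Stream;

public class CqlDDlDirectoryTraverserSketch {
    // Hypothetical traversal: one workload YAML per .cql DDL file found under sourcePath.
    public void buildWorkloads(Path sourcePath, Path targetPath) {
        try (Stream<Path> paths = Files.walk(sourcePath)) {
            Files.createDirectories(targetPath);
            for (Path ddlFile : paths.filter(p -> p.toString().endsWith(".cql")).toList()) {
                // Parse the DDL file and render its workload, named after the source file.
                String yaml = new CqlWorkloadExporter(ddlFile).getWorkloadAsYaml();
                Path out = targetPath.resolve(
                    ddlFile.getFileName().toString().replaceAll("\\.cql$", ".yaml"));
                Files.writeString(out, yaml);
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}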

View File

@ -16,29 +16,65 @@
package io.nosqlbench.converters.cql.cql.parser;
import io.nosqlbench.converters.cql.cql.exporters.CqlWorkloadExporter;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
public class CqlParserHarnessTest {
@Test
public void testCqlParserHarness() {
CqlParserHarness harness = new CqlParserHarness();
harness.parse("""
private final static String ksddl = """
CREATE KEYSPACE cycling
WITH REPLICATION = {
'class' : 'SimpleStrategy',
'replication_factor' : 1
};
""";
private final static String tbddl = """
CREATE TABLE cycling.race_winners (
race_name text,
race_position int,
cyclist_name FROZEN<fullname>,
PRIMARY KEY (race_name, race_position));
""";
private final static String ddl = ksddl + tbddl;
@Test
public void testGenBasicWorkload() {
CqlWorkloadExporter exporter = new CqlWorkloadExporter(ddl);
var workloadData = exporter.getWorkloadAsYaml();
System.out.println("# generated workload:\n"+workloadData);
}
@Test
public void testCqlParserHarnessCombined() {
CqlModelParser.parse(ddl);
}
@Disabled
@Test
public void testCqlParserHarnessKeyspace() {
CqlModelParser harness = new CqlModelParser();
harness.parse("""
CREATE KEYSPACE cycling
WITH REPLICATION = {\s
'class' : 'SimpleStrategy',\s
'replication_factor' : 1\s
};
""");
}
@Test
@Disabled
public void testCqlParserHarnessTable() {
CqlModelParser harness = new CqlModelParser();
harness.parse("""
CREATE TABLE cycling.race_winners (
race_name text,\s
race_position int,\s
cyclist_name FROZEN<fullname>,\s
PRIMARY KEY (race_name, race_position));
""");
}
}
}

View File

@ -0,0 +1,6 @@
ALTER KEYSPACE cycling
WITH REPLICATION = {
'class' : 'NetworkTopologyStrategy',
'datacenter1' : 3 }
AND DURABLE_WRITES = false ;

View File

@ -0,0 +1,3 @@
ALTER MATERIALIZED VIEW cycling.cyclist_by_age
WITH comment = 'A most excellent and useful view'
AND bloom_filter_fp_chance = 0.02;

View File

@ -0,0 +1 @@
ALTER ROLE coach WITH PASSWORD='bestTeam';

View File

@ -0,0 +1,5 @@
ALTER TABLE cycling_comments
WITH compression = {
'sstable_compression' : 'DeflateCompressor',
'chunk_length_kb' : 64 };

View File

@ -0,0 +1,4 @@
ALTER TYPE cycling.fullname
RENAME middlename TO middle
AND lastname to last
AND firstname to first;

View File

@ -0,0 +1 @@
ALTER USER moss WITH PASSWORD 'bestReceiver';

View File

@ -0,0 +1 @@
APPLY BATCH;

View File

@ -0,0 +1,5 @@
CREATE AGGREGATE cycling.average(int)
SFUNC avgState
STYPE tuple<int,bigint>
FINALFUNC avgFinal
INITCOND (0,0);

View File

@ -0,0 +1,10 @@
CREATE OR REPLACE FUNCTION cycling.avgFinal ( state tuple<int,bigint> )
CALLED ON NULL INPUT
RETURNS double
LANGUAGE java AS
$$ double r = 0;
if (state.getInt(0) == 0) return null;
r = state.getLong(1);
r/= state.getInt(0);
return Double.valueOf(r); $$
;

View File

@ -0,0 +1,4 @@
CREATE INDEX user_state
ON myschema.users (state);
CREATE INDEX ON myschema.users (zip);

View File

@ -0,0 +1,6 @@
CREATE KEYSPACE cycling
WITH REPLICATION = {
'class' : 'SimpleStrategy',
'replication_factor' : 1
};

View File

@ -0,0 +1,7 @@
CREATE MATERIALIZED VIEW cycling.cyclist_by_age
AS SELECT age, name, country
FROM cycling.cyclist_mv
WHERE age IS NOT NULL AND cid IS NOT NULL
PRIMARY KEY (age, cid)
WITH caching = { 'keys' : 'ALL', 'rows_per_partition' : '100' }
AND comment = 'Based on table cyclist' ;

View File

@ -0,0 +1,3 @@
CREATE ROLE coach
WITH PASSWORD = 'All4One2day!'
AND LOGIN = true;

View File

@ -0,0 +1,5 @@
CREATE TABLE cycling.race_winners (
race_name text,
race_position int,
cyclist_name FROZEN<fullname>,
PRIMARY KEY (race_name, race_position));

View File

@ -0,0 +1 @@
DROP TRIGGER trigger_name ON table_name;

View File

@ -0,0 +1,6 @@
CREATE TYPE cycling.basic_info (
birthday timestamp,
nationality text,
weight text,
height text
);

View File

@ -0,0 +1 @@
CREATE USER newuser WITH PASSWORD 'password';

View File

@ -0,0 +1,8 @@
DELETE firstname, lastname FROM cycling.cyclist_name
WHERE id = e7ae5cf3-d358-4d99-b900-85902fda9bb0;
DELETE FROM cycling.cyclist_name
WHERE id =e7ae5cf3-d358-4d99-b900-85902fda9bb0
if firstname='Alex' and lastname='Smith';
DELETE id FROM cyclist_id
WHERE lastname = 'WELTEN' and firstname = 'Bram'
IF EXISTS;

View File

@ -0,0 +1 @@
DROP AGGREGATE IF EXISTS cycling.avgState;

View File

@ -0,0 +1 @@
DROP FUNCTION IF EXISTS cycling.fLog;

View File

@ -0,0 +1 @@
DROP INDEX cycling.ryear;

View File

@ -0,0 +1 @@
DROP KEYSPACE cycling;

View File

@ -0,0 +1 @@
DROP MATERIALIZED VIEW cycling.cyclist_by_age;

View File

@ -0,0 +1 @@
DROP ROLE IF EXISTS team_manager;

View File

@ -0,0 +1 @@
DROP TABLE cycling.cyclist_name;

View File

@ -0,0 +1 @@
DROP TRIGGER trigger_name ON ks.table_name;

View File

@ -0,0 +1 @@
DROP TYPE cycling.basic_info ;

View File

@ -0,0 +1 @@
DROP USER IF EXISTS boone;

View File

@ -0,0 +1,10 @@
GRANT SELECT ON ALL KEYSPACES TO coach;
GRANT MODIFY ON KEYSPACE field TO manager;
GRANT ALTER ON KEYSPACE cycling TO coach;
GRANT ALL PERMISSIONS ON cycling.name TO coach;
GRANT ALL ON KEYSPACE cycling TO cycling_admin;

View File

@ -0,0 +1,23 @@
INSERT INTO cycling.cyclist_name (id, lastname, firstname)
VALUES (6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47, 'KRUIKSWIJK','Steven')
USING TTL 86400 AND TIMESTAMP 123456789;
INSERT INTO cycling.cyclist_categories (id,lastname,categories)
VALUES(
'6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47',
'KRUIJSWIJK',
{'GC', 'Time-trial', 'Sprint'});
INSERT INTO cycling.cyclist_categories JSON
'{"category": "", "points":780, "id": "6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47"}';
INSERT INTO cycling.cyclist_teams (id,lastname,teams)
VALUES(5b6962dd-3f90-4c93-8f61-eabfa4a803e2,'VOS',$$Women's Tour of New Zealand$$);
INSERT INTO cycling.route(race_id,race_name,lat_long) VALUES (500, 'Name', ('Champagne', (46.833,6.65)));
INSERT INTO "students"("id", "address", "name", "[age]", "age", "colu'mn1", "colu68mn1", "height") values
(740, 'hongkong','alice',null,32,'','',172);
INSERT INTO test(a,b,c,d) values(1,['listtext1','listtext2'],{'settext1','settext2'},{'mapkey1':'mapvale2','mapkey2':'mapvalue2'});

View File

@ -0,0 +1,5 @@
LIST ALL
OF coach;
LIST ALL;
LIST ALL
ON cyclist.name;

View File

@ -0,0 +1,3 @@
LIST ROLES;
LIST ROLES
OF manager;

View File

@ -0,0 +1,6 @@
REVOKE SELECT
ON cycling.name
FROM manager;
REVOKE ALTER
ON ALL ROLES
FROM coach;

View File

@ -0,0 +1,41 @@
SELECT event_id,
dateOf(created_at) AS creation_date,
blobAsText(content) AS content
FROM timeline;
SELECT COUNT(*)
FROM system.IndexInfo;
SELECT lastname
FROM cycling.cyclist_name
LIMIT 50000;
SELECT id, lastname, teams
FROM cycling.cyclist_career_teams
WHERE id=5b6962dd-3f90-4c93-8f61-eabfa4a803e2;
SELECT * FROM cycling.cyclist_category;
SELECT * FROM cycling.cyclist_category WHERE category = 'SPRINT';
SELECT category, points, lastname FROM cycling.cyclist_category;
SELECT * From cycling.cyclist_name LIMIT 3;
SELECT * FROM cycling.cyclist_cat_pts WHERE category = 'GC' ORDER BY points ASC;
SELECT race_name, point_id, lat_long AS CITY_LATITUDE_LONGITUDE FROM cycling.route;
SELECT * FROM cycling.upcoming_calendar WHERE year = 2015 AND month = 06;
select json name, checkin_id, time_stamp from checkin;
select name, checkin_id, toJson(time_stamp) from checkin;
SELECT * FROM cycling.calendar WHERE race_id IN (100, 101, 102) AND (race_start_date, race_end_date) IN (('2015-01-01','2015-02-02'), ('2016-01-01','2016-02-02'));
SELECT * FROM cycling.calendar WHERE race_id IN (100, 101, 102) AND (race_start, race_end) >= ('2015-01-01', '2015-02-02');
SELECT * FROM cycling.race_times WHERE race_name = '17th Santos Tour Down Under' and race_time >= '19:15:19' AND race_time <= '19:15:39';

View File

@ -0,0 +1,2 @@
TRUNCATE cycling.user_activity;
TRUNCATE TABLE cycling.user_activity;

View File

@ -0,0 +1,29 @@
UPDATE cycling.cyclist_name
SET comments ='Rides hard, gets along with others, a real winner'
WHERE id = fb372533-eb95-4bb4-8685-6ef61e994caa IF EXISTS;
UPDATE cycling.cyclists
SET firstname = 'Marianne',
lastname = 'VOS'
WHERE id = 88b8fd18-b1ed-4e96-bf79-4280797cba80;
UPDATE cycling.cyclists
SET firstname = 'Anna', lastname = 'VAN DER BREGGEN' WHERE id = e7cd5752-bc0d-4157-a80f-7523add8dbcd;
UPDATE cycling.upcoming_calendar
SET events = ['Tour de France'] + events WHERE year=2015 AND month=06;
UPDATE users
SET state = 'TX'
WHERE user_uuid
IN (88b8fd18-b1ed-4e96-bf79-4280797cba80,
06a8913c-c0d6-477c-937d-6c1b69a95d43,
bc108776-7cb5-477f-917d-869c12dfffa8);
UPDATE cyclist.cyclist_career_teams SET teams = teams + {'Team DSB - Ballast Nedam'} WHERE id = 88b8fd18-b1ed-4e96-bf79-4280797cba80;
UPDATE cyclist.cyclist_career_teams SET teams = teams - {'WOMBATS'} WHERE id = 88b8fd18-b1ed-4e96-bf79-4280797cba80;
UPDATE cyclist.cyclist_career_teams SET teams = {} WHERE id = 88b8fd18-b1ed-4e96-bf79-4280797cba80;

View File

@ -0,0 +1,3 @@
USE key_name;
USE PortfolioDemo;
USE "Excalibur";