incremental update

This commit is contained in:
Jonathan Shook 2024-05-10 16:39:06 -05:00
parent 8b332fbed8
commit d4462bb8f3
9 changed files with 233 additions and 8 deletions

View File

@@ -0,0 +1,31 @@
<!--
~ Copyright (c) 2024 nosqlbench
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="astra DAPI rampup" type="JarApplication" folderName="Astra DAPI">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5.jar" />
<option name="PROGRAM_PARAMETERS" value="astra_kv_dapi default.rampup threads=1 astraTokenFile=target/token astraApiEndpointFile=target/endpoint -v" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$/local/dataapi" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="true" />
<option name="ALTERNATIVE_JRE_PATH" value="21" />
<method v="2" />
</configuration>
</component>
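
Outside the IDE, this run configuration corresponds roughly to the following invocation (a sketch assuming nb5.jar has been built, that the token and endpoint files exist under local/dataapi/target, and that a Java 21 runtime is on the path, per ALTERNATIVE_JRE_PATH above):

    cd local/dataapi
    java -jar ../../nb5/target/nb5.jar astra_kv_dapi default.rampup threads=1 \
        astraTokenFile=target/token astraApiEndpointFile=target/endpoint -v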

View File

@@ -40,7 +40,7 @@ public class DataApiOpMapper implements OpMapper<DataApiBaseOp> {
DataApiOpType.class,
String.class,
"type",
"target"
"collection"
);
logger.debug(() -> "Using '" + typeAndTarget.enumId + "' op type for op template '" + op.getName() + "'");
return switch (typeAndTarget.enumId) {
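
With the target field renamed from "target" to "collection", op templates now address their collection either through the op-type shorthand or through an explicit collection field. A hedged sketch of the two spellings, assuming the usual TypeAndTarget resolution conventions (keyvalue is an illustrative collection name):

    ops:
      find_op:
        find: keyvalue        # shorthand: op type as key, collection as value
    # equivalently:
    ops:
      find_op:
        type: find
        collection: keyvalue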

View File

@@ -22,6 +22,7 @@ import io.nosqlbench.nb.api.config.standard.ConfigModel;
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.nb.api.config.standard.Param;
+import io.nosqlbench.nb.api.errors.BasicError;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -40,6 +41,7 @@ public class DataApiSpace {
private DataAPIClient dataAPIClient;
private Database database;
private String namespace;
public DataApiSpace(String name, NBConfiguration cfg) {
this.config = cfg;
this.name = name;
@@ -67,7 +69,26 @@
}
private void setApiEndpoint() {
-        this.astraApiEndpoint = config.get("astraApiEndpoint");
+        Optional<String> epConfig = config.getOptional("astraApiEndpoint");
+        Optional<String> epFileConfig = config.getOptional("astraApiEndpointFile");
+        if (epConfig.isPresent() && epFileConfig.isPresent()) {
+            throw new BasicError("You can only configure one of astraApiEndpoint or astraApiEndpointFile");
+        }
+        if (epConfig.isEmpty() && epFileConfig.isEmpty()) {
+            throw new BasicError("You must configure one of astraApiEndpoint or astraApiEndpointFile");
+        }
+        epFileConfig
+            .map(Path::of)
+            .map(p -> {
+                try {
+                    return Files.readString(p);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            })
+            .map(String::trim)
+            .ifPresent(ep -> this.astraApiEndpoint = ep);
+        epConfig.ifPresent(ep -> this.astraApiEndpoint = ep);
}
private void setNamespace() {
@@ -78,7 +99,7 @@
private void setToken() {
String tokenFileContents = null;
Optional<String> tokenFilePath = config.getOptional("astraTokenFile");
-        if(tokenFilePath.isPresent()) {
+        if (tokenFilePath.isPresent()) {
Path path = Paths.get(tokenFilePath.get());
try {
tokenFileContents = Files.readAllLines(path).getFirst();
@@ -94,16 +115,27 @@
public static NBConfigModel getConfigModel() {
return ConfigModel.of(DataApiSpace.class)
.add(
Param.optional("astraTokenFile", String.class, "file to load the Astra token from")
Param.optional("astraTokenFile", String.class)
.setDescription("file to load the Astra token from")
)
.add(
Param.optional("astraToken",String.class)
Param.optional("astraToken", String.class)
.setDescription("the Astra token used to connect to the database")
)
.add(
Param.defaultTo("astraApiEndpoint", String.class)
Param.optional("astraApiEndpoint", String.class)
.setDescription("the API endpoint for the Astra database")
)
+            .add(
+                Param.optional("astraApiEndpointFile", String.class)
+                    .setDescription("file to load the API endpoint for the Astra database")
+            )
.add(
Param.defaultTo("namespace", "default_namespace")
.setDescription("The Astra namespace to use")
)
.asReadOnly();
}
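
Taken together with the endpoint logic above, astraApiEndpoint changes from a defaulted parameter to an optional one because the endpoint may now come from either of two mutually exclusive sources: astraApiEndpoint given directly (an Astra Data API URL, e.g. of the form https://<db-id>-<region>.apps.astra.datastax.com; the exact shape here is illustrative), or astraApiEndpointFile naming a file whose trimmed contents are that URL, as the IDE run configuration in this commit does with astraApiEndpointFile=target/endpoint.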

View File

@@ -28,6 +28,9 @@ public class DataApiDropCollectionOp extends DataApiBaseOp {
@Override
public Object apply(long value) {
Boolean exists = db.collectionExists(collectionName);
+        // TODO: remove these existence checks from the ops when we can, because they hide additional ops
+        // which should be surfaced in the test definition. Conditional operations should be provided with
+        // clear views at the workload template level.
if (exists) {
db.dropCollection(collectionName);
}

View File

@@ -0,0 +1,142 @@
min_version: "5.21.0"
description: |
A basic workload that uses the DataStax Data API Client in Java, emulating what
applications would do in the native stack.
TEMPLATE(collection,keyvalue)
scenarios:
default:
schema: run driver=dataapi tags==block:schema threads==1 cycles==UNDEF
rampup: run driver=dataapi tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
# main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
# kv_dapi:
# kv_dapi_schema: run driver=http tags==block:schema threads==1 cycles==UNDEF
# kv_dapi_rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
# kv_dapi_main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
# basic_check:
# schema: run driver=http tags==block:schema threads==1 cycles==UNDEF
# rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10) threads=auto
# main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10) threads=auto
bindings:
# To enable an optional weighted set of hosts in place of a load balancer
# Examples
# single host: jsonapi_host=host1
# multiple hosts: jsonapi_host=host1,host2,host3
# multiple weighted hosts: jsonapi_host=host1:3,host2:7
weighted_hosts: WeightedStrings('<<jsonapi_host:<<stargate_host:localhost>>>>')
seq_key: Mod(TEMPLATE(keycount,50000000000L)); ToString() -> String
seq_value: Hash(); Mod(TEMPLATE(valuecount,50000000000L)); ToString() -> String
rw_key: TEMPLATE(keydist,Uniform(0,50000000000L)); ToString() -> String
rw_value: Hash(); TEMPLATE(valdist,Uniform(0,50000000000L)); ToString() -> String
vector_value: HashedFloatVectors(<<dimensions:1536>>); ToCqlVector()
request_id: ToHashedUUID(); ToString();
token: Discard(); Token('TEMPLATE(auth_token)','TEMPLATE(uri,http://localhost:8081/v1/auth)', 'TEMPLATE(uid,cassandra)', 'TEMPLATE(pswd,cassandra)');
params:
cl: TEMPLATE(cl,LOCAL_QUORUM)
blocks:
reset_schema:
ops:
drop_index:
raw: |-
DROP INDEX IF EXISTS TEMPLATE(keyspace, baselines).TEMPLATE(table,keyvalue)_value_idx;
      drop_table:
raw: |-
DROP TABLE IF EXISTS TEMPLATE(keyspace, baselines).TEMPLATE(table,keyvalue);
schema:
ops:
delete_collection_op:
delete_collection: "TEMPLATE(collection)"
create_collection_op:
create_collection: "TEMPLATE(collection)"
rampup:
ops:
insert_one_op:
insert_one: "TEMPLATE(collection)"
document:
_id: "{seq_key}"
value: "{seq_value}"
  rampup_uuid:
ops:
insert_one_op:
        insert_one: "TEMPLATE(collection)"
document:
value: "{seq_value}"
main_read:
params:
ratio: 5
ops:
find_op:
find: "TEMPLATE(collection)"
filter:
_id: "{rw_key}"
schema_with_vector:
ops:
delete_collection_op:
delete_collection: "TEMPLATE(collection)"
create_collection_op:
create_collection: "TEMPLATE(collection)"
options:
vector:
size: 1536
rampup_with_vector:
ops:
      insert_one_op:
        insert_one: "TEMPLATE(collection)"
        document:
_id: "{seq_key}"
value: "{seq_value}"
$vector: "{vector_value}"
rampup_with_vector_uuid:
ops:
insert_one_op:
insert_one: "TEMPLATE(collection)"
document:
value: "{seq_value}"
$vector: "{vector_value}"
main_read_with_vector:
ops:
find_op:
find: "TEMPLATE(collection)"
filter:
_id: "{rw_key}"
main_ann_with_vector_limit_20:
params:
ratio: 5
ops:
find_op:
find: "TEMPLATE(collection)"
sort:
$vector: "{vector_value}"
options:
limit: 20
schema_with_text_sai:
ops:
delete_collection_op:
delete_collection: "TEMPLATE(collection)"
create_collection_op:
create_collection: "TEMPLATE(collection)"
rampup_with_text_sai:
ops:
insert_one_op:
insert_one: "TEMPLATE(collection)"
document:
_id: "{seq_key}"
value: "{seq_value}"
main_read_with_text_sai:
params:
ratio: 5
ops:
find_op:
find: "TEMPLATE(collection)"
filter:
value: "{rw_value}"

View File

@@ -141,6 +141,20 @@
</dependencies>
</profile>
+        <profile>
+            <id>adapter-dataapi-include</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.nosqlbench</groupId>
+                    <artifactId>adapter-dataapi</artifactId>
+                    <version>${revision}</version>
+                </dependency>
+            </dependencies>
+        </profile>
<profile>
<id>adapter-dynamodb-include</id>
<activation>
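
Since the new profile is activeByDefault, the dataapi adapter is bundled into nb5 by default; as a usage note, it can be excluded with standard Maven profile negation, e.g. mvn -P '!adapter-dataapi-include' package (and explicitly activating any other profile also switches activeByDefault profiles off).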

View File

@@ -175,7 +175,7 @@
</profile>
<profile>
-            <id>adapter-dataapi</id>
+            <id>adapter-dataapi-module</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
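
The rename presumably keeps the two poms' profile ids distinct now that the bundling pom above defines adapter-dataapi-include: the -module profile adds the adapter module to the build, while the -include profile pulls the built artifact into nb5.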

View File

@@ -178,7 +178,7 @@ public class NBConfiguration {
}
}
} else {
throw new NBConfigError("Parameter was not found for " + Arrays.toString(names) + ".");
throw new NBConfigError("Parameter definition was not found for " + Arrays.toString(names) + ".");
}
}
if (o == null) {

View File

@@ -136,6 +136,9 @@ public class Param<T> {
* @return
*/
public static <V> Param<V> defaultTo(String name, V defaultValue) {
+        if (defaultValue instanceof Class<?> clazz) {
+            throw new RuntimeException("A Class object (" + clazz.getCanonicalName() + ") is not a supported default value; pass a concrete value, or use Param.optional(name, type) instead.");
+        }
return new Param<V>(List.of(name), (Class<V>) defaultValue.getClass(), null, true, defaultValue, null);
}