Merge branch 'main' into better-error-on-scenario-step-selection

This commit is contained in:
sahankj2000 2024-05-13 16:50:52 +05:30 committed by GitHub
commit 4ce8f79abd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
100 changed files with 4929 additions and 73 deletions

1
.gitignore vendored
View File

@ -18,6 +18,7 @@ bin/**
**/*.log
**/logs
**/*.*log
**/gen/**
**/generated
**/.DS_Store
**/generated/**

View File

@ -0,0 +1,15 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="SCENARIO astra DAPI dapi_novector" type="JarApplication" folderName="Astra DAPI">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5.jar" />
<option name="PROGRAM_PARAMETERS" value="astra_kv_dapi dapi_novector collection=baselines astraTokenFile=target/token astraApiEndpointFile=target/endpoint -v" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$/local/dataapi" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="true" />
<option name="ALTERNATIVE_JRE_PATH" value="21" />
<method v="2" />
</configuration>
</component>

View File

@ -0,0 +1,15 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="astra DAPI rampup threads=1" type="JarApplication" folderName="Astra DAPI">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5.jar" />
<option name="PROGRAM_PARAMETERS" value="astra_kv_dapi default.rampup threads=1 astraTokenFile=target/token astraApiEndpointFile=target/endpoint -v" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$/local/dataapi" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="true" />
<option name="ALTERNATIVE_JRE_PATH" value="21" />
<method v="2" />
</configuration>
</component>

View File

@ -0,0 +1,15 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="astra DAPI read" type="JarApplication" folderName="Astra DAPI">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5.jar" />
<option name="PROGRAM_PARAMETERS" value="astra_kv_dapi default.read threads=1 astraTokenFile=target/token astraApiEndpointFile=target/endpoint -v" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$/local/dataapi" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="true" />
<option name="ALTERNATIVE_JRE_PATH" value="21" />
<method v="2" />
</configuration>
</component>

View File

@ -0,0 +1,14 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="nb --list-drivers" type="JarApplication" folderName="Common">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5-5.21.1-SNAPSHOT-jar-with-dependencies.jar" />
<option name="PROGRAM_PARAMETERS" value="--list-drivers --show-stacktraces" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/local/" />
<option name="ALTERNATIVE_JRE_PATH" />
<method v="2" />
</configuration>
</component>

View File

@ -0,0 +1,14 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="nb --list-scenarios" type="JarApplication" folderName="Common">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5-5.21.1-SNAPSHOT-jar-with-dependencies.jar" />
<option name="PROGRAM_PARAMETERS" value="--list-scenarios --show-stacktraces" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/local/" />
<option name="ALTERNATIVE_JRE_PATH" />
<method v="2" />
</configuration>
</component>

View File

@ -0,0 +1,14 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="qdrant_delete_collection_glove_25" type="JarApplication" folderName="Qdrant">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5.jar" />
<option name="PROGRAM_PARAMETERS" value="qdrant_vectors_live qdrant_vectors.delete_collection dimensions=25 testsize=10000 trainsize=1183514 dataset=glove-25-angular filetype=hdf5 collection=glove_25 similarity_function=1 qdranthost=ded78a51-8370-47d8-adb0-6147f0fcbba2.us-east4-0.gcp.cloud.qdrant.io token_file=./apikey grpc_port=6334 --progress console:1s -v --add-labels &quot;dimensions:25,dataset=glove-25&quot; --add-labels=&quot;target:qdrant,instance:vectors,vendor:qdrant_v191&quot; --report-prompush-to https://vector-perf.feat.apps.paas.datastax.com:8427/api/v1/import/prometheus/metrics/job/nosqlbench/instance/vectors --annotators &quot;[{'type':'log','level':'info'},{'type':'grafana','baseurl':'https://vector-perf.feat.apps.paas.datastax.com/'}]&quot; --report-interval 10 --show-stacktraces --logs-max 5" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$/local/qdrant" />
<option name="ALTERNATIVE_JRE_PATH" value="jdk21" />
<method v="2" />
</configuration>
</component>

View File

@ -0,0 +1,14 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="qdrant_schema_collection_glove_25" type="JarApplication" folderName="Qdrant">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5.jar" />
<option name="PROGRAM_PARAMETERS" value="qdrant_vectors_live qdrant_vectors.schema_collection dimensions=25 testsize=10000 trainsize=1183514 dataset=glove-25-angular filetype=hdf5 collection=glove_25 similarity_function=1 qdranthost=ded78a51-8370-47d8-adb0-6147f0fcbba2.us-east4-0.gcp.cloud.qdrant.io token_file=./apikey grpc_port=6334 --progress console:1s -v --add-labels &quot;dimensions:25,dataset=glove-25&quot; --add-labels=&quot;target:qdrant,instance:vectors,vendor:qdrant_v191&quot; --report-prompush-to https://vector-perf.feat.apps.paas.datastax.com:8427/api/v1/import/prometheus/metrics/job/nosqlbench/instance/vectors --annotators &quot;[{'type':'log','level':'info'},{'type':'grafana','baseurl':'https://vector-perf.feat.apps.paas.datastax.com/'}]&quot; --report-interval 10 --show-stacktraces --logs-max 5" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$/local/qdrant" />
<option name="ALTERNATIVE_JRE_PATH" value="jdk21" />
<method v="2" />
</configuration>
</component>

View File

@ -0,0 +1,14 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="qdrant_search_points_glove_25" type="JarApplication" folderName="Qdrant">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5.jar" />
<option name="PROGRAM_PARAMETERS" value="qdrant_vectors_live qdrant_vectors.search_points dimensions=25 testsize=10000 trainsize=1183514 dataset=glove-25-angular filetype=hdf5 collection=glove_25 similarity_function=1 qdranthost=ded78a51-8370-47d8-adb0-6147f0fcbba2.us-east4-0.gcp.cloud.qdrant.io token_file=./apikey grpc_port=6334 --progress console:1s -v --add-labels &quot;dimensions:25,dataset=glove-25&quot; --add-labels=&quot;target:qdrant,instance:vectors,vendor:qdrant_v191&quot; --report-prompush-to https://vector-perf.feat.apps.paas.datastax.com:8427/api/v1/import/prometheus/metrics/job/nosqlbench/instance/vectors --annotators &quot;[{'type':'log','level':'info'},{'type':'grafana','baseurl':'https://vector-perf.feat.apps.paas.datastax.com/'}]&quot; --report-interval 10 --report-csv-to metrics --show-stacktraces --logs-max 5" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$/local/qdrant" />
<option name="ALTERNATIVE_JRE_PATH" value="jdk21" />
<method v="2" />
</configuration>
</component>

View File

@ -0,0 +1,14 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="qdrant_upsert_points_glove_25" type="JarApplication" folderName="Qdrant">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5.jar" />
<option name="PROGRAM_PARAMETERS" value="qdrant_vectors_live qdrant_vectors.rampup dimensions=25 testsize=10000 trainsize=1183514 dataset=glove-25-angular filetype=hdf5 collection=glove_25 similarity_function=1 qdranthost=ded78a51-8370-47d8-adb0-6147f0fcbba2.us-east4-0.gcp.cloud.qdrant.io token_file=./apikey grpc_port=6334 --progress console:1s -v --add-labels &quot;dimensions:25,dataset=glove-25&quot; --add-labels=&quot;target:qdrant,instance:vectors,vendor:qdrant_v191&quot; --report-prompush-to https://vector-perf.feat.apps.paas.datastax.com:8427/api/v1/import/prometheus/metrics/job/nosqlbench/instance/vectors --annotators &quot;[{'type':'log','level':'info'},{'type':'grafana','baseurl':'https://vector-perf.feat.apps.paas.datastax.com/'}]&quot; --report-interval 10 --show-stacktraces --logs-max 5" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$/local/qdrant" />
<option name="ALTERNATIVE_JRE_PATH" value="jdk21" />
<method v="2" />
</configuration>
</component>

View File

@ -41,6 +41,8 @@
<PROG>nb5</PROG>
<maven.plugin.validation>VERBOSE</maven.plugin.validation>
<jacoco.version>0.8.12</jacoco.version>
</properties>
<name>${project.artifactId}</name>
@ -549,7 +551,7 @@
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.10</version>
<version>${jacoco.version}</version>
<executions>
<execution>
<id>prepare-agent</id>
@ -673,6 +675,7 @@
<include>**/*.java</include>
</includes>
<excludes>
<exclude>**/gen/**</exclude>
<exclude>**/generated/**</exclude>
<exclude>**/jmh_generated/**</exclude>
<exclude>**/generated-sources/**</exclude>
@ -782,7 +785,7 @@
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.10</version>
<version>${jacoco.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>

View File

View File

@ -0,0 +1,57 @@
<!--
~ Copyright (c) 2022-2023 nosqlbench
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>adapter-dataapi</artifactId>
<packaging>jar</packaging>
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>${revision}</version>
<relativePath>../../mvn-defaults</relativePath>
</parent>
<name>${project.artifactId}</name>
<description>
A nosqlbench adapter driver module for the DataStax Data API
</description>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-annotations</artifactId>
<version>${revision}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapters-api</artifactId>
<version>${revision}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.datastax.astra</groupId>
<artifactId>astra-db-java</artifactId>
<version>1.0.0</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,51 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
import io.nosqlbench.adapters.api.activityimpl.uniform.BaseDriverAdapter;
import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter;
import io.nosqlbench.nb.annotations.Service;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.nb.api.labels.NBLabels;
import java.util.function.Function;
@Service(value = DriverAdapter.class, selector = "dataapi")
public class DataApiDriverAdapter extends BaseDriverAdapter<DataApiBaseOp, DataApiSpace> {

    /**
     * Driver adapter for the DataStax Data API, selected with {@code driver=dataapi}.
     *
     * @param parent      owning component in the NB component tree
     * @param childLabels labels attached to this adapter's metrics
     */
    public DataApiDriverAdapter(NBComponent parent, NBLabels childLabels) {
        super(parent, childLabels);
    }

    /**
     * Returns the mapper that resolves op templates to Data API op dispensers.
     * Parameterized (not raw) so callers get a typed {@code OpMapper<DataApiBaseOp>}.
     */
    @Override
    public OpMapper<DataApiBaseOp> getOpMapper() {
        return new DataApiOpMapper(this);
    }

    /** One {@link DataApiSpace} is created per space name, carrying client/database state. */
    @Override
    public Function<String, ? extends DataApiSpace> getSpaceInitializer(NBConfiguration cfg) {
        return (s) -> new DataApiSpace(s, cfg);
    }

    /** Base adapter config plus the Data API specific parameters (token, endpoint, namespace). */
    @Override
    public NBConfigModel getConfigModel() {
        return super.getConfigModel().add(DataApiSpace.getConfigModel());
    }
}

View File

@ -0,0 +1,30 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi;
import io.nosqlbench.adapter.diag.DriverAdapterLoader;
import io.nosqlbench.nb.annotations.Service;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.labels.NBLabels;
@Service(value = DriverAdapterLoader.class, selector = "dataapi")
public class DataApiDriverAdapterLoader implements DriverAdapterLoader {
    /**
     * SPI entry point: constructs the "dataapi" driver adapter for the given
     * parent component and metric labels. Discovered via the {@code @Service}
     * annotation under the "dataapi" selector.
     */
    @Override
    public DataApiDriverAdapter load(NBComponent parent, NBLabels childLabels) {
        return new DataApiDriverAdapter(parent, childLabels);
    }
}

View File

@ -0,0 +1,82 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Filter;
import com.datastax.astra.client.model.FindOptions;
import com.datastax.astra.client.model.Projection;
import com.datastax.astra.client.model.Sort;
import io.nosqlbench.adapter.dataapi.opdispensers.DataApiOpDispenser;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiFindVectorFilterOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.function.LongFunction;
public class DataApiFindVectorFilterOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiFindVectorFilterOpDispenser.class);

    // Built once per op template; invoked once per cycle to synthesize the op.
    private final LongFunction<DataApiFindVectorFilterOp> opFunction;

    /**
     * Dispenses find ops that combine an ANN vector search with a metadata filter.
     *
     * @param adapter        the owning driver adapter
     * @param op             the parsed op template
     * @param targetFunction resolves the target collection name for a cycle
     */
    public DataApiFindVectorFilterOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiFindVectorFilterOp> createOpFunction(ParsedOp op) {
        return (l) -> {
            Database db = spaceFunction.apply(l).getDatabase();
            float[] vector = getVectorValues(op, l);
            Filter filter = getFilterFromOp(op, l);
            int limit = getLimit(op, l);
            return new DataApiFindVectorFilterOp(
                db,
                db.getCollection(targetFunction.apply(l)),
                vector,
                limit,
                filter
            );
        };
    }

    // NOTE(review): removed private getFindOptions(ParsedOp,long) — it was never
    // called from this class (dead code); the op carries vector/filter/limit directly.

    /** Reads the 'limit' op config, defaulting to 100 results per query. */
    private int getLimit(ParsedOp op, long l) {
        return op.getConfigOr("limit", 100, l);
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,64 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi;
import io.nosqlbench.adapter.dataapi.opdispensers.*;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiOpType;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.engine.api.templating.TypeAndTarget;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class DataApiOpMapper implements OpMapper<DataApiBaseOp> {
    private static final Logger logger = LogManager.getLogger(DataApiOpMapper.class);
    private final DataApiDriverAdapter adapter;
    public DataApiOpMapper(DataApiDriverAdapter dataApiDriverAdapter) {
        this.adapter = dataApiDriverAdapter;
    }
    /**
     * Resolves a parsed op template to the dispenser for its op type.
     * The type is taken from the "type" field and the target collection
     * from the "collection" field of the op template; the switch is
     * exhaustive over {@link DataApiOpType}, so adding an enum constant
     * without a case here is a compile error.
     */
    @Override
    public OpDispenser<? extends DataApiBaseOp> apply(ParsedOp op) {
        TypeAndTarget<DataApiOpType, String> typeAndTarget = op.getTypeAndTarget(
            DataApiOpType.class,
            String.class,
            "type",
            "collection"
        );
        logger.debug(() -> "Using '" + typeAndTarget.enumId + "' op type for op template '" + op.getName() + "'");
        return switch (typeAndTarget.enumId) {
            case create_collection -> new DataApiCreateCollectionOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case insert_many -> new DataApiInsertManyOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case insert_one -> new DataApiInsertOneOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case insert_one_vector -> new DataApiInsertOneVectorOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case find -> new DataApiFindOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case find_one -> new DataApiFindOneOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case find_one_and_delete -> new DataApiFindOneAndDeleteOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case find_one_and_update -> new DataApiFindOneAndUpdateOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case find_vector -> new DataApiFindVectorOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case find_vector_filter -> new DataApiFindVectorFilterOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case update_one -> new DataApiUpdateOneOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case update_many -> new DataApiUpdateManyOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case delete_one -> new DataApiDeleteOneOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case delete_many -> new DataApiDeleteManyOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case delete_collection -> new DataApiDropCollectionOpDispenser(adapter, op, typeAndTarget.targetFunction);
        };
    }
}

View File

@ -0,0 +1,142 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi;
import com.datastax.astra.client.DataAPIClient;
import com.datastax.astra.client.Database;
import io.nosqlbench.nb.api.config.standard.ConfigModel;
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.nb.api.config.standard.Param;
import io.nosqlbench.nb.api.errors.BasicError;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
public class DataApiSpace {
    private final static Logger logger = LogManager.getLogger(DataApiSpace.class);
    private final NBConfiguration config;
    private final String name;            // space (instance) name
    private String astraToken;            // from astraToken or the first line of astraTokenFile
    private String astraApiEndpoint;      // from astraApiEndpoint or astraApiEndpointFile (trimmed)
    private DataAPIClient dataAPIClient;
    private Database database;
    private String namespace;             // optional; defaulted by the config model

    /**
     * Per-space client state for the Data API adapter. Resolves the token,
     * endpoint, and namespace from config, then eagerly creates the client
     * and database handle so misconfiguration fails at startup.
     */
    public DataApiSpace(String name, NBConfiguration cfg) {
        this.config = cfg;
        this.name = name;
        setToken();
        setApiEndpoint();
        setNamespace();
        createClient();
    }

    public DataAPIClient getDataAPIClient() {
        return dataAPIClient;
    }

    public Database getDatabase() {
        return database;
    }

    private void createClient() {
        this.dataAPIClient = new DataAPIClient(astraToken);
        // Only pass the namespace when one was configured; otherwise let the
        // client use its server-side default.
        if (namespace != null) {
            this.database = dataAPIClient.getDatabase(astraApiEndpoint, namespace);
        } else {
            this.database = dataAPIClient.getDatabase(astraApiEndpoint);
        }
    }

    /**
     * Resolves the API endpoint from exactly one of astraApiEndpoint (literal)
     * or astraApiEndpointFile (file contents, trimmed).
     *
     * @throws BasicError if both or neither option is configured
     */
    private void setApiEndpoint() {
        Optional<String> epConfig = config.getOptional("astraApiEndpoint");
        Optional<String> epFileConfig = config.getOptional("astraApiEndpointFile");
        if (epConfig.isPresent() && epFileConfig.isPresent()) {
            throw new BasicError("You can only configure one of astraApiEndpoint or astraApiEndpointFile");
        }
        if (epConfig.isEmpty() && epFileConfig.isEmpty()) {
            throw new BasicError("You must configure one of astraApiEndpoint or astraApiEndpointFile");
        }
        epFileConfig
            .map(Path::of)
            .map(p -> {
                try {
                    return Files.readString(p);
                } catch (IOException e) {
                    // Include the path so the failure is actionable.
                    throw new RuntimeException("Error while reading endpoint from file:" + p, e);
                }
            })
            .map(String::trim)
            .ifPresent(ep -> this.astraApiEndpoint = ep);
        epConfig.ifPresent(ep -> this.astraApiEndpoint = ep);
    }

    private void setNamespace() {
        Optional<String> maybeNamespace = config.getOptional("namespace");
        maybeNamespace.ifPresent(s -> this.namespace = s);
    }

    /**
     * Resolves the auth token: the first line of astraTokenFile when set,
     * otherwise the astraToken config value.
     */
    private void setToken() {
        String tokenFileContents = null;
        Optional<String> tokenFilePath = config.getOptional("astraTokenFile");
        if (tokenFilePath.isPresent()) {
            Path path = Paths.get(tokenFilePath.get());
            try {
                var lines = Files.readAllLines(path);
                if (lines.isEmpty()) {
                    // Previously getFirst() on an empty file threw a bare
                    // NoSuchElementException with no context.
                    throw new BasicError("Token file is empty: " + path);
                }
                tokenFileContents = lines.getFirst();
            } catch (IOException e) {
                String error = "Error while reading token from file:" + path;
                logger.error(error, e);
                // Carry the context message in the thrown exception, not only the log.
                throw new RuntimeException(error, e);
            }
        }
        this.astraToken = (tokenFileContents != null) ? tokenFileContents : config.get("astraToken");
    }

    public static NBConfigModel getConfigModel() {
        return ConfigModel.of(DataApiSpace.class)
            .add(
                Param.optional("astraTokenFile", String.class)
                    .setDescription("file to load the Astra token from")
            )
            .add(
                Param.optional("astraToken", String.class)
                    .setDescription("the Astra token used to connect to the database")
            )
            .add(
                Param.optional("astraApiEndpoint", String.class)
                    .setDescription("the API endpoint for the Astra database")
            )
            .add(
                Param.optional("astraApiEndpointFile", String.class)
                    .setDescription("file to load the API endpoint for the Astra database")
            )
            .add(
                Param.defaultTo("namespace", "default_namespace")
                    .setDescription("The Astra namespace to use")
            )
            .asReadOnly();
    }
}

View File

@ -0,0 +1,73 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.model.CollectionOptions;
import com.datastax.astra.client.model.SimilarityMetric;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiCreateCollectionOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.Optional;
import java.util.function.LongFunction;
public class DataApiCreateCollectionOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiCreateCollectionOpDispenser.class);

    // Per-cycle factory for create_collection ops, resolved once per op template.
    private final LongFunction<DataApiCreateCollectionOp> opFunction;

    /**
     * Dispenses ops that create a Data API collection, optionally configured
     * with a vector dimension and a similarity metric.
     */
    public DataApiCreateCollectionOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiCreateCollectionOp> createOpFunction(ParsedOp op) {
        return (cycle) -> {
            CollectionOptions.CollectionOptionsBuilder builder = CollectionOptions.builder();

            // Optional vector dimensionality for the collection.
            op.getAsOptionalFunction("dimensions", Integer.class)
                .ifPresent(dimF -> builder.vectorDimension(dimF.apply(cycle)));

            // Optional similarity metric; accepted values map to
            // SimilarityMetric: cosine, euclidean, dot_product.
            op.getAsOptionalFunction("similarity", String.class)
                .ifPresent(simF -> builder.vectorSimilarity(SimilarityMetric.fromValue(simF.apply(cycle))));

            return new DataApiCreateCollectionOp(
                spaceFunction.apply(cycle).getDatabase(),
                targetFunction.apply(cycle),
                builder.build());
        };
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,56 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Filter;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiDeleteManyOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.function.LongFunction;
public class DataApiDeleteManyOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiDeleteManyOpDispenser.class);

    // Per-cycle factory for delete_many ops, resolved once per op template.
    private final LongFunction<DataApiDeleteManyOp> opFunction;

    /**
     * Dispenses ops that delete all documents matching a filter from the
     * target collection.
     */
    public DataApiDeleteManyOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiDeleteManyOp> createOpFunction(ParsedOp op) {
        return (cycle) -> {
            Database database = spaceFunction.apply(cycle).getDatabase();
            String collectionName = targetFunction.apply(cycle);
            Filter deleteFilter = getFilterFromOp(op, cycle);
            return new DataApiDeleteManyOp(
                database,
                database.getCollection(collectionName),
                deleteFilter
            );
        };
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,77 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.DeleteOneOptions;
import com.datastax.astra.client.model.Filter;
import com.datastax.astra.client.model.Sort;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiDeleteOneOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.function.LongFunction;
public class DataApiDeleteOneOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiDeleteOneOpDispenser.class);

    // Per-cycle factory for delete_one ops, resolved once per op template.
    private final LongFunction<DataApiDeleteOneOp> opFunction;

    /**
     * Dispenses ops that delete a single document matched by a filter,
     * optionally disambiguated by a sort clause and/or a vector.
     */
    public DataApiDeleteOneOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiDeleteOneOp> createOpFunction(ParsedOp op) {
        return (cycle) -> {
            Database database = spaceFunction.apply(cycle).getDatabase();
            return new DataApiDeleteOneOp(
                database,
                database.getCollection(targetFunction.apply(cycle)),
                getFilterFromOp(op, cycle),
                getDeleteOneOptions(op, cycle)
            );
        };
    }

    /** Assembles DeleteOneOptions from the optional 'sort' and 'vector' op fields. */
    private DeleteOneOptions getDeleteOneOptions(ParsedOp op, long cycle) {
        DeleteOneOptions options = new DeleteOneOptions();
        Sort sortClause = getSortFromOp(op, cycle);
        float[] vectorClause = getVectorFromOp(op, cycle);
        if (sortClause != null) {
            options = options.sort(sortClause);
        }
        if (vectorClause != null) {
            options = options.vector(vectorClause);
        }
        return options;
    }

    /** Parses the 'vector' op field into float values. */
    private float[] getVectorFromOp(ParsedOp op, long cycle) {
        return getVectorValues(op.get("vector", cycle));
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,47 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiDropCollectionOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.function.LongFunction;
/**
 * Dispenses drop-collection ops. The collection name is resolved per cycle from the
 * target function; the database comes from the adapter space.
 */
public class DataApiDropCollectionOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiDropCollectionOpDispenser.class);
    // Cycle-indexed op factory, prepared once at construction time.
    private final LongFunction<DataApiDropCollectionOp> opFunction;

    public DataApiDropCollectionOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiDropCollectionOp> createOpFunction(ParsedOp op) {
        return cycle -> {
            String collectionName = targetFunction.apply(cycle);
            return new DataApiDropCollectionOp(
                spaceFunction.apply(cycle).getDatabase(),
                collectionName
            );
        };
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,74 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Filter;
import com.datastax.astra.client.model.FindOneAndDeleteOptions;
import com.datastax.astra.client.model.Projection;
import com.datastax.astra.client.model.Sort;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiFindOneAndDeleteOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.function.LongFunction;
/**
 * Dispenses find-one-and-delete ops: matches a single document via the templated
 * filter and deletes it, honoring optional 'sort' and 'projection' options.
 */
public class DataApiFindOneAndDeleteOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiFindOneAndDeleteOpDispenser.class);
    // Cycle-indexed op factory, prepared once at construction time.
    private final LongFunction<DataApiFindOneAndDeleteOp> opFunction;

    public DataApiFindOneAndDeleteOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiFindOneAndDeleteOp> createOpFunction(ParsedOp op) {
        return cycle -> {
            Database database = spaceFunction.apply(cycle).getDatabase();
            return new DataApiFindOneAndDeleteOp(
                database,
                database.getCollection(targetFunction.apply(cycle)),
                getFilterFromOp(op, cycle),
                buildFindOneAndDeleteOptions(op, cycle)
            );
        };
    }

    // Assembles options from the optional 'sort' and 'projection' op fields;
    // each is only applied when present in the op template.
    private FindOneAndDeleteOptions buildFindOneAndDeleteOptions(ParsedOp op, long cycle) {
        FindOneAndDeleteOptions opts = new FindOneAndDeleteOptions();
        Sort sortClause = getSortFromOp(op, cycle);
        if (sortClause != null) {
            opts = opts.sort(sortClause);
        }
        Projection[] fields = getProjectionFromOp(op, cycle);
        if (fields != null) {
            opts = opts.projection(fields);
        }
        return opts;
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,58 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Filter;
import com.datastax.astra.client.model.Update;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiFindOneAndUpdateOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.function.LongFunction;
/**
 * Dispenses find-one-and-update ops: matches a single document via the templated
 * filter and applies the templated update to it.
 */
public class DataApiFindOneAndUpdateOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiFindOneAndUpdateOpDispenser.class);
    // Cycle-indexed op factory, prepared once at construction time.
    private final LongFunction<DataApiFindOneAndUpdateOp> opFunction;

    public DataApiFindOneAndUpdateOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiFindOneAndUpdateOp> createOpFunction(ParsedOp op) {
        return cycle -> {
            Database database = spaceFunction.apply(cycle).getDatabase();
            Filter matchFilter = getFilterFromOp(op, cycle);
            Update updateSpec = getUpdates(op, cycle);
            return new DataApiFindOneAndUpdateOp(
                database,
                database.getCollection(targetFunction.apply(cycle)),
                matchFilter,
                updateSpec
            );
        };
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,76 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Filter;
import com.datastax.astra.client.model.FindOneOptions;
import com.datastax.astra.client.model.Projection;
import com.datastax.astra.client.model.Sort;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiFindOneOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.function.LongFunction;
/**
 * Dispenses find-one ops with an optional filter, sort, vector-similarity sort,
 * and projection. Similarity scores are always included in results.
 */
public class DataApiFindOneOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiFindOneOpDispenser.class);
    // Cycle-indexed op factory, built once at construction time.
    private final LongFunction<DataApiFindOneOp> opFunction;

    public DataApiFindOneOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiFindOneOp> createOpFunction(ParsedOp op) {
        return (l) -> {
            Database db = spaceFunction.apply(l).getDatabase();
            Filter filter = getFilterFromOp(op, l);
            FindOneOptions options = getFindOneOptions(op, l);
            return new DataApiFindOneOp(
                db,
                db.getCollection(targetFunction.apply(l)),
                filter,
                options
            );
        };
    }

    // Builds FindOneOptions from optional 'sort', 'vector', and 'projection' fields.
    private FindOneOptions getFindOneOptions(ParsedOp op, long l) {
        FindOneOptions options = new FindOneOptions();
        Sort sort = getSortFromOp(op, l);
        // FIX: only read 'vector' when it is actually templated. getVectorValues NPEs
        // on a null raw value, so an op without a vector previously crashed here; this
        // matches the op.isDefined("vector") guard used by DataApiFindOpDispenser.
        float[] vector = op.isDefined("vector") ? getVectorValues(op, l) : null;
        if (sort != null) {
            options = vector != null ? options.sort(vector, sort) : options.sort(sort);
        } else if (vector != null) {
            options = options.sort(vector);
        }
        Projection[] projection = getProjectionFromOp(op, l);
        if (projection != null) {
            options = options.projection(projection);
        }
        options.setIncludeSimilarity(true);
        return options;
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,78 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Filter;
import com.datastax.astra.client.model.FindOptions;
import com.datastax.astra.client.model.Projection;
import com.datastax.astra.client.model.Sort;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiFindOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.function.LongFunction;
/**
 * Dispenses find (multi-document) ops with an optional filter, sort, vector
 * similarity sort, and projection. Similarity scores are always included.
 */
public class DataApiFindOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiFindOpDispenser.class);
    // Cycle-indexed op factory, prepared once at construction time.
    private final LongFunction<DataApiFindOp> opFunction;

    public DataApiFindOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiFindOp> createOpFunction(ParsedOp op) {
        return cycle -> {
            Database database = spaceFunction.apply(cycle).getDatabase();
            return new DataApiFindOp(
                database,
                database.getCollection(targetFunction.apply(cycle)),
                getFilterFromOp(op, cycle),
                buildFindOptions(op, cycle)
            );
        };
    }

    // Assembles FindOptions from the op template.
    // NOTE(review): the sort clause is only applied inside the 'vector' branch, so a
    // templated 'sort' without a 'vector' is silently ignored — confirm this is intended.
    private FindOptions buildFindOptions(ParsedOp op, long cycle) {
        FindOptions opts = new FindOptions();
        Sort sortClause = getSortFromOp(op, cycle);
        if (op.isDefined("vector")) {
            float[] queryVector = getVectorValues(op, cycle);
            if (sortClause != null) {
                opts = (queryVector != null) ? opts.sort(queryVector, sortClause) : opts.sort(sortClause);
            } else if (queryVector != null) {
                opts = opts.sort(queryVector);
            }
        }
        Projection[] fields = getProjectionFromOp(op, cycle);
        if (fields != null) {
            opts = opts.projection(fields);
        }
        opts.setIncludeSimilarity(true);
        return opts;
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,79 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.FindOptions;
import com.datastax.astra.client.model.Projection;
import com.datastax.astra.client.model.Sort;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiFindVectorOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.function.LongFunction;
/**
 * Dispenses vector-similarity find ops: searches the target collection with the
 * templated query vector, capped at an optional 'limit' (default 100).
 */
public class DataApiFindVectorOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiFindVectorOpDispenser.class);
    // Cycle-indexed op factory, built once at construction time.
    private final LongFunction<DataApiFindVectorOp> opFunction;

    public DataApiFindVectorOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiFindVectorOp> createOpFunction(ParsedOp op) {
        return (l) -> {
            Database db = spaceFunction.apply(l).getDatabase();
            float[] vector = getVectorValues(op, l);
            int limit = getLimit(op, l);
            return new DataApiFindVectorOp(
                db,
                db.getCollection(targetFunction.apply(l)),
                vector,
                limit
            );
        };
    }

    // Maximum number of results to return; defaults to 100 when 'limit' is untemplated.
    private int getLimit(ParsedOp op, long l) {
        return op.getConfigOr("limit", 100, l);
    }

    // FIX: removed the unused private getFindOptions(...) helper — dead code copied
    // from the find dispensers; DataApiFindVectorOp takes the vector and limit directly
    // and nothing in this class referenced it.

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,79 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.model.Document;
import com.datastax.astra.client.model.InsertManyOptions;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiInsertManyOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.LongFunction;
/**
 * Dispenses insert-many ops: parses each templated 'documents' entry into a
 * Document and submits the batch with optional insert options
 * (chunkSize, concurrency, ordered).
 */
public class DataApiInsertManyOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiInsertManyOpDispenser.class);
    // Cycle-indexed op factory, prepared once at construction time.
    private final LongFunction<DataApiInsertManyOp> opFunction;

    public DataApiInsertManyOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiInsertManyOp> createOpFunction(ParsedOp op) {
        return cycle -> {
            List<Document> parsedDocs = new ArrayList<>();
            for (Object rawDoc : op.getAsRequiredFunction("documents", List.class).apply(cycle)) {
                parsedDocs.add(Document.parse(rawDoc.toString()));
            }
            return new DataApiInsertManyOp(
                spaceFunction.apply(cycle).getDatabase(),
                targetFunction.apply(cycle),
                parsedDocs,
                getInsertManyOptions(op, cycle)
            );
        };
    }

    // Reads the optional 'options' map and applies the recognized keys; unknown
    // keys are silently ignored, matching the original behavior.
    private InsertManyOptions getInsertManyOptions(ParsedOp op, long cycle) {
        InsertManyOptions options = new InsertManyOptions();
        Optional<LongFunction<Map>> optionsFunction = op.getAsOptionalFunction("options", Map.class);
        if (optionsFunction.isEmpty()) {
            return options;
        }
        Map<String, String> optionFields = optionsFunction.get().apply(cycle);
        for (Map.Entry<String, String> field : optionFields.entrySet()) {
            String value = field.getValue();
            switch (field.getKey()) {
                case "chunkSize" -> options = options.chunkSize(Integer.parseInt(value));
                case "concurrency" -> options = options.concurrency(Integer.parseInt(value));
                case "ordered" -> options = options.ordered(Boolean.parseBoolean(value));
            }
        }
        return options;
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,55 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.model.Document;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiInsertOneOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.Map;
import java.util.function.LongFunction;
/**
 * Dispenses insert-one ops: wraps the required templated 'document' map into a
 * Document and inserts it into the per-cycle target collection.
 */
public class DataApiInsertOneOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiInsertOneOpDispenser.class);
    // Cycle-indexed op factory, prepared once at construction time.
    private final LongFunction<DataApiInsertOneOp> opFunction;

    public DataApiInsertOneOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiInsertOneOp> createOpFunction(ParsedOp op) {
        LongFunction<Map> rawDocFunc = op.getAsRequiredFunction("document", Map.class);
        return cycle -> new DataApiInsertOneOp(
            spaceFunction.apply(cycle).getDatabase(),
            targetFunction.apply(cycle),
            new Document(rawDocFunc.apply(cycle))
        );
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,58 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.model.Document;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiInsertOneOp;
import io.nosqlbench.adapter.dataapi.ops.DataApiInsertOneVectorOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.Map;
import java.util.function.LongFunction;
/**
 * Dispenses insert-one-with-vector ops: wraps the required templated 'document'
 * map into a Document and pairs it with the required 'vector' embedding.
 */
public class DataApiInsertOneVectorOpDispenser extends DataApiOpDispenser {
    private static final Logger logger = LogManager.getLogger(DataApiInsertOneVectorOpDispenser.class);
    // Cycle-indexed op factory, prepared once at construction time.
    private final LongFunction<DataApiInsertOneVectorOp> opFunction;

    public DataApiInsertOneVectorOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
        this.opFunction = createOpFunction(op);
    }

    private LongFunction<DataApiInsertOneVectorOp> createOpFunction(ParsedOp op) {
        LongFunction<Map> rawDocFunc = op.getAsRequiredFunction("document", Map.class);
        LongFunction<float[]> rawVectorFunc = op.getAsRequiredFunction("vector", float[].class);
        return cycle -> new DataApiInsertOneVectorOp(
            spaceFunction.apply(cycle).getDatabase(),
            targetFunction.apply(cycle),
            new Document(rawDocFunc.apply(cycle)),
            rawVectorFunc.apply(cycle)
        );
    }

    @Override
    public DataApiBaseOp getOp(long value) {
        return opFunction.apply(value);
    }
}

View File

@ -0,0 +1,191 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import com.datastax.astra.client.model.*;
import io.nosqlbench.adapter.dataapi.DataApiSpace;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapters.api.activityimpl.BaseOpDispenser;
import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.LongFunction;
/**
 * Shared base for Data API op dispensers. Provides helpers that turn templated op
 * fields ('sort', 'filter'/'filters', 'updates', 'vector', 'projection') into Data
 * API client model objects.
 * NOTE(review): `logger` is referenced below but not declared here — presumably
 * inherited from BaseOpDispenser; confirm.
 */
public abstract class DataApiOpDispenser extends BaseOpDispenser<DataApiBaseOp, DataApiSpace> {
    // Resolves the target collection name for a given cycle.
    protected final LongFunction<String> targetFunction;
    // Resolves the adapter space (client/database handles) for a given cycle.
    protected final LongFunction<DataApiSpace> spaceFunction;

    protected DataApiOpDispenser(DriverAdapter<? extends DataApiBaseOp, DataApiSpace> adapter, ParsedOp op,
                                 LongFunction<String> targetFunction) {
        super(adapter, op);
        this.targetFunction = targetFunction;
        this.spaceFunction = adapter.getSpaceFunc(op);
    }

    /**
     * Builds a Sort from the optional 'sort' op field, a map with 'field' and 'type'
     * ("asc" or "desc") entries. Returns null when 'sort' is absent; an unrecognized
     * type is silently ignored (also yielding null).
     */
    protected Sort getSortFromOp(ParsedOp op, long l) {
        Sort sort = null;
        Optional<LongFunction<Map>> sortFunction = op.getAsOptionalFunction("sort", Map.class);
        if (sortFunction.isPresent()) {
            Map<String,Object> sortFields = sortFunction.get().apply(l);
            String sortOrder = sortFields.get("type").toString();
            String sortField = sortFields.get("field").toString();
            switch(sortOrder) {
                case "asc" -> sort = Sorts.ascending(sortField);
                case "desc" -> sort = Sorts.descending(sortField);
            }
        }
        return sort;
    }

    /**
     * Builds a Filter from the optional 'filters' (preferred) or 'filter' op field:
     * a list of maps each carrying 'conjunction' ("and"/"or"), 'operator'
     * ("lt"/"gt"/"eq"), 'field', and 'value'. lt/gt expect long values.
     * Returns null when no filter is templated.
     * NOTE(review): when both and- and or-entries are present, the or-filter
     * assignment below replaces the and-filter — confirm whether they should be
     * combined instead.
     */
    protected Filter getFilterFromOp(ParsedOp op, long l) {
        // TODO: Clarify 'filter' vs 'filters' or whether to support both uniformly
        Filter filter = null;
        Optional<LongFunction<List>> filterFunction = op.getAsOptionalFunction("filters", List.class).or(
            () -> op.getAsOptionalFunction("filter",List.class)
        );
        if (filterFunction.isPresent()) {
            List<Map<String,Object>> filters = filterFunction.get().apply(l);
            List<Filter> andFilterList = new ArrayList<>();
            List<Filter> orFilterList = new ArrayList<>();
            for (Map<String,Object> filterFields : filters) {
                switch ((String)filterFields.get("conjunction")) {
                    case "and" -> {
                        switch (filterFields.get("operator").toString()) {
                            case "lt" ->
                                andFilterList.add(Filters.lt(filterFields.get("field").toString(), (long) filterFields.get("value")));
                            case "gt" ->
                                andFilterList.add(Filters.gt(filterFields.get("field").toString(), (long) filterFields.get("value")));
                            case "eq" ->
                                andFilterList.add(Filters.eq(filterFields.get("field").toString(), filterFields.get("value")));
                            default -> logger.error(() -> "Operation " + filterFields.get("operator") + " not supported");
                        }
                    }
                    case "or" -> {
                        switch (filterFields.get("operator").toString()) {
                            case "lt" ->
                                orFilterList.add(Filters.lt(filterFields.get("field").toString(), (long) filterFields.get("value")));
                            case "gt" ->
                                orFilterList.add(Filters.gt(filterFields.get("field").toString(), (long) filterFields.get("value")));
                            case "eq" ->
                                orFilterList.add(Filters.eq(filterFields.get("field").toString(), filterFields.get("value")));
                            default -> logger.error(() -> "Operation " + filterFields.get("operator") + " not supported");
                        }
                    }
                    default -> logger.error(() -> "Conjunction " + filterFields.get("conjunction") + " not supported");
                }
            }
            if (!andFilterList.isEmpty())
                filter = Filters.and(andFilterList.toArray(new Filter[0]));
            if (!orFilterList.isEmpty())
                filter = Filters.or(orFilterList.toArray(new Filter[0]));
        }
        return filter;
    }

    /**
     * Builds an Update from the optional 'updates' op field. Each 'update' entry is a
     * map with 'operation' (set/inc/unset/addToSet/min/rename), 'field', and 'value'.
     * NOTE(review): 'update' is reassigned per entry, so only the last entry takes
     * effect — confirm whether multiple updates should accumulate.
     */
    protected Update getUpdates(ParsedOp op, long l) {
        Update update = new Update();
        Optional<LongFunction<Map>> updatesFunction = op.getAsOptionalFunction("updates", Map.class);
        if (updatesFunction.isPresent()) {
            Map<String, Object> updates = updatesFunction.get().apply(l);
            for (Map.Entry<String, Object> entry : updates.entrySet()) {
                if (entry.getKey().equalsIgnoreCase("update")) {
                    Map<String, Object> updateFields = (Map<String, Object>) entry.getValue();
                    switch (updateFields.get("operation").toString()) {
                        case "set" ->
                            update = Updates.set(updateFields.get("field").toString(), updateFields.get("value"));
                        case "inc" ->
                            update = Updates.inc(updateFields.get("field").toString(), (double) updateFields.get("value"));
                        case "unset" -> update = Updates.unset(updateFields.get("field").toString());
                        case "addToSet" ->
                            update = Updates.addToSet(updateFields.get("field").toString(), updateFields.get("value"));
                        case "min" ->
                            update = Updates.min(updateFields.get("field").toString(), (double) updateFields.get("value"));
                        case "rename" ->
                            update = Updates.rename(updateFields.get("field").toString(), updateFields.get("value").toString());
                        default -> logger.error(() -> "Operation " + updateFields.get("operation") + " not supported");
                    }
                } else {
                    logger.error(() -> "Filter " + entry.getKey() + " not supported");
                }
            }
        }
        return update;
    }

    /** Reads the op's 'vector' field for this cycle and coerces it to float[]. */
    protected float[] getVectorValues(ParsedOp op, long l) {
        Object rawVectorValues = op.get("vector", l);
        return getVectorValues(rawVectorValues);
    }

    /**
     * Coerces a raw vector representation to float[]. Accepts float[] (returned
     * as-is), a comma-separated String, or a List of numeric-convertible elements.
     * Returns null for null input; throws RuntimeException for any other type.
     */
    protected float[] getVectorValues(Object rawVectorValues) {
        if (rawVectorValues == null) {
            // FIX: an absent 'vector' field previously fell through to the else branch
            // and threw an NPE on rawVectorValues.getClass(); callers already
            // null-check the result of this method.
            return null;
        }
        float[] floatValues;
        if (rawVectorValues instanceof float[] f) {
            return f;
        }
        if (rawVectorValues instanceof String) {
            String[] rawValues = (((String) rawVectorValues).split(","));
            floatValues = new float[rawValues.length];
            for (int i = 0; i < rawValues.length; i++) {
                floatValues[i] = Float.parseFloat(rawValues[i]);
            }
        } else if (rawVectorValues instanceof List) {
            return getVectorValuesList(rawVectorValues);
        } else {
            throw new RuntimeException("Invalid type specified for values (type: " + rawVectorValues.getClass().getSimpleName() + "), values: " + rawVectorValues.toString());
        }
        return floatValues;
    }

    /** Parses a List of numeric-convertible elements into a float[]. */
    protected float[] getVectorValuesList(Object rawVectorValues) {
        float[] vectorValues = null;
        List<Object> vectorValuesList = (List<Object>) rawVectorValues;
        vectorValues = new float[vectorValuesList.size()];
        for (int i = 0; i < vectorValuesList.size(); i++) {
            vectorValues[i] = Float.parseFloat(vectorValuesList.get(i).toString());
        }
        return vectorValues;
    }

    /**
     * Builds include/exclude projections from the optional 'projection' op field,
     * a map of "include"/"exclude" to a list of field names. Returns null when
     * 'projection' is absent.
     * NOTE(review): if both keys are present, the later map entry overwrites the
     * earlier projection — confirm intended.
     */
    protected Projection[] getProjectionFromOp(ParsedOp op, long l) {
        Projection[] projection = null;
        Optional<LongFunction<Map>> projectionFunction = op.getAsOptionalFunction("projection", Map.class);
        if (projectionFunction.isPresent()) {
            Map<String,List<String>> projectionFields = projectionFunction.get().apply(l);
            for (Map.Entry<String,List<String>> field : projectionFields.entrySet()) {
                // FIX: String.join replaces the StringBuffer/deleteCharAt idiom, which
                // threw StringIndexOutOfBoundsException when the field list was empty.
                String joinedFields = String.join(",", field.getValue());
                if (field.getKey().equalsIgnoreCase("include")) {
                    projection = Projections.include(joinedFields);
                } else if (field.getKey().equalsIgnoreCase("exclude")) {
                    projection = Projections.exclude(joinedFields);
                } else {
                    logger.error("Projection " + field + " not supported");
                }
            }
        }
        return projection;
    }
}

View File

@ -0,0 +1,34 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.function.LongFunction;
// Dispenser stub for update-many ops.
// NOTE(review): unimplemented — getOp returns null, which will fail downstream when
// the activity tries to execute the dispensed op. TODO: build the op from the
// templated filter/updates, mirroring the other dispensers in this package.
public class DataApiUpdateManyOpDispenser extends DataApiOpDispenser {
    public DataApiUpdateManyOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
    }
    @Override
    public DataApiBaseOp getOp(long value) {
        // Placeholder: no op is constructed yet.
        return null;
    }
}

View File

@ -0,0 +1,34 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.opdispensers;
import io.nosqlbench.adapter.dataapi.DataApiDriverAdapter;
import io.nosqlbench.adapter.dataapi.ops.DataApiBaseOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.function.LongFunction;
// Dispenser stub for update-one ops.
// NOTE(review): unimplemented — getOp returns null, which will fail downstream when
// the activity tries to execute the dispensed op. TODO: build the op from the
// templated filter/updates, mirroring the other dispensers in this package.
public class DataApiUpdateOneOpDispenser extends DataApiOpDispenser {
    public DataApiUpdateOneOpDispenser(DataApiDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
    }
    @Override
    public DataApiBaseOp getOp(long value) {
        // Placeholder: no op is constructed yet.
        return null;
    }
}

View File

@ -0,0 +1,28 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Database;
import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp;
/**
 * Common base for all Data API ops. Holds the {@link Database} handle that
 * concrete ops use to reach collections; subclasses implement the CycleOp
 * apply method to perform the actual operation.
 */
public abstract class DataApiBaseOp implements CycleOp {
    // Shared Database handle, visible to all subclasses.
    protected final Database db;
    public DataApiBaseOp(Database db) {
        this.db = db;
    }
}

View File

@ -0,0 +1,36 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.CollectionOptions;
/**
 * Creates a collection via {@link Database#createCollection}.
 */
public class DataApiCreateCollectionOp extends DataApiBaseOp {
    // Name of the collection to create.
    private final String collectionName;
    // Options passed verbatim to the driver; their contents are built upstream.
    private final CollectionOptions options;
    public DataApiCreateCollectionOp(Database db, String collectionName, CollectionOptions options) {
        super(db);
        this.collectionName = collectionName;
        this.options = options;
    }
    /** Executes the create call; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        return db.createCollection(collectionName, options);
    }
}

View File

@ -0,0 +1,37 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Collection;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Document;
import com.datastax.astra.client.model.Filter;

/**
 * Deletes every document matching {@code filter} from the given collection.
 */
public class DataApiDeleteManyOp extends DataApiBaseOp {
    // Parameterized with Document for consistency with the other ops in this
    // package (e.g. the find ops), replacing the previous raw Collection type.
    private final Collection<Document> collection;
    // Predicate selecting the documents to delete.
    private final Filter filter;

    public DataApiDeleteManyOp(Database db, Collection<Document> collection, Filter filter) {
        super(db);
        this.collection = collection;
        this.filter = filter;
    }

    /** Executes deleteMany; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        return collection.deleteMany(filter);
    }
}

View File

@ -0,0 +1,40 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Collection;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.DeleteOneOptions;
import com.datastax.astra.client.model.Document;
import com.datastax.astra.client.model.Filter;

/**
 * Deletes a single document matching {@code filter} from the given collection,
 * honoring the supplied {@link DeleteOneOptions}.
 */
public class DataApiDeleteOneOp extends DataApiBaseOp {
    // Parameterized with Document for consistency with the other ops in this
    // package (e.g. the find ops), replacing the previous raw Collection type.
    private final Collection<Document> collection;
    // Predicate selecting the document to delete.
    private final Filter filter;
    // Options passed verbatim to the driver.
    private final DeleteOneOptions options;

    public DataApiDeleteOneOp(Database db, Collection<Document> collection, Filter filter, DeleteOneOptions options) {
        super(db);
        this.collection = collection;
        this.filter = filter;
        this.options = options;
    }

    /** Executes deleteOne; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        return collection.deleteOne(filter, options);
    }
}

View File

@ -0,0 +1,39 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Database;
/**
 * Drops the named collection if it exists.
 * Returns whether the collection existed before the call.
 */
public class DataApiDropCollectionOp extends DataApiBaseOp {
    // Name of the collection to drop.
    private final String collectionName;

    public DataApiDropCollectionOp(Database db, String collectionName) {
        super(db);
        this.collectionName = collectionName;
    }

    @Override
    public Object apply(long value) {
        // Primitive instead of boxed Boolean: avoids needless boxing and any
        // accidental null-unboxing hazard in later edits.
        boolean exists = db.collectionExists(collectionName);
        // TODO: we need to remove these from the ops when we can, because this hides additional ops which
        // should be surfaced in the test definition. Condition operations should be provided with clear views
        // at the workload template level
        if (exists) {
            db.dropCollection(collectionName);
        }
        return exists;
    }
}

View File

@ -0,0 +1,41 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Collection;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Document;
import com.datastax.astra.client.model.Filter;
import com.datastax.astra.client.model.FindOneAndDeleteOptions;
/**
 * Finds a single document matching {@code filter} and deletes it, honoring the
 * supplied {@link FindOneAndDeleteOptions}.
 */
public class DataApiFindOneAndDeleteOp extends DataApiBaseOp {
    // Collection the op runs against.
    private final Collection<Document> collection;
    // Predicate selecting the document.
    private final Filter filter;
    // Options passed verbatim to the driver.
    private final FindOneAndDeleteOptions options;
    public DataApiFindOneAndDeleteOp(Database db, Collection<Document> collection, Filter filter, FindOneAndDeleteOptions options) {
        super(db);
        this.collection = collection;
        this.filter = filter;
        this.options = options;
    }
    /** Executes findOneAndDelete; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        return collection.findOneAndDelete(filter, options);
    }
}

View File

@ -0,0 +1,39 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Collection;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.*;
/**
 * Finds a single document matching {@code filter} and applies {@code update} to it.
 * NOTE(review): unlike the sibling find ops, this op passes no options object to
 * the driver call — confirm whether FindOneAndUpdateOptions support is intended.
 */
public class DataApiFindOneAndUpdateOp extends DataApiBaseOp {
    // Collection the op runs against.
    private final Collection<Document> collection;
    // Predicate selecting the document.
    private final Filter filter;
    // Update to apply to the matched document.
    private final Update update;
    public DataApiFindOneAndUpdateOp(Database db, Collection<Document> collection, Filter filter, Update update) {
        super(db);
        this.collection = collection;
        this.filter = filter;
        this.update = update;
    }
    /** Executes findOneAndUpdate; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        return collection.findOneAndUpdate(filter, update);
    }
}

View File

@ -0,0 +1,41 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Collection;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Document;
import com.datastax.astra.client.model.Filter;
import com.datastax.astra.client.model.FindOneOptions;
/**
 * Finds a single document matching {@code filter}, honoring the supplied
 * {@link FindOneOptions}.
 */
public class DataApiFindOneOp extends DataApiBaseOp {
    // Collection the op runs against.
    private final Collection<Document> collection;
    // Predicate selecting the document.
    private final Filter filter;
    // Options passed verbatim to the driver.
    private final FindOneOptions options;
    public DataApiFindOneOp(Database db, Collection<Document> collection, Filter filter, FindOneOptions options) {
        super(db);
        this.collection = collection;
        this.filter = filter;
        this.options = options;
    }
    /** Executes findOne; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        return collection.findOne(filter, options);
    }
}

View File

@ -0,0 +1,45 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Collection;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.*;
import java.util.List;
import java.util.stream.StreamSupport;
/**
 * Runs a find against the collection and materializes the full result set,
 * so the op's timing covers complete result traversal rather than just the
 * initial driver call.
 */
public class DataApiFindOp extends DataApiBaseOp {
    // Collection the op runs against.
    private final Collection<Document> collection;
    // Predicate selecting the documents.
    private final Filter filter;
    // Options passed verbatim to the driver.
    private final FindOptions options;

    public DataApiFindOp(Database db, Collection<Document> collection, Filter filter, FindOptions options) {
        super(db);
        this.collection = collection;
        this.filter = filter;
        this.options = options;
    }

    @Override
    public List<Document> apply(long value) {
        // Eagerly drain the iterable into an (unmodifiable) list.
        var iterable = collection.find(filter, options);
        return StreamSupport.stream(iterable.spliterator(), false).toList();
    }
}

View File

@ -0,0 +1,42 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Collection;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Document;
import com.datastax.astra.client.model.Filter;
/**
 * Vector similarity find with an additional filter: queries the collection with
 * the given query vector, result limit, and filter predicate.
 */
public class DataApiFindVectorFilterOp extends DataApiBaseOp {
    // Collection the op runs against.
    private final Collection<Document> collection;
    // Query vector for the similarity search.
    private final float[] vector;
    // Maximum number of results to return.
    private final int limit;
    // Additional predicate restricting candidate documents.
    private final Filter filter;
    public DataApiFindVectorFilterOp(Database db, Collection<Document> collection, float[] vector, int limit, Filter filter) {
        super(db);
        this.collection = collection;
        this.vector = vector;
        this.limit = limit;
        this.filter = filter;
    }
    /** Executes the filtered vector find; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        return collection.find(filter, vector, limit);
    }
}

View File

@ -0,0 +1,41 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Collection;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Document;
import com.datastax.astra.client.model.Filter;
import com.datastax.astra.client.model.FindOptions;
/**
 * Vector similarity find: queries the collection with the given query vector
 * and result limit, with no additional filter.
 */
public class DataApiFindVectorOp extends DataApiBaseOp {
    // Collection the op runs against.
    private final Collection<Document> collection;
    // Query vector for the similarity search.
    private final float[] vector;
    // Maximum number of results to return.
    private final int limit;
    public DataApiFindVectorOp(Database db, Collection<Document> collection, float[] vector, int limit) {
        super(db);
        this.collection = collection;
        this.vector = vector;
        this.limit = limit;
    }
    /** Executes the vector find; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        return collection.find(vector, limit);
    }
}

View File

@ -0,0 +1,42 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Document;
import com.datastax.astra.client.model.InsertManyOptions;
import java.util.List;
/**
 * Inserts a batch of documents into the named collection, honoring the supplied
 * {@link InsertManyOptions}. The collection is looked up by name at execution
 * time rather than held as a reference.
 */
public class DataApiInsertManyOp extends DataApiBaseOp {
    // Documents to insert.
    private final List<? extends Document> documents;
    // Name of the target collection.
    private final String collectionName;
    // Options passed verbatim to the driver.
    private final InsertManyOptions options;
    public DataApiInsertManyOp(Database db, String collectionName, List<? extends Document> documents, InsertManyOptions options) {
        super(db);
        this.collectionName = collectionName;
        this.documents = documents;
        this.options = options;
    }
    /** Executes insertMany; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        return db.getCollection(collectionName).insertMany(documents, options);
    }
}

View File

@ -0,0 +1,36 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Document;
/**
 * Inserts a single document into the named collection. The collection is
 * looked up by name at execution time rather than held as a reference.
 */
public class DataApiInsertOneOp extends DataApiBaseOp {
    // Name of the target collection.
    private final String collectionName;
    // Document to insert.
    private final Document doc;

    public DataApiInsertOneOp(Database db, String collectionName, Document doc) {
        super(db);
        this.collectionName = collectionName;
        this.doc = doc;
    }

    /** Executes insertOne; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        var target = db.getCollection(collectionName);
        return target.insertOne(doc);
    }
}

View File

@ -0,0 +1,38 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
import com.datastax.astra.client.Database;
import com.datastax.astra.client.model.Document;
/**
 * Inserts a single document together with its embedding vector into the named
 * collection. The collection is looked up by name at execution time.
 */
public class DataApiInsertOneVectorOp extends DataApiBaseOp {
    // Document to insert.
    private final Document doc;
    // Name of the target collection.
    private final String collectionName;
    // Embedding vector for the document. Made final for consistency: every other
    // field in this op family is immutable.
    private final float[] vector;

    public DataApiInsertOneVectorOp(Database db, String collectionName, Document doc, float[] vector) {
        super(db);
        this.collectionName = collectionName;
        this.doc = doc;
        this.vector = vector;
    }

    /** Executes insertOne with the vector; returns the driver's result object. */
    @Override
    public Object apply(long value) {
        return db.getCollection(collectionName).insertOne(doc, vector);
    }
}

View File

@ -0,0 +1,35 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.dataapi.ops;
/**
 * The op type names recognized by the Data API adapter. The lower_snake_case
 * constant names match the op keys used in workload YAML templates
 * (e.g. {@code insert_one}, {@code create_collection}).
 */
public enum DataApiOpType {
    create_collection,
    insert_many,
    insert_one,
    insert_one_vector,
    find,
    find_one,
    find_one_and_delete,
    find_one_and_update,
    find_vector,
    find_vector_filter,
    update_one,
    update_many,
    delete_one,
    delete_many,
    delete_collection
}

View File

@ -0,0 +1,193 @@
min_version: "5.21.0"
description: |
A basic workload that uses the DataStax Data API Client in Java, emulating what
applications would do in the native stack.
TEMPLATE(cardinality,1000)
TEMPLATE(collection,keyvalue)
TEMPLATE(dimensions,1536)
TEMPLATE(similarity,COSINE)
TEMPLATE(keycount,TEMPLATE(cardinality))
TEMPLATE(valuecount,TEMPLATE(cardinality))
scenarios:
dapi_novector:
schema: run driver=dataapi tags==block:schema threads==1 cycles==UNDEF
rampup: >-
run driver=dataapi tags==block:rampup
cycles===TEMPLATE(rampup-cycles,TEMPLATE(cardinality))
threads=auto errors=count
find_key: >-
run driver=dataapi tags==block:find_key
cycles===TEMPLATE(main-cycles,1000)
threads=auto errors=count
dapi_vector_d1536:
schema_vector: run driver=dataapi tags==block:schema_vector threads==1 cycles==UNDEF
rampup_vector: >-
run driver=dataapi tags==block:rampup_vector
cycles===TEMPLATE(rampup-cycles,TEMPLATE(cardinality))
threads=auto errors=count
find_key_vector: >-
run driver=dataapi tags==block:find_key_vector
cycles===TEMPLATE(main-cycles,TEMPLATE(cardinality))
threads=auto errors=count
# kv_dapi:
# kv_dapi_schema: run driver=http tags==block:schema threads==1 cycles==UNDEF
# kv_dapi_rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto
# kv_dapi_main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto
# basic_check:
# schema: run driver=http tags==block:schema threads==1 cycles==UNDEF
# rampup: run driver=http tags==block:rampup cycles===TEMPLATE(rampup-cycles,10) threads=auto
# main: run driver=http tags==block:"main.*" cycles===TEMPLATE(main-cycles,10) threads=auto
bindings:
# To enable an optional weighted set of hosts in place of a load balancer
# Examples
# single host: jsonapi_host=host1
# multiple hosts: jsonapi_host=host1,host2,host3
# multiple weighted hosts: jsonapi_host=host1:3,host2:7
weighted_hosts: WeightedStrings('TEMPLATE(jsonapi_host,TEMPLATE(stargate_host,localhost))')
seq_key: Mod(TEMPLATE(keycount)); ToString() -> String
# seq_key: Mod(TEMPLATE(keycount,50000000000L));
seq_value: Hash(); Mod(TEMPLATE(valuecount)); ToString() -> String
# rw_key: TEMPLATE(keydist,Uniform(0,50000000000L));
rw_key: TEMPLATE(keydist,Uniform(0,TEMPLATE(keycount))); ToString() -> String
rw_key_num: TEMPLATE(keydist,Uniform(0,TEMPLATE(keycount)));
rw_value: Hash(); TEMPLATE(valdist,Uniform(0,TEMPLATE(valuecount))); ToString() -> String
vector_value: HashedFloatVectors(TEMPLATE(dimensions,1536));
request_id: ToHashedUUID(); ToString();
params:
cl: TEMPLATE(cl,LOCAL_QUORUM)
blocks:
reset_schema:
ops:
drop_index:
raw: |-
DROP INDEX IF EXISTS TEMPLATE(keyspace, baselines).TEMPLATE(table,keyvalue)_value_idx;
drop-table:
raw: |-
DROP TABLE IF EXISTS TEMPLATE(keyspace, baselines).TEMPLATE(table,keyvalue);
schema:
ops:
delete_collection_op:
delete_collection: "TEMPLATE(collection)"
create_collection_op:
create_collection: "TEMPLATE(collection)"
# separate these cases later, when you can recreate the same collection name with/without vector support
schema_vector:
ops:
delete_collection_op_v:
delete_collection: "TEMPLATE(collection)_v"
create_collection_op_v:
create_collection: "TEMPLATE(collection)_v"
dimensions: TEMPLATE(dimensions)
similarity: TEMPLATE(similarity)
rampup:
ops:
insert_one_op:
insert_one: "TEMPLATE(collection)"
document:
_id: "{seq_key}"
value: "{seq_value}"
rampup_vector:
ops:
insert_one_op_v:
insert_one_vector: "TEMPLATE(collection)_v"
document:
_id: "{seq_key}"
value: "{seq_value}"
vector: "{vector_value}"
# rampup-uuid:
# ops:
# insert_one_op:
# insert-one: "TEMPLATE(collection)"
# document:
# value: "{seq_value}"
find_key:
params:
ratio: 5
ops:
find_op:
find: "TEMPLATE(collection)"
filters:
- conjunction: "and"
operator: "lt"
field: "_id"
value: "{rw_key_num}"
vector: "{vector_value}"
find_key_vector:
params:
ratio: 5
ops:
find_op_v:
find_vector_filter: "TEMPLATE(collection)_v"
filters:
- conjunction: "and"
operator: "lt"
field: "_id"
value: "{rw_key_num}"
vector: "{vector_value}"
#
# rampup_with_vector_uuid:
# ops:
# insert_one_op:
# insert_one: "TEMPLATE(collection)"
# document:
# value: "{seq_value}"
# vector: "{vector_value}"
#
# main_read_with_vector:
# ops:
# find_op:
# find: "TEMPLATE(collection)"
# filter:
# _id: "{rw_key}"
#
# main_ann_with_vector_limit_20:
# params:
# ratio: 5
# ops:
# find_op:
# find: "TEMPLATE(collection)"
# sort:
# vector: "{vector_value}"
# options:
# limit: 20
# schema_with_text_sai:
# ops:
# delete_collection_op:
# delete_collection: "TEMPLATE(collection)"
# create_collection_op:
# create_collection: "TEMPLATE(collection)"
# rampup_with_text_sai:
# ops:
# insert_one_op:
# insert_one: "TEMPLATE(collection)"
# document:
# _id: "{seq_key}"
# value: "{seq_value}"
# main_read_with_text_sai:
# params:
# ratio: 5
# ops:
# find_op:
# find: "TEMPLATE(collection)"
# filter:
# value: "{rw_value}"

View File

@ -0,0 +1,11 @@
scenarios:
default:
create_collection: run driver=dataapi tags==block:create_collection cycles=1
blocks:
create_collection:
ops:
op1:
create_collection: "collectionName"
dimensions: 10
similarity: "COSINE"

View File

@ -0,0 +1,18 @@
scenarios:
default:
delete_many: run driver=dataapi tags==block:delete_many cycles=1
blocks:
delete_many:
ops:
op1:
delete_many: "collectionName"
filters:
- conjunction: "and"
operator: "lt"
field: "field1"
value: 10
- conjunction: "or"
operator: "eq"
field: "field2"
value: "value2"

View File

@ -0,0 +1,13 @@
scenarios:
default:
delete_one: run driver=dataapi tags==block:delete_one cycles=1
blocks:
delete_one:
ops:
op1:
delete_one: "collectionName"
sort:
type: "asc"
field: "field1"
vector: "1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0"

View File

@ -0,0 +1,9 @@
scenarios:
default:
drop_collection: run driver=dataapi tags==block:drop_collection cycles=1
blocks:
drop_collection:
ops:
op1:
drop_collection: "collectionName"

View File

@ -0,0 +1,26 @@
scenarios:
default:
find: run driver=dataapi tags==block:find cycles=10
blocks:
find:
ops:
op1:
find: "collectionName"
filters:
- conjunction: "and"
operator: "lt"
field: "field1"
value: 10
- conjunction: "or"
operator: "eq"
field: "field2"
value: "value2"
sort:
type: "asc"
field: "field1"
vector: "1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0"
projection:
include:
- "field1"
- "field2"

View File

@ -0,0 +1,26 @@
scenarios:
default:
find_one: run driver=dataapi tags==block:find_one cycles=10
blocks:
find_one:
ops:
op1:
find_one: "collectionName"
filters:
- conjunction: "and"
operator: "lt"
field: "field1"
value: 10
- conjunction: "or"
operator: "eq"
field: "field2"
value: "value2"
sort:
type: "asc"
field: "field1"
vector: "1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0"
projection:
include:
- "field1"
- "field2"

View File

@ -0,0 +1,25 @@
scenarios:
default:
find_one_and_delete: run driver=dataapi tags==block:find_one_and_delete cycles=1
blocks:
find_one_and_delete:
ops:
op1:
find_one_and_delete: "collectionName"
filters:
- conjunction: "and"
operator: "lt"
field: "field1"
value: 10
- conjunction: "or"
operator: "eq"
field: "field2"
value: "value2"
sort:
type: "asc"
field: "field1"
projection:
include:
- "field1"
- "field2"

View File

@ -0,0 +1,17 @@
scenarios:
default:
insert_many: run driver=dataapi tags==block:insert_many cycles=1
blocks:
insert_many:
ops:
op1:
insert_many: "collectionName"
options:
chunkSize: "1000"
concurrency: "10"
ordered: "false"
documents:
- "{valid json here}"
- "{valid json here}"
- "{valid json here}"

View File

@ -0,0 +1,10 @@
scenarios:
default:
insert_one: run driver=dataapi tags==block:insert_one cycles=1
blocks:
insert_one:
ops:
op1:
insert_one: "collectionName"
document: "{valid json here}"

View File

@ -0,0 +1,11 @@
scenarios:
default:
insert_one: run driver=dataapi tags==block:insert_one cycles=1
blocks:
insert_one:
ops:
op1:
insert_one: "collectionName"
document: "{valid json here}"
vector: "1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0"

View File

@ -45,7 +45,7 @@ public class MilvusSpace implements AutoCloseable {
protected MilvusServiceClient client;
private final Map<String, ConnectParam> connections = new HashMap<>();
// private final Map<String, ConnectParam> connections = new HashMap<>();
/**
* Create a new MilvusSpace Object which stores all stateful contextual information needed to interact

View File

@ -24,7 +24,7 @@ import io.nosqlbench.adapters.api.templating.ParsedOp;
public class MilvusCreateCollectionOp extends MilvusBaseOp<CreateCollectionParam> {
/**
* Create a new {@link ParsedOp} encapsulating a call to the Milvus/Zilliz client delete method
* Create a new {@link ParsedOp} encapsulating a call to the Milvus/Zilliz client create method.
*
* @param client The associated {@link MilvusServiceClient} used to communicate with the database
* @param request The {@link CreateCollectionParam} built for this operation

View File

@ -9,12 +9,15 @@ https://github.com/milvus-io/milvus-sdk-java.
The following parameters must be supplied to the adapter at runtime in order to successfully connect to an
instance of the Milvus/Zilliz database:
* token - In order to use the pinecone database you must have an account. Once the account is created you can [request
* `token` - In order to use the Milvus/Zilliz database you must have an account. Once the account is created you
can [request
an api key/token](https://milvus.io/docs/users_and_roles.md#Users-and-Roles). This key will need to be provided any
time a
database connection is desired.
* uri - When an Index is created in the database the uri must be specified as well. The adapter will
use the default value of localhost:19530 if none is provided at runtime.
time a database connection is desired. Alternatively,
the api key can be stored in a file securely and referenced via the `token_file` config option pointing to the path of
the file.
* `uri` - When an index is created in the database the URI/endpoint must be specified as well. The adapter will
use the default value of `localhost:19530` if none is provided at runtime.
* `database_name` or `database` - the name of the database to use. For Zilliz, only `default` is supported.
## Op Templates

View File

@ -0,0 +1,62 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>adapter-qdrant</artifactId>
<packaging>jar</packaging>
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>${revision}</version>
<relativePath>../../mvn-defaults</relativePath>
</parent>
<name>${project.artifactId}</name>
<description>
An nosqlbench adapter driver module for the Qdrant database.
</description>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-annotations</artifactId>
<version>${revision}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapters-api</artifactId>
<version>${revision}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.grpc</groupId>
<artifactId>grpc-protobuf</artifactId>
<!-- <version>1.63.0</version> -->
<!-- Trying to match https://github.com/qdrant/java-client/blob/v1.9.0/build.gradle#L80 -->
<version>1.59.0</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java-util</artifactId>
<!--<version>3.25.3</version>-->
<!-- Trying to match https://github.com/qdrant/java-client/blob/master/build.gradle#L81 -->
<version>3.24.0</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<!--<version>33.1.0-jre</version>-->
<!-- Trying to match https://github.com/qdrant/java-client/blob/master/build.gradle#L93 -->
<version>30.1-jre</version>
</dependency>
<dependency>
<groupId>io.qdrant</groupId>
<artifactId>client</artifactId>
<version>1.9.0</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,67 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant;
import io.qdrant.client.grpc.Points.ScoredPoint;
import org.apache.commons.lang3.StringUtils;
import java.util.Arrays;
import java.util.List;
/**
 * Static helpers for the Qdrant adapter: input splitting, digit masking, and
 * result-id extraction.
 */
public class QdrantAdapterUtils {

    public static final String QDRANT = "qdrant";

    // Utility class; not instantiable.
    private QdrantAdapterUtils() {
    }

    /**
     * Split a comma- and/or space-separated list into its non-blank parts.
     *
     * @param input the delimited string; must be non-blank (assertion-checked)
     * @return the list of names
     */
    public static List<String> splitNames(String input) {
        // isNotBlank already implies isNotEmpty, so a single check suffices.
        assert StringUtils.isNotBlank(input);
        return Arrays.stream(input.split("( +| *, *)"))
            .filter(StringUtils::isNotBlank)
            .toList();
    }

    /**
     * Split a comma- and/or space-separated list of longs into parsed values.
     *
     * @param input the delimited string; must be non-blank (assertion-checked)
     * @return the list of parsed longs
     * @throws NumberFormatException if any token is not a valid long
     */
    public static List<Long> splitLongs(String input) {
        // isNotBlank already implies isNotEmpty, so a single check suffices.
        assert StringUtils.isNotBlank(input);
        return Arrays.stream(input.split("( +| *, *)"))
            .filter(StringUtils::isNotBlank)
            .map(Long::parseLong)
            .toList();
    }

    /**
     * Mask the digits in the given string with '*'
     *
     * @param unmasked The string to mask
     * @return The masked string
     */
    protected static String maskDigits(String unmasked) {
        assert StringUtils.isNotBlank(unmasked);
        int inputLength = unmasked.length();
        StringBuilder masked = new StringBuilder(inputLength);
        for (char ch : unmasked.toCharArray()) {
            if (Character.isDigit(ch)) {
                masked.append("*");
            } else {
                masked.append(ch);
            }
        }
        return masked.toString();
    }

    /**
     * Extract the numeric point ids from a search response as an int array.
     * NOTE(review): narrowing long ids to int truncates values above
     * Integer.MAX_VALUE — confirm id ranges with callers.
     */
    public static int[] searchPointsResponseIdNumToIntArray(List<ScoredPoint> response) {
        return response.stream().mapToInt(r -> ((Number) r.getId().getNum()).intValue()).toArray();
    }
}

View File

@ -0,0 +1,54 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant;
import io.nosqlbench.adapter.qdrant.ops.QdrantBaseOp;
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
import io.nosqlbench.adapters.api.activityimpl.uniform.BaseDriverAdapter;
import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter;
import io.nosqlbench.nb.annotations.Service;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.nb.api.labels.NBLabels;
import java.util.function.Function;
import static io.nosqlbench.adapter.qdrant.QdrantAdapterUtils.QDRANT;
@Service(value = DriverAdapter.class, selector = QDRANT)
public class QdrantDriverAdapter extends BaseDriverAdapter<QdrantBaseOp<?>, QdrantSpace> {

    /**
     * Create a new driver adapter for Qdrant ops.
     *
     * @param parentComponent The parent component in the NB component tree
     * @param labels          The labels to attach to this component
     */
    public QdrantDriverAdapter(NBComponent parentComponent, NBLabels labels) {
        super(parentComponent, labels);
    }

    /** Map parsed op templates onto Qdrant op dispensers. */
    @Override
    public OpMapper<QdrantBaseOp<?>> getOpMapper() {
        return new QdrantOpMapper(this);
    }

    /** Each named space gets its own {@link QdrantSpace} built from this run's configuration. */
    @Override
    public Function<String, ? extends QdrantSpace> getSpaceInitializer(NBConfiguration cfg) {
        return spaceName -> new QdrantSpace(spaceName, cfg);
    }

    /** Extend the base adapter's config model with the Qdrant-specific parameters. */
    @Override
    public NBConfigModel getConfigModel() {
        return super.getConfigModel().add(QdrantSpace.getConfigModel());
    }
}

View File

@ -0,0 +1,32 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant;
import io.nosqlbench.adapter.diag.DriverAdapterLoader;
import io.nosqlbench.nb.annotations.Service;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.labels.NBLabels;
import static io.nosqlbench.adapter.qdrant.QdrantAdapterUtils.QDRANT;
@Service(value = DriverAdapterLoader.class, selector = QDRANT)
public class QdrantDriverAdapterLoader implements DriverAdapterLoader {
    /**
     * Construct the {@link QdrantDriverAdapter} for the "qdrant" driver selector.
     *
     * @param parent      The parent component for the new adapter
     * @param childLabels The labels to attach to the new adapter
     * @return a newly constructed {@link QdrantDriverAdapter}
     */
    @Override
    public QdrantDriverAdapter load(NBComponent parent, NBLabels childLabels) {
        return new QdrantDriverAdapter(parent, childLabels);
    }
}

View File

@ -0,0 +1,70 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant;
import io.nosqlbench.adapter.qdrant.opdispensers.*;
import io.nosqlbench.adapter.qdrant.ops.QdrantBaseOp;
import io.nosqlbench.adapter.qdrant.types.QdrantOpType;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.engine.api.templating.TypeAndTarget;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class QdrantOpMapper implements OpMapper<QdrantBaseOp<?>> {
    private static final Logger logger = LogManager.getLogger(QdrantOpMapper.class);
    private final QdrantDriverAdapter adapter;

    /**
     * Create a new QdrantOpMapper implementing the {@link OpMapper} interface.
     *
     * @param adapter The associated {@link QdrantDriverAdapter}
     */
    public QdrantOpMapper(QdrantDriverAdapter adapter) {
        this.adapter = adapter;
    }

    /**
     * Given an instance of a {@link ParsedOp} returns the appropriate {@link QdrantBaseOpDispenser} subclass
     *
     * @param op The {@link ParsedOp} to be evaluated
     * @return The correct {@link QdrantBaseOpDispenser} subclass based on the op type
     */
    @Override
    public OpDispenser<? extends QdrantBaseOp<?>> apply(ParsedOp op) {
        TypeAndTarget<QdrantOpType, String> typeAndTarget = op.getTypeAndTarget(
            QdrantOpType.class,
            String.class,
            "type",
            "target"
        );
        logger.info(() -> "Using '" + typeAndTarget.enumId + "' op type for op template '" + op.getName() + "'");
        // Switch expression is exhaustive over QdrantOpType, so no default branch is
        // needed; adding a new op type will surface as a compile error here rather
        // than a runtime failure. (Removed the commented-out default accordingly.)
        return switch (typeAndTarget.enumId) {
            case delete_collection -> new QdrantDeleteCollectionOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case create_collection -> new QdrantCreateCollectionOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case create_payload_index ->
                new QdrantCreatePayloadIndexOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case search_points -> new QdrantSearchPointsOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case upsert_points -> new QdrantUpsertPointsOpDispenser(adapter, op, typeAndTarget.targetFunction);
            case count_points -> new QdrantCountPointsOpDispenser(adapter, op, typeAndTarget.targetFunction);
        };
    }
}

View File

@ -0,0 +1,135 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant;
import io.nosqlbench.nb.api.config.standard.ConfigModel;
import io.nosqlbench.nb.api.config.standard.NBConfigModel;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.nb.api.config.standard.Param;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.QdrantGrpcClient;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.Duration;
/**
* The {@code QdrantSpace} class is a context object which stores all stateful contextual information needed to interact
* with the Qdrant database instance.
*
* @see <a href="https://qdrant.tech/documentation/cloud/quickstart-cloud/">Qdrant cloud quick start guide</a>
* @see <a href="https://qdrant.tech/documentation/quick-start/">Qdrant quick start guide</a>
* @see <a href="https://github.com/qdrant/java-client">Qdrant Java client</a>
*/
public class QdrantSpace implements AutoCloseable {
    private final static Logger logger = LogManager.getLogger(QdrantSpace.class);
    private final String name;
    private final NBConfiguration cfg;
    // Lazily created on first use; access is guarded by the synchronized getClient().
    protected QdrantClient client;

    /**
     * Create a new QdrantSpace Object which stores all stateful contextual information needed to interact
     * with the Qdrant database instance.
     *
     * @param name The name of this space
     * @param cfg  The configuration ({@link NBConfiguration}) for this nb run
     */
    public QdrantSpace(String name, NBConfiguration cfg) {
        this.name = name;
        this.cfg = cfg;
    }

    /**
     * Return the shared client for this space, creating it on first call.
     */
    public synchronized QdrantClient getClient() {
        if (client == null) {
            client = createClient();
        }
        return client;
    }

    private QdrantClient createClient() {
        String uri = cfg.get("uri");
        int grpcPort = cfg.getOptional("grpc_port").map(Integer::parseInt).orElse(6334);
        boolean useTls = cfg.getOptional("use_tls").map(Boolean::parseBoolean).orElse(true);
        var builder = QdrantGrpcClient.newBuilder(uri, grpcPort, useTls);
        // Prefer a token file when configured; otherwise fall back to the inline token.
        var requiredToken = cfg.getOptional("token_file")
            .map(Paths::get)
            .map(
                tokenFilePath -> {
                    try {
                        // Only the first line of the token file is used.
                        return Files.readAllLines(tokenFilePath).getFirst();
                    } catch (IOException e) {
                        String error = "Error while reading token from file:" + tokenFilePath;
                        logger.error(error, e);
                        throw new RuntimeException(e);
                    }
                }
            ).orElseGet(
                () -> cfg.getOptional("token")
                    .orElseThrow(() -> new RuntimeException("You must provide either a token_file or a token to " +
                        "configure a Qdrant client"))
            );
        builder = builder.withApiKey(requiredToken);
        builder = builder.withTimeout(
            Duration.ofMillis(NumberUtils.toInt(cfg.getOptional("timeout_ms").orElse("3000")))
        );
        // Reuse the already-read uri local instead of re-querying the config.
        logger.info("{}: Creating new Qdrant Client with (masked) token [{}], uri/endpoint [{}]",
            this.name, QdrantAdapterUtils.maskDigits(requiredToken), uri);
        return new QdrantClient(builder.build());
    }

    /**
     * The configuration parameters accepted by this space.
     */
    public static NBConfigModel getConfigModel() {
        return ConfigModel.of(QdrantSpace.class)
            .add(
                Param.optional("token_file", String.class, "the file to load the api token from")
            )
            .add(
                Param.defaultTo("token", "qdrant")
                    .setDescription("the Qdrant api token to use to connect to the database")
            )
            .add(
                Param.defaultTo("uri", "localhost")
                    .setDescription("the URI endpoint in which the database is running. Do not provide any suffix like https:// here.")
            )
            .add(
                Param.defaultTo("use_tls", true)
                    .setDescription("whether to use TLS for the connection. Defaults to true.")
            )
            .add(
                Param.defaultTo("timeout_ms", 3000)
                    .setDescription("sets the timeout in milliseconds for all requests. Defaults to 3000ms.")
            )
            .add(
                // Fixed: was 6443, contradicting both the createClient() fallback (6334)
                // and this parameter's own description.
                Param.defaultTo("grpc_port", 6334)
                    .setDescription("the port to use for the gRPC connection. Defaults to 6334.")
            )
            .asReadOnly();
    }

    @Override
    public void close() throws Exception {
        if (client != null) {
            client.close();
        }
    }
}

View File

@ -0,0 +1,64 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.opdispensers;
import io.nosqlbench.adapter.qdrant.QdrantDriverAdapter;
import io.nosqlbench.adapter.qdrant.QdrantSpace;
import io.nosqlbench.adapter.qdrant.ops.QdrantBaseOp;
import io.nosqlbench.adapters.api.activityimpl.BaseOpDispenser;
import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.qdrant.client.QdrantClient;
import java.util.function.LongFunction;
/**
 * Base dispenser for all Qdrant ops. Wires the per-cycle space, client, request
 * parameter, and op construction functions together; concrete subclasses supply
 * the request-building ({@link #getParamFunc}) and op-building ({@link #createOpFunc})
 * pieces for their specific request type {@code T}.
 *
 * @param <T> the gRPC request type this dispenser produces per cycle
 */
public abstract class QdrantBaseOpDispenser<T> extends BaseOpDispenser<QdrantBaseOp<T>, QdrantSpace> {
    // Resolves the QdrantSpace for a given cycle value.
    protected final LongFunction<QdrantSpace> qdrantSpaceFunction;
    // Resolves the client from the cycle's space.
    protected final LongFunction<QdrantClient> clientFunction;
    private final LongFunction<? extends QdrantBaseOp<T>> opF;
    private final LongFunction<T> paramF;

    /**
     * Wire up the function chain: space -> client -> request params -> op.
     * Note the ordering is significant: paramF must exist before opF is built,
     * since createOpFunc receives it.
     *
     * @param adapter The associated {@link QdrantDriverAdapter}
     * @param op      The {@link ParsedOp} encapsulating the activity for this cycle
     * @param targetF A LongFunction yielding the op's target (e.g. collection name)
     */
    protected QdrantBaseOpDispenser(QdrantDriverAdapter adapter, ParsedOp op, LongFunction<String> targetF) {
        super((DriverAdapter)adapter, op);
        this.qdrantSpaceFunction = adapter.getSpaceFunc(op);
        this.clientFunction = (long l) -> this.qdrantSpaceFunction.apply(l).getClient();
        this.paramF = getParamFunc(this.clientFunction,op,targetF);
        this.opF = createOpFunc(paramF, this.clientFunction, op, targetF);
    }

    protected QdrantDriverAdapter getDriverAdapter() {
        return (QdrantDriverAdapter) adapter;
    }

    /**
     * Build the function that produces the request parameter object for a cycle.
     */
    public abstract LongFunction<T> getParamFunc(
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF
    );

    /**
     * Build the function that binds client and request params into an executable op.
     */
    public abstract LongFunction<QdrantBaseOp<T>> createOpFunc(
        LongFunction<T> paramF,
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF
    );

    /** Produce the fully-bound op for the given cycle value. */
    @Override
    public QdrantBaseOp<T> getOp(long value) {
        return opF.apply(value);
    }
}

View File

@ -0,0 +1,51 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.opdispensers;
import io.nosqlbench.adapter.qdrant.QdrantDriverAdapter;
import io.nosqlbench.adapter.qdrant.ops.QdrantBaseOp;
import io.nosqlbench.adapter.qdrant.ops.QdrantCountPointsOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Points.CountPoints;
import java.util.function.LongFunction;
public class QdrantCountPointsOpDispenser extends QdrantBaseOpDispenser<CountPoints> {

    /**
     * Create a new dispenser for count_points ops.
     *
     * @param adapter        The associated {@link QdrantDriverAdapter}
     * @param op             The {@link ParsedOp} encapsulating the activity for this cycle
     * @param targetFunction A LongFunction yielding the target collection name for this op
     */
    public QdrantCountPointsOpDispenser(QdrantDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
    }

    /**
     * Build the per-cycle {@link CountPoints} request targeting the collection
     * named by {@code targetF}.
     */
    @Override
    public LongFunction<CountPoints> getParamFunc(
        LongFunction<QdrantClient> clientF, ParsedOp op, LongFunction<String> targetF) {
        return cycle -> CountPoints.newBuilder()
            .setCollectionName(targetF.apply(cycle))
            .build();
    }

    /**
     * Bind the per-cycle client and request into a {@link QdrantCountPointsOp}.
     */
    @Override
    public LongFunction<QdrantBaseOp<CountPoints>> createOpFunc(
        LongFunction<CountPoints> paramF,
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF) {
        return cycle -> new QdrantCountPointsOp(clientF.apply(cycle), paramF.apply(cycle));
    }
}

View File

@ -0,0 +1,395 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.opdispensers;
import io.nosqlbench.adapter.qdrant.QdrantDriverAdapter;
import io.nosqlbench.adapter.qdrant.ops.QdrantBaseOp;
import io.nosqlbench.adapter.qdrant.ops.QdrantCreateCollectionOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.errors.OpConfigError;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Collections.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.LongFunction;
public class QdrantCreateCollectionOpDispenser extends QdrantBaseOpDispenser<CreateCollection> {
    private static final Logger logger = LogManager.getLogger(QdrantCreateCollectionOpDispenser.class);

    /**
     * Create a new QdrantCreateCollectionOpDispenser subclassed from {@link QdrantBaseOpDispenser}.
     *
     * @param adapter        The associated {@link QdrantDriverAdapter}
     * @param op             The {@link ParsedOp} encapsulating the activity for this cycle
     * @param targetFunction A LongFunction that returns the specified Qdrant Index for this Op
     * @see <a href="https://qdrant.github.io/qdrant/redoc/index.html#tag/collections/operation/create_collection">Qdrant Create Collection</a>.
     */
    public QdrantCreateCollectionOpDispenser(QdrantDriverAdapter adapter,
                                             ParsedOp op,
                                             LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
    }

    /**
     * Build the per-cycle {@link CreateCollection} request from the op template.
     * The mandatory named-vector config is applied first; each optional section
     * (on-disk payload, sharding, replication, WAL, optimizer, HNSW, quantization,
     * sparse vectors) is layered onto the builder only when present in the op.
     */
    @Override
    public LongFunction<CreateCollection> getParamFunc(
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF
    ) {
        LongFunction<CreateCollection.Builder> ebF =
            l -> CreateCollection.newBuilder().setCollectionName(targetF.apply(l));
        // Named vectors are required; buildNamedVectorsStruct throws OpConfigError otherwise.
        LongFunction<Map<String, VectorParams>> namedVectorParamsMap = buildNamedVectorsStruct(op);
        final LongFunction<CreateCollection.Builder> namedVectorsF = ebF;
        ebF = l -> namedVectorsF.apply(l).setVectorsConfig(VectorsConfig.newBuilder().setParamsMap(
            VectorParamsMap.newBuilder().putAllMap(namedVectorParamsMap.apply(l)).build()));
        ebF = op.enhanceFuncOptionally(ebF, "on_disk_payload", Boolean.class,
            CreateCollection.Builder::setOnDiskPayload);
        ebF = op.enhanceFuncOptionally(ebF, "shard_number", Number.class,
            (CreateCollection.Builder b, Number n) -> b.setShardNumber(n.intValue()));
        ebF = op.enhanceFuncOptionally(ebF, "replication_factor", Number.class,
            (CreateCollection.Builder b, Number n) -> b.setReplicationFactor(n.intValue()));
        ebF = op.enhanceFuncOptionally(ebF, "write_consistency_factor", Number.class,
            (CreateCollection.Builder b, Number n) -> b.setWriteConsistencyFactor(n.intValue()));
        ebF = op.enhanceFuncOptionally(ebF, "init_from", String.class,
            CreateCollection.Builder::setInitFromCollection);
        ebF = op.enhanceFuncOptionally(ebF, "sharding_method", String.class,
            (CreateCollection.Builder b, String s) -> b.setShardingMethod(ShardingMethod.valueOf(s)));
        // Each optional sub-config below captures the current ebF in a final local
        // so the next lambda composes over the previous chain.
        Optional<LongFunction<Map>> walF = op.getAsOptionalFunction("wal_config", Map.class);
        if (walF.isPresent()) {
            final LongFunction<CreateCollection.Builder> wallFunc = ebF;
            LongFunction<WalConfigDiff> wcdF = buildWalConfigDiff(walF.get());
            ebF = l -> wallFunc.apply(l).setWalConfig(wcdF.apply(l));
        }
        Optional<LongFunction<Map>> optConDifF = op.getAsOptionalFunction("optimizers_config", Map.class);
        if (optConDifF.isPresent()) {
            final LongFunction<CreateCollection.Builder> wallFunc = ebF;
            LongFunction<OptimizersConfigDiff> ocdF = buildOptimizerConfigDiff(optConDifF.get());
            ebF = l -> wallFunc.apply(l).setOptimizersConfig(ocdF.apply(l));
        }
        Optional<LongFunction<Map>> hnswConfigDiffF = op.getAsOptionalFunction("hnsw_config", Map.class);
        if (hnswConfigDiffF.isPresent()) {
            final LongFunction<CreateCollection.Builder> hnswConfigF = ebF;
            LongFunction<HnswConfigDiff> hcdF = buildHnswConfigDiff(hnswConfigDiffF.get());
            ebF = l -> hnswConfigF.apply(l).setHnswConfig(hcdF.apply(l));
        }
        Optional<LongFunction<Map>> quantConfigF = op.getAsOptionalFunction("quantization_config", Map.class);
        if (quantConfigF.isPresent()) {
            final LongFunction<CreateCollection.Builder> qConF = ebF;
            LongFunction<QuantizationConfig> qcDiffF = buildQuantizationConfig(quantConfigF.get());
            ebF = l -> qConF.apply(l).setQuantizationConfig(qcDiffF.apply(l));
        }
        Optional<LongFunction<Map>> sparseVectorsF = op.getAsOptionalFunction("sparse_vectors", Map.class);
        if (sparseVectorsF.isPresent()) {
            final LongFunction<CreateCollection.Builder> sparseVecF = ebF;
            LongFunction<SparseVectorConfig> sparseVectorsMap = buildSparseVectorsStruct(sparseVectorsF.get());
            ebF = l -> sparseVecF.apply(l).setSparseVectorsConfig(sparseVectorsMap.apply(l));
        }
        final LongFunction<CreateCollection.Builder> lastF = ebF;
        return l -> lastF.apply(l).build();
    }

    /**
     * Build the {@link OptimizersConfigDiff} from the provided {@link ParsedOp}.
     *
     * @param ocdMapLongFunc {@link LongFunction<Map>} containing the optimizer config data.
     * @return {@link OptimizersConfigDiff} containing the optimizer config data
     */
    private LongFunction<OptimizersConfigDiff> buildOptimizerConfigDiff(LongFunction<Map> ocdMapLongFunc) {
        return l -> {
            OptimizersConfigDiff.Builder ocDiffBuilder = OptimizersConfigDiff.newBuilder();
            // Unrecognized keys are silently ignored; only the known optimizer
            // settings below are copied into the diff.
            ocdMapLongFunc.apply(l).forEach((key, value) -> {
                if (key.equals("deleted_threshold")) {
                    ocDiffBuilder.setDeletedThreshold(((Number) value).doubleValue());
                }
                if (key.equals("vacuum_min_vector_number")) {
                    ocDiffBuilder.setVacuumMinVectorNumber(((Number) value).longValue());
                }
                if (key.equals("default_segment_number")) {
                    ocDiffBuilder.setDefaultSegmentNumber(((Number) value).longValue());
                }
                if (key.equals("max_segment_size")) {
                    ocDiffBuilder.setMaxSegmentSize(((Number) value).longValue());
                }
                if (key.equals("memmap_threshold")) {
                    ocDiffBuilder.setMemmapThreshold(((Number) value).longValue());
                }
                if (key.equals("indexing_threshold")) {
                    ocDiffBuilder.setIndexingThreshold(((Number) value).longValue());
                }
                if (key.equals(("flush_interval_sec"))) {
                    ocDiffBuilder.setFlushIntervalSec(((Number) value).longValue());
                }
                if (key.equals("max_optimization_threads")) {
                    ocDiffBuilder.setMaxOptimizationThreads(((Number) value).intValue());
                }
            }
            );
            return ocDiffBuilder.build();
        };
    }

    /**
     * Build the {@link WalConfigDiff} from the provided {@link ParsedOp}.
     *
     * @param mapLongFunction {@link LongFunction<Map>} containing the WAL config data.
     * @return {@link LongFunction<WalConfigDiff>} containing the WAL config data
     */
    private LongFunction<WalConfigDiff> buildWalConfigDiff(LongFunction<Map> mapLongFunction) {
        return l -> {
            WalConfigDiff.Builder walConfigDiffBuilder = WalConfigDiff.newBuilder();
            mapLongFunction.apply(l).forEach((key, value) -> {
                if (key.equals("wal_capacity_mb")) {
                    walConfigDiffBuilder.setWalCapacityMb(((Number) value).longValue());
                }
                if (key.equals("wal_segments_ahead")) {
                    walConfigDiffBuilder.setWalSegmentsAhead(((Number) value).longValue());
                }
            }
            );
            return walConfigDiffBuilder.build();
        };
    }

    /**
     * Only named vectors are supported at this time in this driver.
     *
     * @param op the {@link ParsedOp} from which the 'vectors' data is extracted
     * @return a {@link LongFunction} yielding the named-vector map; returns null when
     *     no function form for 'vectors' is available — presumably unreachable given
     *     the isDefined guard above, but TODO confirm
     */
    private LongFunction<Map<String, VectorParams>> buildNamedVectorsStruct(ParsedOp op) {
        if (!op.isDefined("vectors")) {
            throw new OpConfigError("Must provide values for 'vectors' in 'create_collection' op");
        }
        Optional<LongFunction<Map>> baseFunc = op.getAsOptionalFunction("vectors", Map.class);
        return baseFunc.<LongFunction<Map<String, VectorParams>>>map(mapLongFunc -> l -> {
            Map<String, Object> nvMap = mapLongFunc.apply(l);
            Map<String, VectorParams> namedVectors = new HashMap<>();
            nvMap.forEach((name, value) -> {
                VectorParams.Builder builder = VectorParams.newBuilder();
                if (value instanceof Map) {
                    ((Map<String, Object>) value).forEach((innerKey, innerValue) -> {
                        if (innerKey.equals("distance_value")) {
                            builder.setDistanceValue(((Number) innerValue).intValue());
                        }
                        if (innerKey.equals("size")) {
                            builder.setSize(((Number) innerValue).longValue());
                        }
                        if (innerKey.equals("on_disk")) {
                            builder.setOnDisk((Boolean) innerValue);
                        }
                        if (innerKey.equals("datatype_value")) {
                            builder.setDatatypeValue(((Number) innerValue).intValue());
                        }
                        if (innerKey.equals("hnsw_config")) {
                            builder.setHnswConfig(buildHnswConfigDiff((Map<String, Object>) innerValue));
                        }
                        if (innerKey.equals("quantization_config")) {
                            builder.setQuantizationConfig(buildQuantizationConfig((Map<String, Object>) innerValue));
                        }
                    });
                } else {
                    throw new OpConfigError("Named vectors must be a Map<String, Map<String, Object>>, but got "
                        + value.getClass().getSimpleName() + " instead for the inner value");
                }
                namedVectors.put(name, builder.build());
            });
            return namedVectors;
        }).orElse(null);
    }

    // Lifts the Map-based overload below into a per-cycle function.
    private LongFunction<QuantizationConfig> buildQuantizationConfig(LongFunction<Map> quantConfMapLongFunc) {
        return l -> this.buildQuantizationConfig(quantConfMapLongFunc.apply(l));
    }

    /**
     * Build a {@link QuantizationConfig} from a map keyed by quantization flavor
     * ("binary", "product", or "scalar"). Empty or null flavor sections are skipped.
     */
    private QuantizationConfig buildQuantizationConfig(Map<String, Object> quantConfMap) {
        QuantizationConfig.Builder qcBuilder = QuantizationConfig.newBuilder();
        quantConfMap.forEach((key, value) -> {
            switch (key) {
                case "binary" -> {
                    BinaryQuantization.Builder binaryBuilder = BinaryQuantization.newBuilder();
                    Map<?, ?> binaryQCData = (Map<?, ?>) value;
                    if (null != binaryQCData && !binaryQCData.isEmpty()) {
                        if (binaryQCData.containsKey("always_ram")) {
                            binaryBuilder.setAlwaysRam((Boolean) binaryQCData.get("always_ram"));
                        }
                        qcBuilder.setBinary(binaryBuilder);
                    }
                }
                case "product" -> {
                    ProductQuantization.Builder productBuilder = ProductQuantization.newBuilder();
                    Map<?, ?> productQCData = (Map<?, ?>) value;
                    if (null != productQCData && !productQCData.isEmpty()) {
                        // Mandatory field
                        productBuilder.setAlwaysRam((Boolean) productQCData.get("always_ram"));
                        // Optional field(s) below
                        if (productQCData.containsKey("compression")) {
                            productBuilder.setCompression(CompressionRatio.valueOf((String) productQCData.get("compression")));
                        }
                        qcBuilder.setProduct(productBuilder);
                    }
                }
                case "scalar" -> {
                    ScalarQuantization.Builder scalarBuilder = ScalarQuantization.newBuilder();
                    Map<?, ?> scalarQCData = (Map<?, ?>) value;
                    if (null != scalarQCData && !scalarQCData.isEmpty()) {
                        // Mandatory field
                        scalarBuilder.setType(QuantizationType.forNumber(((Number) scalarQCData.get("type")).intValue()));
                        // Optional field(s) below
                        if (scalarQCData.containsKey("always_ram")) {
                            scalarBuilder.setAlwaysRam((Boolean) scalarQCData.get("always_ram"));
                        }
                        if (scalarQCData.containsKey("quantile")) {
                            scalarBuilder.setQuantile(((Number) scalarQCData.get("quantile")).floatValue());
                        }
                        qcBuilder.setScalar(scalarBuilder);
                    }
                }
            }
        });
        return qcBuilder.build();
    }

    /**
     * Build the {@link HnswConfigDiff} from the provided {@link ParsedOp}.
     *
     * NOTE(review): this ParsedOp-based overload appears unused within this class —
     * the Map-based overloads below are used instead; confirm before removal.
     *
     * @param fieldSpec The {@link ParsedOp} containing the hnsw config data
     * @return The {@link HnswConfigDiff} built from the provided {@link ParsedOp}
     * @see <a href="https://qdrant.tech/documentation/concepts/indexing/#vector-index">HNSW Config</a>
     */
    @Deprecated
    private HnswConfigDiff buildHnswConfigDiff(ParsedOp fieldSpec) {
        HnswConfigDiff.Builder hnswConfigBuilder = HnswConfigDiff.newBuilder();
        fieldSpec.getOptionalStaticValue("hnsw_config", Map.class).ifPresent(hnswConfigData -> {
            if (hnswConfigData.isEmpty()) {
                return;
            } else {
                if (hnswConfigData.containsKey("ef_construct")) {
                    hnswConfigBuilder.setEfConstruct(((Number) hnswConfigData.get("ef_construct")).longValue());
                }
                if (hnswConfigData.containsKey("m")) {
                    hnswConfigBuilder.setM(((Number) hnswConfigData.get("m")).intValue());
                }
                if (hnswConfigData.containsKey("full_scan_threshold")) {
                    hnswConfigBuilder.setFullScanThreshold(((Number) hnswConfigData.get("full_scan_threshold")).intValue());
                }
                if (hnswConfigData.containsKey("max_indexing_threads")) {
                    hnswConfigBuilder.setMaxIndexingThreads(((Number) hnswConfigData.get("max_indexing_threads")).intValue());
                }
                if (hnswConfigData.containsKey("on_disk")) {
                    hnswConfigBuilder.setOnDisk((Boolean) hnswConfigData.get("on_disk"));
                }
                if (hnswConfigData.containsKey("payload_m")) {
                    hnswConfigBuilder.setPayloadM(((Number) hnswConfigData.get("payload_m")).intValue());
                }
            }
        });
        return hnswConfigBuilder.build();
    }

    // Lifts the Map-based overload below into a per-cycle function.
    private LongFunction<HnswConfigDiff> buildHnswConfigDiff(LongFunction<Map> hnswConfigDiffMapLongFunc) {
        return l -> this.buildHnswConfigDiff(hnswConfigDiffMapLongFunc.apply(l));
    }

    /**
     * Build the {@link HnswConfigDiff} from the provided hnsw config map.
     *
     * @param hnswConfigDiffMap The {@link Map<String, Object>} containing the hnsw config data
     * @return The {@link HnswConfigDiff} built from the provided map
     * @see <a href="https://qdrant.tech/documentation/concepts/indexing/#vector-index">HNSW Config</a>
     */
    private HnswConfigDiff buildHnswConfigDiff(Map<String, Object> hnswConfigDiffMap) {
        HnswConfigDiff.Builder hnswConfigBuilder = HnswConfigDiff.newBuilder();
        hnswConfigDiffMap.forEach((key, value) -> {
            if (key.equals("ef_construct")) {
                hnswConfigBuilder.setEfConstruct(((Number) value).longValue());
            }
            if (key.equals("m")) {
                hnswConfigBuilder.setM(((Number) value).intValue());
            }
            if (key.equals("full_scan_threshold")) {
                hnswConfigBuilder.setFullScanThreshold(((Number) value).intValue());
            }
            if (key.equals("max_indexing_threads")) {
                hnswConfigBuilder.setMaxIndexingThreads(((Number) value).intValue());
            }
            if (key.equals("on_disk")) {
                hnswConfigBuilder.setOnDisk((Boolean) value);
            }
            if (key.equals("payload_m")) {
                hnswConfigBuilder.setPayloadM(((Number) value).intValue());
            }
        }
        );
        return hnswConfigBuilder.build();
    }

    /**
     * Build the {@link SparseVectorConfig} from the provided {@link ParsedOp}.
     *
     * @param sparseVectorsMapLongFunc The {@link LongFunction<Map>} containing the sparse vectors data
     * @return The {@link LongFunction<SparseVectorConfig>} built from the provided {@link ParsedOp}'s data
     */
    private LongFunction<SparseVectorConfig> buildSparseVectorsStruct(LongFunction<Map> sparseVectorsMapLongFunc) {
        return l -> {
            SparseVectorConfig.Builder builder = SparseVectorConfig.newBuilder();
            sparseVectorsMapLongFunc.apply(l).forEach((key, value) -> {
                SparseVectorParams.Builder svpBuilder = SparseVectorParams.newBuilder();
                SparseIndexConfig.Builder sicBuilder = SparseIndexConfig.newBuilder();
                if (value instanceof Map) {
                    ((Map<String, Object>) value).forEach((innerKey, innerValue) -> {
                        if (innerKey.equals("full_scan_threshold")) {
                            sicBuilder.setFullScanThreshold(((Number) innerValue).intValue());
                        }
                        if (innerKey.equals("on_disk")) {
                            sicBuilder.setOnDisk((Boolean) innerValue);
                        }
                        // NOTE(review): setIndex/putMap run once per inner key, re-putting
                        // the same outer key each time; the final iteration wins, so the
                        // result is correct but the placement looks accidental — consider
                        // moving these outside the inner forEach.
                        svpBuilder.setIndex(sicBuilder);
                        builder.putMap((String) key, svpBuilder.build());
                    }
                    );
                } else {
                    throw new OpConfigError("Sparse Vectors must be a Map<String, Map<String, Object>>, but got "
                        + value.getClass().getSimpleName() + " instead for the inner value");
                }
            });
            return builder.build();
        };
    }

    // https://qdrant.tech/documentation/concepts/collections/#create-a-collection
    @Override
    public LongFunction<QdrantBaseOp<CreateCollection>> createOpFunc(
        LongFunction<CreateCollection> paramF,
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF
    ) {
        return l -> new QdrantCreateCollectionOp(clientF.apply(l), paramF.apply(l));
    }
}

View File

@ -0,0 +1,54 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.opdispensers;
import io.nosqlbench.adapter.qdrant.QdrantDriverAdapter;
import io.nosqlbench.adapter.qdrant.ops.QdrantBaseOp;
import io.nosqlbench.adapter.qdrant.ops.QdrantPayloadIndexOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Collections.PayloadIndexParams;
import java.util.function.LongFunction;
public class QdrantCreatePayloadIndexOpDispenser extends QdrantBaseOpDispenser<PayloadIndexParams> {
    /**
     * Create a new dispenser for create_payload_index ops.
     *
     * @param adapter        The associated {@link QdrantDriverAdapter}
     * @param op             The {@link ParsedOp} encapsulating the activity for this cycle
     * @param targetFunction A LongFunction yielding the target field name for this op
     */
    public QdrantCreatePayloadIndexOpDispenser(
        QdrantDriverAdapter adapter,
        ParsedOp op,
        LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
    }

    /**
     * Build the per-cycle {@link PayloadIndexParams} request.
     */
    @Override
    public LongFunction<PayloadIndexParams> getParamFunc(
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF) {
        // NOTE(review): setField is invoked with a null first argument here; this
        // looks like an unfinished stub — confirm the intended field descriptor
        // before relying on this op type.
        LongFunction<PayloadIndexParams.Builder> ebF =
            l -> PayloadIndexParams.newBuilder().setField(null, targetF.apply(l));
        return l -> ebF.apply(l).build();
    }

    /**
     * Bind the per-cycle client and request into a {@link QdrantPayloadIndexOp}.
     */
    @Override
    public LongFunction<QdrantBaseOp<PayloadIndexParams>> createOpFunc(
        LongFunction<PayloadIndexParams> paramF,
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF) {
        return l -> new QdrantPayloadIndexOp(clientF.apply(l), paramF.apply(l));
    }
}

View File

@ -0,0 +1,62 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.opdispensers;
import io.nosqlbench.adapter.qdrant.QdrantDriverAdapter;
import io.nosqlbench.adapter.qdrant.ops.QdrantBaseOp;
import io.nosqlbench.adapter.qdrant.ops.QdrantDeleteCollectionOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Collections.DeleteCollection;
import java.util.function.LongFunction;
/**
 * Dispenser for Qdrant delete-collection operations: builds one {@link DeleteCollection}
 * request per cycle and wraps it in a {@link QdrantDeleteCollectionOp}.
 */
public class QdrantDeleteCollectionOpDispenser extends QdrantBaseOpDispenser<DeleteCollection> {
    /**
     * Create a new {@link QdrantDeleteCollectionOpDispenser} subclassed from {@link QdrantBaseOpDispenser}.
     *
     * @param adapter The associated {@link QdrantDriverAdapter}
     * @param op The {@link ParsedOp} encapsulating the activity for this cycle
     * @param targetFunction A LongFunction that returns the specified Qdrant object for this Op
     * @see <a href="https://qdrant.github.io/qdrant/redoc/index.html#tag/collections/operation/delete_collection">Qdrant Delete Collection</a>.
     */
    public QdrantDeleteCollectionOpDispenser(QdrantDriverAdapter adapter,
                                             ParsedOp op,
                                             LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
    }

    @Override
    public LongFunction<DeleteCollection> getParamFunc(
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF) {
        // The request carries only the collection name, so it can be assembled in one step.
        return cycle -> DeleteCollection.newBuilder()
            .setCollectionName(targetF.apply(cycle))
            .build();
    }

    @Override
    public LongFunction<QdrantBaseOp<DeleteCollection>> createOpFunc(
        LongFunction<DeleteCollection> paramF,
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF) {
        // Pair the resolved client with the per-cycle request.
        return cycle -> new QdrantDeleteCollectionOp(clientF.apply(cycle), paramF.apply(cycle));
    }
}

View File

@ -0,0 +1,219 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.opdispensers;
import io.nosqlbench.adapter.qdrant.QdrantDriverAdapter;
import io.nosqlbench.adapter.qdrant.ops.QdrantBaseOp;
import io.nosqlbench.adapter.qdrant.ops.QdrantSearchPointsOp;
import io.nosqlbench.adapter.qdrant.pojo.SearchPointsHelper;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.errors.OpConfigError;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.ShardKeySelectorFactory;
import io.qdrant.client.WithPayloadSelectorFactory;
import io.qdrant.client.WithVectorsSelectorFactory;
import io.qdrant.client.grpc.Points.*;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.LongFunction;
/**
 * Dispenser for Qdrant search-points operations: assembles a {@link SearchPoints} request per
 * cycle from the op template and wraps it in a {@link QdrantSearchPointsOp}.
 */
public class QdrantSearchPointsOpDispenser extends QdrantBaseOpDispenser<SearchPoints> {
    public QdrantSearchPointsOpDispenser(QdrantDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
    }

    @Override
    public LongFunction<QdrantBaseOp<SearchPoints>> createOpFunc(
        LongFunction<SearchPoints> paramF,
        LongFunction<QdrantClient> clientF,
        ParsedOp op, LongFunction<String> targetF) {
        return l -> new QdrantSearchPointsOp(clientF.apply(l), paramF.apply(l));
    }

    @Override
    public LongFunction<SearchPoints> getParamFunc(
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF) {
        LongFunction<SearchPoints.Builder> ebF =
            l -> SearchPoints.newBuilder().setCollectionName(targetF.apply(l));

        // query params here
        ebF = op.enhanceFuncOptionally(ebF, "timeout", Number.class,
            (SearchPoints.Builder b, Number t) -> b.setTimeout(t.longValue()));
        Optional<LongFunction<Object>> optionalConsistencyF = op.getAsOptionalFunction("consistency", Object.class);
        if (optionalConsistencyF.isPresent()) {
            LongFunction<SearchPoints.Builder> consistencyFunc = ebF;
            LongFunction<ReadConsistency> builtConsistency = buildReadConsistency(optionalConsistencyF.get());
            ebF = l -> consistencyFunc.apply(l).setReadConsistency(builtConsistency.apply(l));
        }

        // body params here
        // - required items
        ebF = op.enhanceFuncOptionally(ebF, "limit", Number.class,
            (SearchPoints.Builder b, Number n) -> b.setLimit(n.longValue()));
        LongFunction<SearchPointsHelper> searchPointsHelperF = buildVectorForSearch(op);
        final LongFunction<SearchPoints.Builder> namedVectorsF = ebF;
        // Resolve the helper once per cycle instead of three separate times, and only set sparse
        // indices when present: setSparseIndices(null) throws an NPE inside the driver.
        ebF = l -> {
            SearchPointsHelper helper = searchPointsHelperF.apply(l);
            SearchPoints.Builder builder = namedVectorsF.apply(l)
                .setVectorName(helper.getVectorName())
                .addAllVector(helper.getVectorValues());
            if (helper.getSparseIndices() != null) {
                builder.setSparseIndices(helper.getSparseIndices());
            }
            return builder;
        };

        // - optional items
        ebF = op.enhanceFuncOptionally(ebF, "shard_key", String.class, (SearchPoints.Builder b, String sk) ->
            b.setShardKeySelector(ShardKeySelectorFactory.shardKeySelector(sk)));
        ebF = op.enhanceFuncOptionally(ebF, "score_threshold", Number.class,
            (SearchPoints.Builder b, Number n) -> b.setScoreThreshold(n.floatValue()));
        ebF = op.enhanceFuncOptionally(ebF, "offset", Number.class,
            (SearchPoints.Builder b, Number n) -> b.setOffset(n.longValue()));
        Optional<LongFunction<Object>> optionalWithPayloadF = op.getAsOptionalFunction("with_payload", Object.class);
        if (optionalWithPayloadF.isPresent()) {
            LongFunction<SearchPoints.Builder> withPayloadFunc = ebF;
            LongFunction<WithPayloadSelector> builtWithPayload = buildWithPayloadSelector(optionalWithPayloadF.get());
            ebF = l -> withPayloadFunc.apply(l).setWithPayload(builtWithPayload.apply(l));
        }
        Optional<LongFunction<Object>> optionalWithVectorF = op.getAsOptionalFunction("with_vector", Object.class);
        if (optionalWithVectorF.isPresent()) {
            LongFunction<SearchPoints.Builder> withVectorFunc = ebF;
            LongFunction<WithVectorsSelector> builtWithVector = buildWithVectorSelector(optionalWithVectorF.get());
            ebF = l -> withVectorFunc.apply(l).setWithVectors(builtWithVector.apply(l));
        }

        // TODO - Implement filter, params
        final LongFunction<SearchPoints.Builder> lastF = ebF;
        return l -> lastF.apply(l).build();
    }

    /**
     * Builds the per-cycle named-vector details ('name', 'values' and optional 'sparse_indices')
     * used by the search request, or returns null when the 'vector' op field resolves to no function.
     *
     * @throws OpConfigError when 'vector' is undefined, or an entry lacks 'name' or 'values'
     */
    private LongFunction<SearchPointsHelper> buildVectorForSearch(ParsedOp op) {
        if (!op.isDefined("vector")) {
            throw new OpConfigError("Must provide values for 'vector'");
        }
        Optional<LongFunction<List>> baseFunc = op.getAsOptionalFunction("vector", List.class);
        return baseFunc.<LongFunction<SearchPointsHelper>>map(listLongFunction -> l -> {
            List<Map<String, Object>> vectorPointsList = listLongFunction.apply(l);
            SearchPointsHelper searchPointsHelperBuilder = new SearchPointsHelper();
            vectorPointsList.forEach(point -> {
                if (point.containsKey("name")) {
                    searchPointsHelperBuilder.setVectorName((String) point.get("name"));
                } else {
                    throw new OpConfigError("Must provide values for 'name' within 'vector' field");
                }
                if (point.containsKey("values")) {
                    searchPointsHelperBuilder.setVectorValues((List<Float>) point.get("values"));
                } else {
                    throw new OpConfigError("Must provide values for 'values' within 'vector' field");
                }
                if (point.containsKey("sparse_indices")) {
                    searchPointsHelperBuilder.setSparseIndices(
                        SparseIndices.newBuilder().addAllData((List<Integer>) point.get("sparse_indices")).build());
                }
            });
            return searchPointsHelperBuilder;
        }).orElse(null);
    }

    /**
     * Maps the 'with_vector' op field to a {@link WithVectorsSelector}: a boolean toggles all
     * vectors on/off, a non-empty list of names selects specific vectors.
     */
    private LongFunction<WithVectorsSelector> buildWithVectorSelector(LongFunction<Object> objectLongFunction) {
        return l -> {
            Object withVector = objectLongFunction.apply(l);
            switch (withVector) {
                case Boolean b -> {
                    return WithVectorsSelectorFactory.enable(b);
                }
                case List<?> objects when !objects.isEmpty() && objects.getFirst() instanceof String -> {
                    return WithVectorsSelectorFactory.include((List<String>) withVector);
                }
                // The previous null/default arm dereferenced a possibly-null value and embedded a
                // stray log4j '{}' placeholder in the message; report both cases cleanly instead.
                case null -> throw new OpConfigError("with_vector must not be null");
                default -> throw new OpConfigError("Invalid type for with_vector specified ["
                    + withVector.getClass().getSimpleName() + "]");
            }
        };
    }

    /**
     * Maps the 'with_payload' op field to a {@link WithPayloadSelector}: a boolean toggles all
     * payload fields, a non-empty list of names includes specific fields, and a map supports
     * 'include'/'exclude' lists.
     */
    private LongFunction<WithPayloadSelector> buildWithPayloadSelector(LongFunction<Object> objectLongFunction) {
        return l -> {
            Object withPayload = objectLongFunction.apply(l);
            switch (withPayload) {
                case Boolean b -> {
                    return WithPayloadSelector.newBuilder().setEnable(b).build();
                }
                case List<?> objects when !objects.isEmpty() && objects.getFirst() instanceof String -> {
                    return WithPayloadSelectorFactory.include((List<String>) withPayload);
                }
                case Map<?, ?> map -> {
                    WithPayloadSelector.Builder withPayloadSelector = WithPayloadSelector.newBuilder();
                    map.forEach((key, value) -> {
                        if (key.equals("include")) {
                            withPayloadSelector.setInclude(
                                PayloadIncludeSelector.newBuilder().addAllFields((List<String>) value).build());
                        } else if (key.equals("exclude")) {
                            withPayloadSelector.setExclude(
                                PayloadExcludeSelector.newBuilder().addAllFields((List<String>) value).build());
                        } else {
                            throw new OpConfigError("Only 'include' & 'exclude' fields for with_payload map is supported,"
                                + " but we got [" + key + "]");
                        }
                    });
                    return withPayloadSelector.build();
                }
                case null -> throw new OpConfigError("with_payload must not be null");
                default -> throw new OpConfigError("Invalid type for with_payload specified ["
                    + withPayload.getClass().getSimpleName() + "]");
            }
        };
    }

    /**
     * @param objectLongFunction the {@link LongFunction<Object>} from which the consistency for search will be built.
     * @return a {@link ReadConsistency} function object to be added to a Qdrant {@link SearchPoints} request.
     * Accepts either a numeric consistency type value or a {@link ReadConsistencyType} enum name.
     */
    private LongFunction<ReadConsistency> buildReadConsistency(LongFunction<Object> objectLongFunction) {
        return l -> {
            Object consistency = objectLongFunction.apply(l);
            if (consistency instanceof Number n) {
                // Number.intValue() accepts Integer/Long/etc.; the old raw (Integer) cast
                // threw ClassCastException for any non-Integer Number.
                return ReadConsistency.newBuilder().setTypeValue(n.intValue()).build();
            } else if (consistency instanceof String s) {
                return ReadConsistency.newBuilder().setType(ReadConsistencyType.valueOf(s)).build();
            } else {
                throw new OpConfigError("Invalid type for read consistency specified");
            }
        };
    }
}

View File

@ -0,0 +1,192 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.opdispensers;
import io.nosqlbench.adapter.qdrant.QdrantDriverAdapter;
import io.nosqlbench.adapter.qdrant.ops.QdrantBaseOp;
import io.nosqlbench.adapter.qdrant.ops.QdrantUpsertPointsOp;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.errors.OpConfigError;
import io.qdrant.client.*;
import io.qdrant.client.grpc.JsonWithInt.ListValue;
import io.qdrant.client.grpc.JsonWithInt.NullValue;
import io.qdrant.client.grpc.JsonWithInt.Struct;
import io.qdrant.client.grpc.JsonWithInt.Value;
import io.qdrant.client.grpc.Points.Vector;
import io.qdrant.client.grpc.Points.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.*;
import java.util.function.BiConsumer;
import java.util.function.LongFunction;
/**
 * Dispenser for Qdrant upsert-points operations: assembles an {@link UpsertPoints} request per
 * cycle from the op template and wraps it in a {@link QdrantUpsertPointsOp}.
 */
public class QdrantUpsertPointsOpDispenser extends QdrantBaseOpDispenser<UpsertPoints> {
    private static final Logger logger = LogManager.getLogger(QdrantUpsertPointsOpDispenser.class);

    /**
     * Create a new {@link QdrantUpsertPointsOpDispenser} implementing the {@link OpDispenser} interface.
     * @param adapter the associated {@link QdrantDriverAdapter}
     * @param op the {@link ParsedOp} encapsulating the activity for this cycle
     * @param targetFunction a LongFunction returning the target collection name for this op
     * @see <a href="https://qdrant.github.io/qdrant/redoc/index.html#tag/points/operation/upsert_points">Upsert Points</a>
     */
    public QdrantUpsertPointsOpDispenser(QdrantDriverAdapter adapter, ParsedOp op, LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
    }

    @Override
    public LongFunction<UpsertPoints> getParamFunc(
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF
    ) {
        LongFunction<UpsertPoints.Builder> ebF =
            l -> UpsertPoints.newBuilder().setCollectionName(targetF.apply(l));

        // set wait and ordering query params
        ebF = op.enhanceFuncOptionally(ebF, "wait", Boolean.class, UpsertPoints.Builder::setWait);
        ebF = op.enhanceFuncOptionally(ebF, "ordering", Number.class, (UpsertPoints.Builder b, Number n) ->
            b.setOrdering(WriteOrdering.newBuilder().setType(WriteOrderingType.forNumber(n.intValue()))));

        // request body begins here
        ebF = op.enhanceFuncOptionally(ebF, "shard_key", String.class, (UpsertPoints.Builder b, String sk) ->
            b.setShardKeySelector(ShardKeySelectorFactory.shardKeySelector(sk)));
        LongFunction<List<PointStruct>> pointsF = constructVectorPointsFunc(op);
        final LongFunction<UpsertPoints.Builder> pointsOfNamedVectorsF = ebF;
        ebF = l -> pointsOfNamedVectorsF.apply(l).addAllPoints(pointsF.apply(l));

        final LongFunction<UpsertPoints.Builder> lastF = ebF;
        return l -> lastF.apply(l).build();
    }

    /**
     * @param op the {@link ParsedOp} from which the vector objects will be built
     * @return an Iterable Collection of {@link PointStruct} objects to be added to a Qdrant {@link UpsertPoints} request.
     * <p>
     * This method interrogates the subsection of the ParsedOp defined for vector parameters and constructs a list of
     * vector (dense plus sparse) points based on the included values, or returns null if this section is not populated.
     * The base function returns either the List of vectors or null, while the interior function builds the vectors
     * with a Builder pattern based on the values contained in the source ParsedOp.
     */
    private LongFunction<List<PointStruct>> constructVectorPointsFunc(ParsedOp op) {
        Optional<LongFunction<List>> baseFunc =
            op.getAsOptionalFunction("points", List.class);
        return baseFunc.<LongFunction<List<PointStruct>>>map(listLongFunction -> l -> {
            List<PointStruct> returnVectorPoints = new ArrayList<>();
            List<Map<String, Object>> vectorPoints = listLongFunction.apply(l);
            for (Map<String, Object> point : vectorPoints) {
                PointStruct.Builder pointBuilder = PointStruct.newBuilder();
                // 'id' field is mandatory; if absent, the server will reject the request
                PointId.Builder pointId = PointId.newBuilder();
                Object rawId = point.get("id");
                if (rawId instanceof Number num) {
                    pointId.setNum(num.longValue());
                } else if (rawId instanceof String uuid) {
                    pointId.setUuid(uuid);
                } else if (rawId == null) {
                    // Guard: the previous code called getClass() on a missing id and NPE'd here.
                    logger.warn("No 'id' value specified for 'points'. Ignoring.");
                } else {
                    logger.warn("Unsupported 'id' value type [{}] specified for 'points'. Ignoring.",
                        rawId.getClass().getSimpleName());
                }
                pointBuilder.setId(pointId);
                if (point.containsKey("payload")) {
                    pointBuilder.putAllPayload(getPayloadValues(point.get("payload")));
                }
                pointBuilder.setVectors(VectorsFactory.namedVectors(getNamedVectorMap(point.get("vector"))));
                returnVectorPoints.add(pointBuilder.build());
            }
            return returnVectorPoints;
        }).orElse(null);
    }

    /**
     * Converts the raw 'vector' entry of a point into named {@link Vector}s. A list value becomes
     * a dense vector; a map value with 'values'/'indices' becomes a sparse vector.
     *
     * @throws OpConfigError when the raw value is not a Map of named vectors
     */
    private Map<String, Vector> getNamedVectorMap(Object rawVectorValues) {
        if (!(rawVectorValues instanceof Map)) {
            throw new OpConfigError("Invalid format of type" +
                " [" + rawVectorValues.getClass().getSimpleName() + "] specified for 'vector'");
        }
        Map<String, Vector> namedVectorMapData = new HashMap<>();
        BiConsumer<String, Object> namedVectorsToPointsVectorValue = (nvKey, nvVal) -> {
            Vector targetVectorVal;
            if (nvVal instanceof Map) {
                // Named sparse vector. The accumulator lists are scoped per entry: previously they
                // were shared across the whole map, so one sparse vector's values/indices leaked
                // into every subsequent sparse vector of the same point.
                List<Float> sparseVectors = new ArrayList<>();
                List<Integer> sparseIndices = new ArrayList<>();
                ((Map<String, Object>) nvVal).forEach(
                    (svKey, svValue) -> {
                        if ("values".equals(svKey)) {
                            sparseVectors.addAll((List<Float>) svValue);
                        } else if ("indices".equals(svKey)) {
                            sparseIndices.addAll((List<Integer>) svValue);
                        } else {
                            logger.warn("Unrecognized sparse vector field [{}] provided. Ignoring.", svKey);
                        }
                    }
                );
                targetVectorVal = VectorFactory.vector(sparseVectors, sparseIndices);
            } else if (nvVal instanceof List) {
                // Regular named dense vector.
                targetVectorVal = VectorFactory.vector((List<Float>) nvVal);
            } else {
                throw new RuntimeException("Unsupported 'vector' value type [" + nvVal.getClass().getSimpleName() + " ]");
            }
            namedVectorMapData.put(nvKey, targetVectorVal);
        };
        ((Map<String, Object>) rawVectorValues).forEach(namedVectorsToPointsVectorValue);
        return namedVectorMapData;
    }

    /**
     * Converts the raw 'payload' entry of a point into protobuf {@link Value}s, supporting the
     * payload types documented at https://qdrant.tech/documentation/concepts/payload/#payload-types.
     * Unsupported value types are logged and skipped.
     */
    private Map<String, Value> getPayloadValues(Object rawPayloadValues) {
        if (rawPayloadValues instanceof Map) {
            Map<String, Object> payloadMap = (Map<String, Object>) rawPayloadValues;
            Map<String, Value> payloadMapData = new HashMap<>(payloadMap.size());
            payloadMap.forEach((pKey, pVal) -> {
                switch (pVal) {
                    case Boolean b -> payloadMapData.put(pKey, ValueFactory.value(b));
                    case Double v -> payloadMapData.put(pKey, ValueFactory.value(v));
                    case Integer i -> payloadMapData.put(pKey, ValueFactory.value(i));
                    case String s -> payloadMapData.put(pKey, ValueFactory.value(s));
                    case ListValue listValue -> payloadMapData.put(pKey, ValueFactory.list((List<Value>) pVal));
                    case NullValue nullValue -> payloadMapData.put(pKey, ValueFactory.nullValue());
                    case Struct struct -> payloadMapData.put(pKey, Value.newBuilder().setStructValue(struct).build());
                    default -> logger.warn("Unknown payload value type passed." +
                        " Only https://qdrant.tech/documentation/concepts/payload/#payload-types are supported." +
                        " {} will be ignored.", pVal.toString());
                }
            });
            return payloadMapData;
        } else {
            throw new RuntimeException("Invalid format of type" +
                " [" + rawPayloadValues.getClass().getSimpleName() + "] specified for payload");
        }
    }

    /**
     * Create a new {@link QdrantUpsertPointsOp} implementing the {@link QdrantBaseOp} interface.
     * @see <a href="https://qdrant.tech/documentation/concepts/points/">Upsert Points</a>
     */
    @Override
    public LongFunction<QdrantBaseOp<UpsertPoints>> createOpFunc(
        LongFunction<UpsertPoints> paramF,
        LongFunction<QdrantClient> clientF,
        ParsedOp op,
        LongFunction<String> targetF
    ) {
        return l -> new QdrantUpsertPointsOp(clientF.apply(l), paramF.apply(l));
    }
}

View File

@ -0,0 +1,65 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.ops;
import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp;
import io.qdrant.client.QdrantClient;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.function.LongFunction;
/**
 * Base class for all Qdrant cycle operations: holds the client and the prepared request, and
 * funnels execution through {@link #applyOp(long)}.
 *
 * @param <T> the request type this op executes
 */
public abstract class QdrantBaseOp<T> implements CycleOp<Object> {
    protected final static Logger logger = LogManager.getLogger(QdrantBaseOp.class);

    protected final QdrantClient client;
    protected final T request;
    protected final LongFunction<Object> apiCall;

    public QdrantBaseOp(QdrantClient client, T requestParam) {
        this.client = client;
        this.request = requestParam;
        this.apiCall = this::applyOp;
    }

    public QdrantBaseOp(QdrantClient client, T requestParam, LongFunction<Object> call) {
        this.client = client;
        this.request = requestParam;
        this.apiCall = call;
    }

    @Override
    public final Object apply(long value) {
        logger.trace("applying op: {}", this);
        try {
            return applyOp(value);
        } catch (RuntimeException rte) {
            throw rte;
        } catch (Exception e) {
            // The previous code blind-cast every caught exception to RuntimeException, which
            // would raise ClassCastException (losing the cause) for any checked exception
            // escaping a subclass; wrap instead and preserve the cause.
            throw new RuntimeException(e);
        }
    }

    /** Executes the concrete operation for the given cycle value. */
    public abstract Object applyOp(long value);

    @Override
    public String toString() {
        return "QdrantOp(" + this.request.getClass().getSimpleName() + ")";
    }
}

View File

@ -0,0 +1,46 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.ops;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Points.CountPoints;
import java.time.Duration;
import java.util.concurrent.ExecutionException;
/**
 * Executes a Qdrant count-points request and returns the resulting count.
 */
public class QdrantCountPointsOp extends QdrantBaseOp<CountPoints> {
    public QdrantCountPointsOp(QdrantClient client, CountPoints request) {
        super(client, request);
    }

    @Override
    public Object applyOp(long value) {
        final long pointCount;
        try {
            pointCount = client.countAsync(
                request.getCollectionName(),
                request.getFilter(),
                request.getExact(),
                Duration.ofMinutes(5) // opinionated default of 5 minutes for timeout
            ).get();
            logger.info("[QdrantCountPointsOp] Total vector points counted: {}", pointCount);
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
        return pointCount;
    }
}

View File

@ -0,0 +1,47 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.ops;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Collections.CollectionOperationResponse;
import io.qdrant.client.grpc.Collections.CreateCollection;
import java.util.concurrent.ExecutionException;
/**
 * Executes a Qdrant create-collection request and returns the driver's operation response.
 */
public class QdrantCreateCollectionOp extends QdrantBaseOp<CreateCollection> {
    /**
     * Create a new {@link ParsedOp} encapsulating a call to the <b>Qdrant</b> create collection method.
     *
     * @param client The associated {@link QdrantClient} used to communicate with the database
     * @param request The {@link CreateCollection} built for this operation
     */
    public QdrantCreateCollectionOp(QdrantClient client, CreateCollection request) {
        super(client, request);
    }

    @Override
    public Object applyOp(long value) {
        try {
            // Block on the async call; the base op surfaces any failure.
            CollectionOperationResponse result = client.createCollectionAsync(request).get();
            return result;
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@ -0,0 +1,41 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.ops;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Collections.CollectionOperationResponse;
import io.qdrant.client.grpc.Collections.DeleteCollection;
import java.util.concurrent.ExecutionException;
/**
 * Executes a Qdrant delete-collection request and returns the driver's operation response.
 */
public class QdrantDeleteCollectionOp extends QdrantBaseOp<DeleteCollection> {
    public QdrantDeleteCollectionOp(QdrantClient client, DeleteCollection request) {
        super(client, request);
    }

    @Override
    public Object applyOp(long value) {
        try {
            // Only the collection name is needed by the client API for deletion.
            CollectionOperationResponse result =
                client.deleteCollectionAsync(request.getCollectionName()).get();
            return result;
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@ -0,0 +1,32 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.ops;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Collections.PayloadIndexParams;
/**
 * Op for creating a payload index in Qdrant.
 *
 * NOTE(review): this op is currently a stub — applyOp performs no client call and always
 * returns null; the intended createPayloadIndexAsync invocation is still commented out below.
 * TODO confirm whether this is meant to ship, or complete the implementation first.
 */
public class QdrantPayloadIndexOp extends QdrantBaseOp<PayloadIndexParams> {
    public QdrantPayloadIndexOp(QdrantClient client, PayloadIndexParams request) {
        super(client, request);
    }

    @Override
    public Object applyOp(long value) {
        //client.createPayloadIndexAsync(PayloadIndexParams.get);
        return null;
    }
}

View File

@ -0,0 +1,47 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.ops;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Points.ScoredPoint;
import io.qdrant.client.grpc.Points.SearchPoints;
import java.util.List;
import java.util.concurrent.ExecutionException;
/**
 * Executes a Qdrant search-points request and returns the list of scored points.
 */
public class QdrantSearchPointsOp extends QdrantBaseOp<SearchPoints> {
    public QdrantSearchPointsOp(QdrantClient client, SearchPoints request) {
        super(client, request);
    }

    @Override
    public Object applyOp(long value) {
        try {
            logger.debug("[QdrantSearchPointsOp] Cycle {} has request: {}", value, request.toString());
            List<ScoredPoint> scoredPoints = client.searchAsync(request).get();
            if (logger.isDebugEnabled()) {
                for (ScoredPoint scoredPoint : scoredPoints) {
                    logger.debug("[QdrantSearchPointsOp] Scored Point: {}", scoredPoint.toString());
                }
            }
            return scoredPoints;
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@ -0,0 +1,55 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.ops;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Points.UpdateResult;
import io.qdrant.client.grpc.Points.UpsertPoints;
import java.util.concurrent.ExecutionException;
public class QdrantUpsertPointsOp extends QdrantBaseOp<UpsertPoints> {
public QdrantUpsertPointsOp(QdrantClient client, UpsertPoints request) {
super(client, request);
}
@Override
public Object applyOp(long value) {
UpdateResult response = null;
String responseStatus;
long responseOperationId;
try {
logger.debug("[QdrantUpsertPointsOp] Cycle {} has Request: {}", value, request.toString());
response = client.upsertAsync(request).get();
responseStatus = response.getStatus().toString();
responseOperationId = response.getOperationId();
switch(response.getStatus()) {
case Completed, Acknowledged ->
logger.trace("[QdrantUpsertPointsOp] Upsert points finished successfully." +
" [Status ({}) for Operation id ({})]", responseStatus, responseOperationId);
case UnknownUpdateStatus, ClockRejected ->
logger.error("[QdrantUpsertPointsOp] Upsert points failed with status '{}'" +
" for operation id '{}'", responseStatus, responseOperationId);
default ->
logger.error("[QdrantUpsertPointsOp] Unknown status '{}' for operation id '{}'", responseStatus, responseOperationId);
}
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException(e);
}
return response;
}
}

View File

@ -0,0 +1,81 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.pojo;
import io.qdrant.client.grpc.Points.SparseIndices;
import java.util.List;
import java.util.Objects;
/**
 * Helper class to store the vector name, vector values and sparse indices to be used for searching points.
 */
public class SearchPointsHelper {
    private String vectorName;
    private List<Float> vectorValues;
    private SparseIndices sparseIndices;

    public SearchPointsHelper(String vectorName, List<Float> vectorValues, SparseIndices sparseIndices) {
        this.vectorName = vectorName;
        this.vectorValues = vectorValues;
        this.sparseIndices = sparseIndices;
    }

    public SearchPointsHelper() {
    }

    public String getVectorName() {
        return vectorName;
    }

    public void setVectorName(String vectorName) {
        this.vectorName = vectorName;
    }

    public List<Float> getVectorValues() {
        return vectorValues;
    }

    public void setVectorValues(List<Float> vectorValues) {
        this.vectorValues = vectorValues;
    }

    public SparseIndices getSparseIndices() {
        return sparseIndices;
    }

    public void setSparseIndices(SparseIndices sparseIndices) {
        this.sparseIndices = sparseIndices;
    }

    /**
     * Null-safe equality over all three fields. The previous implementation invoked
     * getVectorName().equals(...) directly, which threw NullPointerException for instances
     * created via the no-arg constructor before the fields were populated.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        SearchPointsHelper that = (SearchPointsHelper) o;
        return Objects.equals(vectorName, that.vectorName)
            && Objects.equals(vectorValues, that.vectorValues)
            && Objects.equals(sparseIndices, that.sparseIndices);
    }

    /** Consistent with {@link #equals(Object)} and null-safe. */
    @Override
    public int hashCode() {
        return Objects.hash(vectorName, vectorValues, sparseIndices);
    }
}

View File

@ -0,0 +1,31 @@
/*
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.qdrant.types;
/**
 * The operation types supported by the Qdrant driver adapter. Each constant names an
 * op template key that maps to one Qdrant API operation; the links below point at the
 * corresponding REST/gRPC reference entries.
 */
public enum QdrantOpType {
    // https://qdrant.github.io/qdrant/redoc/index.html#tag/collections/operation/create_collection
    create_collection,
    // https://qdrant.github.io/qdrant/redoc/index.html#tag/collections/operation/delete_collection
    delete_collection,
    // https://qdrant.github.io/qdrant/redoc/index.html#tag/collections/operation/create_field_index
    create_payload_index,
    // https://qdrant.github.io/qdrant/redoc/index.html#tag/points/operation/search_points
    search_points,
    // https://qdrant.tech/documentation/concepts/points/
    // https://qdrant.github.io/qdrant/redoc/index.html#tag/points/operation/upsert_points
    upsert_points,
    // https://qdrant.github.io/qdrant/redoc/index.html#tag/points/operation/count_points
    // https://qdrant.tech/documentation/concepts/points/#counting-points
    count_points,
}

View File

@ -0,0 +1,186 @@
min_version: 5.21
description: |
This is a template for live vector search testing.
Template Variables:
schema: Install the schema required to run the test
rampup: Measure how long it takes to load a set of embeddings
  search_and_index: Measure how the system responds to queries while it
   is indexing recently ingested data.
  search: Run vector search with a set of default (or overridden) parameters
In all of these phases, it is important to instance the metrics with distinct names.
Also, aggregates of recall should include total aggregate as well as a moving average.
scenarios:
qdrant_vectors:
delete_collection: >-
run tags==block:delete_collection
errors===stop
cycles===UNDEF threads===UNDEF
uri=TEMPLATE(qdranthost) grpc_port=TEMPLATE(grpc_port,6334) token_file=TEMPLATE(token_file)
schema_collection: >-
run tags==block:schema_collection
errors===stop
cycles===UNDEF threads===UNDEF
uri=TEMPLATE(qdranthost) grpc_port=TEMPLATE(grpc_port,6334) token_file=TEMPLATE(token_file)
rampup: >-
run tags==block:rampup
errors===warn,counter,retry
cycles===TEMPLATE(train_cycles,TEMPLATE(trainsize,1000)) threads===TEMPLATE(train_threads,AUTO)
uri=TEMPLATE(qdranthost) grpc_port=TEMPLATE(grpc_port,6334) token_file=TEMPLATE(token_file)
count_vectors: >-
run tags==block:count_vectors
errors===stop
cycles===UNDEF threads===UNDEF
uri=TEMPLATE(qdranthost) grpc_port=TEMPLATE(grpc_port,6334) token_file=TEMPLATE(token_file)
search_points: >-
run tags==block:search_points
errors===warn,counter
cycles===TEMPLATE(testann_cycles,TEMPLATE(testsize,1000)) threads===TEMPLATE(testann_threads,AUTO)
uri=TEMPLATE(qdranthost) grpc_port=TEMPLATE(grpc_port,6334) token_file=TEMPLATE(token_file)
params:
driver: qdrant
instrument: true
bindings:
id_val: Identity();
row_key: ToString()
row_key_batch: Mul(TEMPLATE(batch_size)L); ListSizedStepped(TEMPLATE(batch_size),long->ToString());
# filetype=hdf5 for TEMPLATE(filetype,hdf5)
test_floatlist_hdf5: HdfFileToFloatList("testdata/TEMPLATE(dataset).hdf5", "/test");
relevant_indices_hdf5: HdfFileToIntArray("testdata/TEMPLATE(dataset).hdf5", "/neighbors")
distance_floatlist_hdf5: HdfFileToFloatList("testdata/TEMPLATE(dataset).hdf5", "/distance")
train_floatlist_hdf5: HdfFileToFloatList("testdata/TEMPLATE(dataset).hdf5", "/train");
train_floatlist_hdf5_batch: Mul(TEMPLATE(batch_size)L); ListSizedStepped(TEMPLATE(batch_size),HdfFileToFloatList("testdata/TEMPLATE(dataset).hdf5", "/train"));
# filetype=fvec for TEMPLATE(filetype,fvec)
test_floatlist_fvec: FVecReader("testdata/TEMPLATE(dataset)_TEMPLATE(trainsize)_query_vectors.fvec");
relevant_indices_fvec: IVecReader("testdata/TEMPLATE(dataset)_TEMPLATE(trainsize)_indices_query.ivec");
distance_floatlist_fvec: FVecReader("testdata/TEMPLATE(dataset)_TEMPLATE(testsize)_distances_count.fvec",TEMPLATE(dimensions),0);
train_floatlist_fvec: FVecReader("testdata/TEMPLATE(dataset)_TEMPLATE(trainsize)_base_vectors.fvec",TEMPLATE(dimensions),0);
train_floatlist_fvec_batch: Mul(TEMPLATE(batch_size,10)L); ListSizedStepped(TEMPLATE(batch_size),FVecReader("testdata/TEMPLATE(dataset)_TEMPLATE(trainsize)_base_vectors.fvec",TEMPLATE(dimensions),0));
blocks:
delete_collection:
ops:
# https://qdrant.github.io/qdrant/redoc/index.html#tag/collections/operation/delete_collection
delete_col_op:
delete_collection: "TEMPLATE(collection)"
schema_collection:
ops:
# https://qdrant.github.io/qdrant/redoc/index.html#tag/collections/operation/create_collection
create_col_op:
create_collection: "TEMPLATE(collection)"
on_disk_payload: true
shard_number: 1
replication_factor: 1
write_consistency_factor: 1
vectors:
value:
size: TEMPLATE(dimensions,25)
# https://github.com/qdrant/qdrant/blob/v1.9.0/lib/api/src/grpc/proto/collections.proto#L90-L96
# 1 = Cosine, 2 = Euclid, 3 = Dot, 4 = Manhattan, 0 = UnknownDistance
distance_value: TEMPLATE(similarity_function,1)
on_disk: true
# https://github.com/qdrant/qdrant/blob/v1.9.0/lib/api/src/grpc/proto/collections.proto#L5-L9
# 0 = Default, 1 = Float32, 2 = Uint8
datatype_value: 1
hnsw_config:
m: 16
ef_construct: 100
full_scan_threshold: 10000
max_indexing_threads: 0
on_disk: true
#payload_m: 16
quantization_config:
binary:
always_ram: false
#scalar:
# # https://github.com/qdrant/qdrant/blob/v1.9.0/lib/api/src/grpc/proto/collections.proto#L117-L120
# # 0 = UnknownQuantization, 1 = Inet8
# type: 1
# quantile: 0.99
# always_ram: false
#product:
# compression: x16
# always_ram: false
wal_config:
wal_capacity_mb: 32
wal_segments_ahead: 0
optimizer_config:
deleted_threshold: 0.2
vacuum_min_vector_number: 1000
default_segment_number: 0
indexing_threshold: 20000
flush_interval_sec: 5
#sparse_vectors:
# svec1:
# full_scan_threshold: 100
# on_disk: true
rampup:
ops:
upsert_points_op:
upsert_points: "TEMPLATE(collection)"
wait: TEMPLATE(upsert_point_wait,true)
# https://github.com/qdrant/qdrant/blob/v1.9.0/lib/api/src/grpc/proto/points.proto#L11-L15
# 0 - Weak, 1 - Medium, 2 - Strong
ordering: TEMPLATE(upsert_point_ordering,1)
#shard_key: "{row_key}"
points:
- id: "{id_val}"
payload:
key: "{row_key}"
vector:
# For dense vectors, use the below format
value: "{train_floatlist_TEMPLATE(filetype)}"
# For sparse vectors, use the below format
#value_sv:
# indices: your array of numbers
# values: your array of floats
search_points:
ops:
search_points_op:
search_points: "TEMPLATE(collection)"
timeout: 300 # 5 minutes
# https://github.com/qdrant/qdrant/blob/v1.9.0/lib/api/src/grpc/proto/points.proto#L21-L25
# 0 - All, 1 - Majority, 2 - Quorum
consistency: "Quorum"
with_payload: true
with_vector: true
limit: TEMPLATE(select_limit,100)
# Another option to set with payload is as follows
# with_payload: ["key1"]
# Another option to set with payload is as follows
# with_payload:
# include: ["key1"]
# exclude: ["key2"]
vector:
- name: "value"
values: "{test_floatlist_TEMPLATE(filetype)}"
#indices: "[1,7]"
verifier-init: |
relevancy= new io.nosqlbench.nb.api.engine.metrics.wrappers.RelevancyMeasures(_parsed_op);
for (int k in List.of(100)) {
relevancy.addFunction(io.nosqlbench.engine.extensions.computefunctions.RelevancyFunctions.recall("recall",k));
relevancy.addFunction(io.nosqlbench.engine.extensions.computefunctions.RelevancyFunctions.precision("precision",k));
relevancy.addFunction(io.nosqlbench.engine.extensions.computefunctions.RelevancyFunctions.F1("F1",k));
relevancy.addFunction(io.nosqlbench.engine.extensions.computefunctions.RelevancyFunctions.reciprocal_rank("RR",k));
relevancy.addFunction(io.nosqlbench.engine.extensions.computefunctions.RelevancyFunctions.average_precision("AP",k));
}
verifier: |
// driver-specific function
actual_indices=io.nosqlbench.adapter.qdrant.QdrantAdapterUtils.searchPointsResponseIdNumToIntArray(result)
// System.out.println("actual_indices ------>>>>: " + actual_indices);
// driver-agnostic function
relevancy.accept({relevant_indices_TEMPLATE(filetype)},actual_indices);
// because we are "verifying" although this needs to be reorganized
return true;
count_vectors:
ops:
count_points_op:
count_points: "TEMPLATE(collection)"
exact: true

View File

@ -0,0 +1,41 @@
# qdrant driver adapter
The qdrant driver adapter is a nb adapter for the qdrant driver, an open source Java driver for connecting to and
performing operations on an instance of a Qdrant Vector database. The driver is hosted on GitHub at
https://github.com/qdrant/java-client.
## activity parameters
The following parameters must be supplied to the adapter at runtime in order to successfully connect to an
instance of the [Qdrant database](https://qdrant.tech/documentation):
* `token` - In order to use the Qdrant database you must have an account. Once the account is created you can [request
an api key/token](https://qdrant.tech/documentation/cloud/authentication/). This key will need to be provided any
time a database connection is desired. Alternatively, the api key can be stored in a file securely and referenced via
the `token_file` config option pointing to the path of the file.
* `uri` - When a collection/index is created in the database the URI (aka endpoint) must be specified as well. The adapter will
use the default value of `localhost:6334` if none is provided at runtime. Remember to *not* provide the `https://`
prefix.
* `grpc_port` - the GRPC port used by the Qdrant database. Defaults to `6334`.
* `use_tls` - option to leverage TLS for the connection. Defaults to `true`.
* `timeout_ms` - sets the timeout in milliseconds for all requests. Defaults to `3000`ms.
## Op Templates
The Qdrant adapter supports [**all operations**](../java/io/nosqlbench/adapter/qdrant/ops) supported by the [Java
driver published by Qdrant](https://github.com/qdrant/java-client). The official Qdrant API reference can be found at
https://qdrant.github.io/java-client/io/qdrant/client/package-summary.html
The operations include a full-fledged support for key APIs available in the Qdrant Java driver.
The following are a couple high level API operations.
* Create Collection
* Count Points
* Drop Collection
* Search Points (vectors)
## Examples
Check out the [full example available here](activities/qdrant_vectors_live.yaml).
---

View File

@ -37,7 +37,7 @@
</description>
<properties>
<s4j.version>4.1.2-alpha</s4j.version>
<s4j.version>4.1.3-alpha</s4j.version>
</properties>
<dependencies>

View File

@ -141,6 +141,20 @@
</dependencies>
</profile>
<profile>
<id>adapter-dataapi-include</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-dataapi</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-dynamodb-include</id>
<activation>
@ -186,7 +200,7 @@
<profile>
<id>adapter-s4j-include</id>
<activation>
<activeByDefault>true</activeByDefault>
<activeByDefault>false</activeByDefault>
</activation>
<dependencies>
<dependency>
@ -238,6 +252,21 @@
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-qdrant-include</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-qdrant</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
</profiles>
</project>

View File

@ -147,7 +147,7 @@
<profile>
<id>adapter-s4j-module</id>
<activation>
<activeByDefault>true</activeByDefault>
<activeByDefault>false</activeByDefault>
</activation>
<modules>
<module>adapter-s4j</module>
@ -174,5 +174,25 @@
</modules>
</profile>
<profile>
<id>adapter-dataapi-module</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-dataapi</module>
</modules>
</profile>
<profile>
<id>adapter-qdrant-module</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<modules>
<module>adapter-qdrant</module>
</modules>
</profile>
</profiles>
</project>

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2022-2023 nosqlbench
* Copyright (c) 2022-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -18,21 +18,21 @@ package io.nosqlbench.adapters.api.templating;
import io.nosqlbench.adapters.api.activityconfig.yaml.OpData;
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplate;
import io.nosqlbench.nb.api.labels.NBLabelSpec;
import io.nosqlbench.nb.api.labels.NBLabels;
import io.nosqlbench.nb.api.config.fieldreaders.DynamicFieldReader;
import io.nosqlbench.nb.api.config.fieldreaders.StaticFieldReader;
import io.nosqlbench.nb.api.config.standard.NBConfigError;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.nb.api.errors.OpConfigError;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.components.core.NBBaseComponent;
import io.nosqlbench.engine.api.templating.ObjectCache;
import io.nosqlbench.engine.api.templating.ParsedTemplateMap;
import io.nosqlbench.engine.api.templating.TypeAndTarget;
import io.nosqlbench.engine.api.templating.binders.ArrayBinder;
import io.nosqlbench.engine.api.templating.binders.ListBinder;
import io.nosqlbench.engine.api.templating.binders.OrderedMapBinder;
import io.nosqlbench.nb.api.components.core.NBBaseComponent;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.config.fieldreaders.DynamicFieldReader;
import io.nosqlbench.nb.api.config.fieldreaders.StaticFieldReader;
import io.nosqlbench.nb.api.config.standard.NBConfigError;
import io.nosqlbench.nb.api.config.standard.NBConfiguration;
import io.nosqlbench.nb.api.errors.OpConfigError;
import io.nosqlbench.nb.api.labels.NBLabelSpec;
import io.nosqlbench.nb.api.labels.NBLabels;
import io.nosqlbench.virtdata.core.templates.BindPoint;
import io.nosqlbench.virtdata.core.templates.CapturePoint;
import io.nosqlbench.virtdata.core.templates.ParsedTemplateString;
@ -1034,8 +1034,8 @@ public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String
ParsedOp parsedOp = makeSubOp(fromOpField, String.format(format, i), stmt.toString(), naming);
subOpMap.put(parsedOp.getName(), parsedOp);
} else {
throw new OpConfigError("For sub-ops field " + fromOpField + " of op '" + this.getName() + "', element " +
"types must be of Map or String, not '" + o.getClass().getCanonicalName() + "'");
throw new OpConfigError("For sub-ops field '" + fromOpField + "' of op '" + this.getName() + "'," +
" element types must be of Map or String, not '" + listElem.getClass().getCanonicalName() + "'");
}
}
} else if (o instanceof Map map) {
@ -1048,8 +1048,8 @@ public class ParsedOp extends NBBaseComponent implements LongFunction<Map<String
ParsedOp subOp = makeSubOp(fromOpField, nameKey.toString(), stmt.toString(), naming);
subOpMap.put(subOp.getName(), subOp);
} else {
throw new OpConfigError("For sub-ops field " + fromOpField + " of op '" + this.getName() + "', element " +
"types must be of Map or String, not '" + o.getClass().getCanonicalName() + "'");
throw new OpConfigError("For sub-ops field '" + fromOpField + "(" + nameKey.toString() + ")' of op '" + this.getName() + "'" +
", element " + "types must be of Map or String, not '" + opref.getClass().getCanonicalName() + "'");
}
}
} else {

View File

@ -178,7 +178,7 @@ public class NBConfiguration {
}
}
} else {
throw new NBConfigError("Parameter was not found for " + Arrays.toString(names) + ".");
throw new NBConfigError("Parameter definition was not found for " + Arrays.toString(names) + ".");
}
}
if (o == null) {

View File

@ -136,6 +136,9 @@ public class Param<T> {
* @return
*/
public static <V> Param<V> defaultTo(String name, V defaultValue) {
if (defaultValue instanceof Class clazz) {
throw new RuntimeException("This is not a supported type.");
}
return new Param<V>(List.of(name), (Class<V>) defaultValue.getClass(), null, true, defaultValue, null);
}

View File

@ -130,7 +130,7 @@ public class NBCLIScenarioPreprocessorTest {
"driver", "stdout",
"labels", "workload:scenario_test,scenario:schema_only",
"step", "schema",
"tags", "block:\"schema.*\"",
"tags", "block:schema.*",
"workload", "scenario_test"
));
NBCLIOptions opts1 = new NBCLIOptions(new String[]{"scenario_test", "schema_only", "doundef=20"}, NBCLIOptions.Mode.ParseAllOptions);
@ -185,7 +185,7 @@ public class NBCLIScenarioPreprocessorTest {
"driver", "stdout",
"labels", "workload:scenario_test,scenario:schema_only",
"step", "schema",
"tags", "block:\"schema.*\"",
"tags", "block:schema.*",
"workload", "scenario_test"
));
NBCLIOptions opts1 = new NBCLIOptions(new String[]{"example_scenarios", "namedsteps.one", "testparam1=testvalue2"}, NBCLIOptions.Mode.ParseAllOptions);
@ -202,6 +202,39 @@ public class NBCLIScenarioPreprocessorTest {
.withMessageContaining("Duplicate occurrence of parameter \"threads\"");
}
@Test
// Verifies NBCLIScenarioPreprocessor.splitCommand: plain splitting, quote stripping
// (single and double), quoted values containing spaces, escaped quotes/backslashes,
// and the error raised for an unclosed quote.
public void testCommandSplitter() {
    String normalCmd = "run driver=stdout tags==block:main-read cycles==10 threads=auto param=test1";
    assertThat(NBCLIScenarioPreprocessor.splitCommand(normalCmd))
        .isEqualTo(new String[]{"run", "driver=stdout", "tags==block:main-read", "cycles==10", "threads=auto", "param=test1"});
    // param='test1' or pram="test1" -> param=test1
    String quotedParamCmd = "run driver=stdout tags==block:\"main.*\" cycles==10 threads=auto param='test1'";
    assertThat(NBCLIScenarioPreprocessor.splitCommand(quotedParamCmd))
        .isEqualTo(new String[]{"run", "driver=stdout", "tags==block:main.*", "cycles==10", "threads=auto", "param=test1"});
    // param="test 1" or params='test 1' -> param=test 1
    String paramWithSpaceCmd = "run driver=stdout tags==block:\"main.*\" cycles==10 threads=auto param='test 1'";
    assertThat(NBCLIScenarioPreprocessor.splitCommand(paramWithSpaceCmd))
        .isEqualTo(new String[]{"run", "driver=stdout", "tags==block:main.*", "cycles==10", "threads=auto", "param=test 1"});
    // param=\"test1\" -> param="test1", param=\'test1\' -> param='test1'
    String escapingQuotesParamCmd = "run driver=stdout tags==block:'main.*' cycles==10 threads=auto param=\\\"test1\\\"";
    assertThat(NBCLIScenarioPreprocessor.splitCommand(escapingQuotesParamCmd))
        .isEqualTo(new String[]{"run", "driver=stdout", "tags==block:main.*", "cycles==10", "threads=auto", "param=\"test1\""});
    // param=test1\\test2 -> param=test1\test2
    String escapingSlashParamCmd = "run driver=stdout tags==block:'main.*' cycles==10 threads=auto param=test1\\\\test2";
    assertThat(NBCLIScenarioPreprocessor.splitCommand(escapingSlashParamCmd))
        .isEqualTo(new String[]{"run", "driver=stdout", "tags==block:main.*", "cycles==10", "threads=auto", "param=test1\\test2"});
    // param="test1 -> unclosed quote "
    String unclosedQuoteCmd = "run driver=stdout tags==block:'main.*' cycles==10 threads=auto param=\"test1";
    assertThatExceptionOfType(BasicError.class)
        .isThrownBy(() -> NBCLIScenarioPreprocessor.splitCommand(unclosedQuoteCmd))
        .withMessageContaining("Unclosed quote found in scenario cmd");
}
@Test
public void testThatSuggestionsAreShownForDirectStepNameUsage() {
assertThatExceptionOfType(BasicError.class)

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2022-2023 nosqlbench
* Copyright (c) 2022-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -16,40 +16,40 @@
package io.nosqlbench.engine.api.activityimpl;
import io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers.EmitterOpDispenserWrapper;
import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp;
import io.nosqlbench.engine.api.activityapi.simrate.*;
import io.nosqlbench.engine.core.lifecycle.scenario.container.InvokableResult;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.components.events.ParamChange;
import io.nosqlbench.engine.api.activityapi.core.*;
import io.nosqlbench.engine.api.activityapi.core.progress.ActivityMetricProgressMeter;
import io.nosqlbench.engine.api.activityapi.core.progress.ProgressMeterDisplay;
import io.nosqlbench.engine.api.activityapi.errorhandling.ErrorMetrics;
import io.nosqlbench.engine.api.activityapi.errorhandling.modular.NBErrorHandler;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityapi.planning.SequencerType;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
import io.nosqlbench.nb.api.components.status.NBStatusComponent;
import io.nosqlbench.nb.api.labels.NBLabels;
import io.nosqlbench.nb.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.nb.api.errors.BasicError;
import io.nosqlbench.nb.api.errors.OpConfigError;
import io.nosqlbench.engine.api.activityapi.cyclelog.filters.IntPredicateDispenser;
import io.nosqlbench.engine.api.activityapi.input.InputDispenser;
import io.nosqlbench.engine.api.activityapi.output.OutputDispenser;
import io.nosqlbench.engine.api.activityapi.planning.SequencePlanner;
import io.nosqlbench.adapters.api.activityconfig.OpsLoader;
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplate;
import io.nosqlbench.adapters.api.activityconfig.yaml.OpTemplateFormat;
import io.nosqlbench.adapters.api.activityconfig.yaml.OpsDocList;
import io.nosqlbench.engine.api.activityimpl.motor.RunStateTally;
import io.nosqlbench.adapters.api.activityimpl.OpDispenser;
import io.nosqlbench.adapters.api.activityimpl.OpMapper;
import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter;
import io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers.DryRunOpDispenserWrapper;
import io.nosqlbench.adapters.api.activityimpl.uniform.decorators.SyntheticOpTemplateProvider;
import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp;
import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op;
import io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers.DryRunOpDispenserWrapper;
import io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers.EmitterOpDispenserWrapper;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.engine.api.activityapi.core.*;
import io.nosqlbench.engine.api.activityapi.core.progress.ActivityMetricProgressMeter;
import io.nosqlbench.engine.api.activityapi.core.progress.ProgressMeterDisplay;
import io.nosqlbench.engine.api.activityapi.cyclelog.filters.IntPredicateDispenser;
import io.nosqlbench.engine.api.activityapi.errorhandling.ErrorMetrics;
import io.nosqlbench.engine.api.activityapi.errorhandling.modular.NBErrorHandler;
import io.nosqlbench.engine.api.activityapi.input.InputDispenser;
import io.nosqlbench.engine.api.activityapi.output.OutputDispenser;
import io.nosqlbench.engine.api.activityapi.planning.OpSequence;
import io.nosqlbench.engine.api.activityapi.planning.SequencePlanner;
import io.nosqlbench.engine.api.activityapi.planning.SequencerType;
import io.nosqlbench.engine.api.activityapi.simrate.*;
import io.nosqlbench.engine.api.activityimpl.motor.RunStateTally;
import io.nosqlbench.engine.core.lifecycle.scenario.container.InvokableResult;
import io.nosqlbench.nb.api.components.core.NBComponent;
import io.nosqlbench.nb.api.components.events.ParamChange;
import io.nosqlbench.nb.api.components.status.NBStatusComponent;
import io.nosqlbench.nb.api.engine.activityimpl.ActivityDef;
import io.nosqlbench.nb.api.errors.BasicError;
import io.nosqlbench.nb.api.errors.OpConfigError;
import io.nosqlbench.nb.api.labels.NBLabels;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -58,7 +58,6 @@ import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* A default implementation of an Activity, suitable for building upon.
@ -388,7 +387,7 @@ public class SimpleActivity extends NBStatusComponent implements Activity, Invok
}
if (0 < this.activityDef.getCycleCount() && seq.getOps().isEmpty()) {
throw new BasicError("You have configured a zero-length sequence and non-zero cycles. Tt is not possible to continue with this activity.");
throw new BasicError("You have configured a zero-length sequence and non-zero cycles. It is not possible to continue with this activity.");
}
}

View File

@ -264,10 +264,31 @@ public class NBCLIScenarioPreprocessor {
private static final Pattern WordAndMaybeAssignment = Pattern.compile("(?<name>\\w[-_\\d\\w.]+)((?<oper>=+)(?<val>.+))?");
/**
 * Splits a scenario command line into its space-separated fragments while honoring
 * single quotes, double quotes and backslash escapes.
 *
 * <p>Quoted sections keep their embedded spaces; the surrounding quotes are removed.
 * Escaped quotes ({@code \"} / {@code \'}) survive as literal quotes, and {@code \\}
 * collapses to a single backslash.</p>
 *
 * @param cmd the raw scenario command string
 * @return the command fragments with quoting and escapes resolved
 * @throws BasicError if any fragment contains an unbalanced (unclosed) quote
 */
public static String[] splitCommand(String cmd) {
    // split on runs of spaces that are outside any balanced "..." or '...' section
    String[] namedStepPieces = cmd.split(" +(?=([^\"]*\"[^\"]*\")*[^\"]*$)(?=([^']*'[^']*')*[^']*$)");
    // quotes NOT preceded by a backslash, i.e. unescaped quote characters
    Pattern unescapedDoubleQuote = Pattern.compile("(?<!\\\\)\"");
    Pattern unescapedSingleQuote = Pattern.compile("(?<!\\\\)'");
    for (int i = 0; i < namedStepPieces.length; i++) {
        String stepPiece = namedStepPieces[i];
        // an odd number of unescaped quotes of either kind means an unclosed quote
        boolean balanced = unescapedDoubleQuote.matcher(stepPiece).results().count() % 2 == 0;
        balanced = balanced && (unescapedSingleQuote.matcher(stepPiece).results().count() % 2 == 0);
        if (!balanced) {
            throw new BasicError("Unclosed quote found in scenario cmd '" + cmd + "'");
        }
        // strip the (unescaped) quote delimiters
        stepPiece = unescapedDoubleQuote.matcher(stepPiece).replaceAll("");
        stepPiece = unescapedSingleQuote.matcher(stepPiece).replaceAll("");
        // drop a single escape backslash (one not followed by another backslash),
        // turning \" into " and \\ into \  — stated directly as a regex literal
        // instead of the previous Matcher.quoteReplacement(...) call, which is meant
        // for replacement strings, not patterns, and only worked by coincidence
        namedStepPieces[i] = stepPiece.replaceAll("\\\\(?!\\\\)", "");
    }
    return namedStepPieces;
}
private static LinkedHashMap<String, SCNamedParam> parseStep(String cmd, String stepName, String scenarioName) {
LinkedHashMap<String, SCNamedParam> parsedStep = new LinkedHashMap<>();
String[] namedStepPieces = cmd.split(" +");
String[] namedStepPieces = splitCommand(cmd);
for (String commandFragment : namedStepPieces) {
Matcher matcher = WordAndMaybeAssignment.matcher(commandFragment);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2022-2023 nosqlbench
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -22,11 +22,17 @@ import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.Filter;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.*;
import org.apache.logging.log4j.core.appender.AbstractOutputStreamAppender;
import org.apache.logging.log4j.core.appender.ConsoleAppender;
import org.apache.logging.log4j.core.appender.FileAppender;
import org.apache.logging.log4j.core.appender.OutputStreamManager;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.ConfigurationFactory;
import org.apache.logging.log4j.core.config.ConfigurationSource;
import org.apache.logging.log4j.core.config.builder.api.*;
import org.apache.logging.log4j.core.config.builder.api.AppenderComponentBuilder;
import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilder;
import org.apache.logging.log4j.core.config.builder.api.LayoutComponentBuilder;
import org.apache.logging.log4j.core.config.builder.api.RootLoggerComponentBuilder;
import org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration;
import java.io.File;
@ -399,7 +405,8 @@ public class NBLoggerConfig extends ConfigurationFactory {
File[] files = loggerDir.toFile().listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
return pathname.getPath().endsWith(".log") || pathname.getPath().endsWith(".log.gz");
return pathname.getPath().endsWith(".log") || pathname.getPath().endsWith(".log.gz")
|| pathname.getPath().endsWith(".txt") || pathname.getPath().endsWith(".yaml");
}
});

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2022-2023 nosqlbench
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -302,6 +302,7 @@ public class ParsedTemplateMap implements LongFunction<Map<String, ?>>, StaticFi
} else if (isConfig(field)) {
return getConfig(field);
}
logger.warn("static field '{}' was requested, but it does not exist", field);
return null;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2023 nosqlbench
* Copyright (c) 2020-2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -28,6 +28,22 @@ import static org.assertj.core.api.Assertions.assertThat;
public class ParsedTemplateMapTest {
// @Test
// public void testBindpointTypes() {
// ParsedTemplateMap ptm = new ParsedTemplateMap(
// "name1",
// Map.of(
// "field1","{binding1}",
// "field2", " {binding2}"
// ),
// Map.of(
// "binding1","TestingStringFunc('b1')",
// "binding2","TestingStringFunc('b2')"
// ),
// List.of(Map.of()));
//
// }
@Test
public void testParsedTemplateMap() {
ParsedTemplateMap ptm = new ParsedTemplateMap("name1", Map.of("string1", "string2"), Map.of(), List.of());
@ -58,4 +74,10 @@ public class ParsedTemplateMapTest {
}
@Test
public void testForNullWhenNoFieldFoundWhenCallingStaticValue() {
ParsedTemplateMap ptm = new ParsedTemplateMap("name1", Map.of("string1", "string2"), Map.of(), List.of());
assertThat(ptm.getStaticValue("notfound", String.class)).isNull();
}
}

View File

@ -18,10 +18,13 @@ package io.nosqlbench.virtdata.library.basics.shared.from_long.to_string;
import io.nosqlbench.virtdata.api.annotations.Categories;
import io.nosqlbench.virtdata.api.annotations.Category;
import io.nosqlbench.virtdata.api.annotations.Example;
import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper;
import io.nosqlbench.virtdata.api.bindings.VirtDataConversions;
import io.nosqlbench.virtdata.library.basics.shared.from_long.to_long.Hash;
import java.util.function.LongFunction;
import java.util.function.LongToIntFunction;
/**
* Create an alpha-numeric string of the specified length, character-by-character.
@ -32,20 +35,39 @@ public class AlphaNumericString implements LongFunction<String> {
private static final String AVAILABLE_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
private final ThreadLocal<StringBuilder> threadStringBuilder = ThreadLocal.withInitial(StringBuilder::new);
private final Hash hash = new Hash();
private final int length;
private final LongToIntFunction lengthFunc;
public AlphaNumericString(int length)
{
if (length < 0)
{
throw new RuntimeException("AlphaNumericString must have length >= 0");
@Example({
"AlphaNumericString(10)",
"Create a 10-character alpha-numeric string"
})
@Example({
"AlphaNumericString(HashRange(10, 20))",
"Create an alpha-numeric string with a length between 10 and 20 characters"
})
public AlphaNumericString(int length) {
this.lengthFunc = l -> length;
}
public AlphaNumericString(Object lengthfunc) {
if (lengthfunc instanceof Number) {
int length = ((Number) lengthfunc).intValue();
this.lengthFunc = l -> length;
}
else {
this.lengthFunc = VirtDataConversions.adaptFunction(lengthfunc, LongToIntFunction.class);
}
this.length = length;
}
@Override
public String apply(long operand)
{
int length = lengthFunc.applyAsInt(operand);
if (length < 0)
{
throw new RuntimeException("AlphaNumericString must have length >= 0");
}
long hashValue = operand;
StringBuilder sb = threadStringBuilder.get();
sb.setLength(0);
@ -61,6 +83,6 @@ public class AlphaNumericString implements LongFunction<String> {
@Override
public String toString()
{
return "AlphaNumericString(length=" + length + ")";
return "AlphaNumericString(lengthFunc.class=" + lengthFunc.getClass().getSimpleName() + ")";
}
}