More updates to the Milvus adapter, distinguishing row-oriented ("rows") from field-oriented ("fields") insert modes

This commit is contained in:
Jonathan Shook 2024-03-21 12:58:55 -05:00
parent 66bd819710
commit 03bc0adf16
50 changed files with 1298 additions and 566 deletions

View File

@ -60,7 +60,10 @@ public class MilvusOpMapper implements OpMapper<MilvusBaseOp<?>> {
case create_collection -> new MilvusCreateCollectionOpDispenser(adapter, op, typeAndTarget.targetFunction);
case create_index -> new MilvusCreateIndexOpDispenser(adapter, op, typeAndTarget.targetFunction);
case drop_index -> new MilvusDropIndexOpDispenser(adapter, op, typeAndTarget.targetFunction);
case insert -> new MilvusInsertOpDispenser(adapter, op, typeAndTarget.targetFunction);
// Uses the Collection-specific fields (columnar) insert mode
case insert_fields -> new MilvusInsertOpDispenser(adapter, op, typeAndTarget.targetFunction);
// Uses the High-Level row-by-row JSONObject (tabular) insert mode
case insert_rows -> new MilvusInsertRowsOpDispenser(adapter, op, typeAndTarget.targetFunction);
case delete -> new MilvusDeleteOpDispenser(adapter, op, typeAndTarget.targetFunction);
case search -> new MilvusSearchOpDispenser(adapter, op, typeAndTarget.targetFunction);
case alter_alias -> new MilvusAlterAliasOpDispenser(adapter, op, typeAndTarget.targetFunction);

View File

@ -72,7 +72,7 @@ public class MilvusSpace implements AutoCloseable {
private MilvusServiceClient createClient() {
var builder = ConnectParam.newBuilder();
builder = builder.withUri(cfg.get("uri"));
builder = builder.withDatabaseName(cfg.get("database_name"));
cfg.getOptional("database_name").ifPresent(builder::withDatabaseName);
var requiredToken = cfg.getOptional("token_file")
.map(Paths::get)
@ -113,11 +113,11 @@ public class MilvusSpace implements AutoCloseable {
.setDescription("the Milvus/Zilliz token to use to connect to the database")
)
.add(
Param.defaultTo("uri", "localhost:19530")
Param.defaultTo("uri", "127.0.0.1:19530")
.setDescription("the URI endpoint at which the database is running.")
)
.add(
Param.defaultTo("database_name", "baselines")
Param.optional("database_name")
.setDescription("the name of the database to use. If not set, no database name is applied to the connection.")
)
.asReadOnly();

View File

@ -52,7 +52,8 @@ public class MilvusAlterAliasOpDispenser extends MilvusBaseOpDispenser<AlterAlia
l -> AlterAliasParam.newBuilder().withAlias(targetF.apply(l));
// Add enhancement functions here
ebF = op.enhanceFuncOptionally(ebF,"collection_name",String.class,AlterAliasParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(
ebF,List.of("collection_name","collection"),String.class,AlterAliasParam.Builder::withCollectionName);
final LongFunction<AlterAliasParam.Builder> lastF = ebF;
final LongFunction<AlterAliasParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -23,6 +23,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusCreateAliasOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusCreateAliasOpDispenser extends MilvusBaseOpDispenser<CreateAliasParam> {
@ -41,9 +42,10 @@ public class MilvusCreateAliasOpDispenser extends MilvusBaseOpDispenser<CreateAl
) {
LongFunction<CreateAliasParam.Builder> ebF =
l -> CreateAliasParam.newBuilder().withAlias(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF, "collection_name",String.class,
ebF = op.enhanceFuncOptionally(ebF, List.of("collection_name","collection"),String.class,
CreateAliasParam.Builder::withCollectionName);
final LongFunction<CreateAliasParam.Builder> lastF = ebF;
final LongFunction<CreateAliasParam> collectionParamF = l -> lastF.apply(l).build();
return collectionParamF;

View File

@ -19,7 +19,6 @@ package io.nosqlbench.adapter.milvus.opdispensers;
import io.milvus.client.MilvusServiceClient;
import io.milvus.common.clientenum.ConsistencyLevelEnum;
import io.milvus.grpc.DataType;
import io.milvus.param.alias.CreateAliasParam;
import io.milvus.param.collection.CreateCollectionParam;
import io.milvus.param.collection.FieldType;
import io.nosqlbench.adapter.milvus.MilvusDriverAdapter;
@ -64,15 +63,22 @@ public class MilvusCreateCollectionOpDispenser extends MilvusBaseOpDispenser<Cre
ebF = op.enhanceFuncOptionally(ebF, "shards_num", Integer.class,
CreateCollectionParam.Builder::withShardsNum);
ebF = op.enhanceFuncOptionally(ebF, "partition_num", Integer.class,
CreateCollectionParam.Builder::withPartitionsNum);
ebF = op.enhanceFuncOptionally(ebF, "description", String.class,
CreateCollectionParam.Builder::withDescription);
ebF = op.enhanceEnumOptionally(ebF, "consistency_level",
ConsistencyLevelEnum.class, CreateCollectionParam.Builder::withConsistencyLevel);
ebF = op.enhanceFuncOptionally(ebF, "database_name", String.class,
CreateCollectionParam.Builder::withDatabaseName);
Map<String,Object> fieldTypesMap = op.getStaticValue("field_types", Map.class);
List<FieldType> fieldTypes = buildFieldTypesStruct(fieldTypesMap);
List<FieldType> fieldTypes = buildFieldTypesStruct(
op.getAsSubOps("field_types", ParsedOp.SubOpNaming.SubKey),
ebF
);
final LongFunction<CreateCollectionParam.Builder> f = ebF;
ebF = l -> f.apply(l).withFieldTypes(fieldTypes);
final LongFunction<CreateCollectionParam.Builder> lastF = ebF;
return l -> lastF.apply(l).build();
}
@ -91,39 +97,39 @@ public class MilvusCreateCollectionOpDispenser extends MilvusBaseOpDispenser<Cre
/**
* Function to build the {@link FieldType}s for the {@link CreateCollectionParam}.
*
* @param fieldTypesData The static map of config data from the create collection request
* @param fieldTypesData
* The static map of config data from the create collection request
* @param ebF
* @return a list of static field types
*/
private List<FieldType> buildFieldTypesStruct(Map<String,Object> fieldTypesData) {
private List<FieldType> buildFieldTypesStruct(Map<String, ParsedOp> fieldTypesData, LongFunction<CreateCollectionParam.Builder> ebF) {
List<FieldType> fieldTypes = new ArrayList<>();
fieldTypesData.forEach((name, properties) -> {
FieldType.Builder fieldTypeBuilder = FieldType.newBuilder()
fieldTypesData.forEach((name, fieldspec) -> {
FieldType.Builder builder = FieldType.newBuilder()
.withName(name);
if (properties instanceof Map<?,?> map) {
fieldspec.getOptionalStaticValue("primary_key", Boolean.class)
.ifPresent(builder::withPrimaryKey);
fieldspec.getOptionalStaticValue("auto_id", Boolean.class)
.ifPresent(builder::withAutoID);
fieldspec.getOptionalStaticConfig("max_length", Integer.class)
.ifPresent(builder::withMaxLength);
fieldspec.getOptionalStaticConfig("max_capacity", Integer.class)
.ifPresent(builder::withMaxCapacity);
fieldspec.getOptionalStaticValue(List.of("partition_key","partition"), Boolean.class)
.ifPresent(builder::withPartitionKey);
fieldspec.getOptionalStaticValue("dimension", Integer.class)
.ifPresent(builder::withDimension);
fieldspec.getOptionalStaticConfig("data_type", String.class)
.map(DataType::valueOf)
.ifPresent(builder::withDataType);
fieldspec.getOptionalStaticConfig("type_params", Map.class)
.ifPresent(builder::withTypeParams);
fieldspec.getOptionalStaticConfig("element_type",String.class)
.map(DataType::valueOf)
.ifPresent(builder::withElementType);
if (map.containsKey("primary_key")) {
fieldTypeBuilder.withPrimaryKey(Boolean.parseBoolean((String) map.get("primary_key")));
}
if (map.containsKey("auto_id")) {
fieldTypeBuilder.withAutoID(Boolean.parseBoolean((String) map.get("auto_id")));
}
if (map.containsKey("partition_key")) {
fieldTypeBuilder.withPartitionKey(Boolean.parseBoolean((String) map.get("partition_key")));
}
if (map.containsKey("dimension")) {
fieldTypeBuilder.withDimension(Integer.parseInt((String) map.get("dimension")));
}
if (map.containsKey("data_type")) {
fieldTypeBuilder.withDataType(DataType.valueOf((String) map.get("data_type")));
}
} else {
throw new RuntimeException("Invalid type for field_types specified." +
" It needs to be a map. Check out the examples in the driver documentation.");
}
fieldTypes.add(fieldTypeBuilder.build());
fieldTypes.add(builder.build());
});
return fieldTypes;
}

View File

@ -17,6 +17,8 @@
package io.nosqlbench.adapter.milvus.opdispensers;
import io.milvus.client.MilvusServiceClient;
import io.milvus.param.IndexType;
import io.milvus.param.MetricType;
import io.milvus.param.index.CreateIndexParam;
import io.nosqlbench.adapter.milvus.MilvusDriverAdapter;
import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
@ -25,6 +27,7 @@ import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusCreateIndexOpDispenser extends MilvusBaseOpDispenser<CreateIndexParam> {
@ -53,14 +56,17 @@ public class MilvusCreateIndexOpDispenser extends MilvusBaseOpDispenser<CreateIn
LongFunction<CreateIndexParam.Builder> bF =
l -> CreateIndexParam.newBuilder().withIndexName(targetF.apply(l));
bF = op.enhanceFunc(bF, "collection_name", String.class, CreateIndexParam.Builder::withCollectionName);
bF = op.enhanceFunc(bF, List.of("collection","collection_name"), String.class,
CreateIndexParam.Builder::withCollectionName);
bF = op.enhanceFunc(bF, "field_name", String.class, CreateIndexParam.Builder::withFieldName);
bF = op.enhanceFunc(bF, "index_type", String.class, CreateIndexParam.Builder::withFieldName);
bF = op.enhanceFunc(bF, "metric_type", String.class, CreateIndexParam.Builder::withFieldName);
bF = op.enhanceFuncOptionally(bF, "extra_param", String.class, CreateIndexParam.Builder::withFieldName);
bF = op.enhanceEnumOptionally(bF, "index_type", IndexType.class, CreateIndexParam.Builder::withIndexType);
bF = op.enhanceEnumOptionally(bF, "metric_type", MetricType.class, CreateIndexParam.Builder::withMetricType);
bF = op.enhanceFuncOptionally(bF, "extra_param", String.class, CreateIndexParam.Builder::withExtraParam);
bF = op.enhanceFuncOptionally(bF, "sync_mode", Boolean.class, CreateIndexParam.Builder::withSyncMode);
bF = op.enhanceFuncOptionally(bF, "sync_waiting_interval", Long.class, CreateIndexParam.Builder::withSyncWaitingInterval);
bF = op.enhanceFuncOptionally(bF, "sync_waiting_timeout", Long.class, CreateIndexParam.Builder::withSyncWaitingTimeout);
bF = op.enhanceFuncOptionally(bF, List.of("database","database_name"), String.class,
CreateIndexParam.Builder::withDatabaseName);
LongFunction<CreateIndexParam.Builder> finalBF1 = bF;
return l -> finalBF1.apply(l).build();
}

View File

@ -23,6 +23,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusCreatePartitionOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusCreatePartitionOpDispenser extends MilvusBaseOpDispenser<CreatePartitionParam> {
@ -42,7 +43,8 @@ public class MilvusCreatePartitionOpDispenser extends MilvusBaseOpDispenser<Crea
LongFunction<CreatePartitionParam.Builder> ebF =
l -> CreatePartitionParam.newBuilder().withCollectionName(targetF.apply(l));
// Add enhancement functions here
ebF = op.enhanceFunc(ebF,"collection_name",String.class,CreatePartitionParam.Builder::withCollectionName);
ebF = op.enhanceFunc(ebF, List.of("collection","collection_name"),String.class,
CreatePartitionParam.Builder::withCollectionName);
final LongFunction<CreatePartitionParam.Builder> lastF = ebF;
final LongFunction<CreatePartitionParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -26,6 +26,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusDeleteParamOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.milvus.param.dml.DeleteParam.Builder;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusDeleteOpDispenser extends MilvusBaseOpDispenser<DeleteParam> {
@ -39,7 +40,8 @@ public class MilvusDeleteOpDispenser extends MilvusBaseOpDispenser<DeleteParam>
public LongFunction<DeleteParam> getParamFunc(LongFunction<MilvusServiceClient> clientF, ParsedOp op, LongFunction<String> targetF) {
LongFunction<DeleteParam.Builder> f =
l -> DeleteParam.newBuilder().withCollectionName(targetF.apply(l));
f = op.enhanceFuncOptionally(f, "partition", String.class, DeleteParam.Builder::withPartitionName);
f = op.enhanceFuncOptionally(f, List.of("partition_name","partition"), String.class,
DeleteParam.Builder::withPartitionName);
f = op.enhanceFuncOptionally(f, "expression", String.class, DeleteParam.Builder::withExpr);
f = op.enhanceFuncOptionally(f, "expr", String.class, Builder::withExpr);
LongFunction<DeleteParam.Builder> finalF = f;

View File

@ -24,6 +24,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusDescribeCollectionOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusDescribeCollectionOpDispenser extends MilvusBaseOpDispenser<DescribeCollectionParam> {
@ -43,7 +44,8 @@ public class MilvusDescribeCollectionOpDispenser extends MilvusBaseOpDispenser<D
LongFunction<DescribeCollectionParam.Builder> ebF =
l -> DescribeCollectionParam.newBuilder().withCollectionName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"database_name",String.class, DescribeCollectionParam.Builder::withDatabaseName);
ebF = op.enhanceFuncOptionally(ebF, List.of("database","database_name"),String.class,
DescribeCollectionParam.Builder::withDatabaseName);
final LongFunction<DescribeCollectionParam.Builder> lastF = ebF;
final LongFunction<DescribeCollectionParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -24,6 +24,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusDescribeIndexOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusDescribeIndexOpDispenser extends MilvusBaseOpDispenser<DescribeIndexParam> {
@ -42,8 +43,10 @@ public class MilvusDescribeIndexOpDispenser extends MilvusBaseOpDispenser<Descri
) {
LongFunction<DescribeIndexParam.Builder> ebF =
l -> DescribeIndexParam.newBuilder().withIndexName(targetF.apply(l));
ebF = op.enhanceFunc(ebF,"collection_name",String.class, DescribeIndexParam.Builder::withCollectionName);
ebF = op.enhanceFunc(ebF,"database_name",String.class,DescribeIndexParam.Builder::withDatabaseName);
ebF = op.enhanceFunc(ebF, List.of("collection","collection_name"),String.class,
DescribeIndexParam.Builder::withCollectionName);
ebF = op.enhanceFunc(ebF,List.of("database_name","database"),String.class,
DescribeIndexParam.Builder::withDatabaseName);
final LongFunction<DescribeIndexParam.Builder> lastF = ebF;
final LongFunction<DescribeIndexParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -27,6 +27,7 @@ import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusDropCollectionOpDispenser extends MilvusBaseOpDispenser<DropCollectionParam> {
@ -54,7 +55,8 @@ public class MilvusDropCollectionOpDispenser extends MilvusBaseOpDispenser<DropC
LongFunction<String> targetF) {
LongFunction<DropCollectionParam.Builder> f =
l -> DropCollectionParam.newBuilder().withCollectionName(targetF.apply(l));
f = op.enhanceFuncOptionally(f, "database_name", String.class, DropCollectionParam.Builder::withDatabaseName);
f = op.enhanceFuncOptionally(f, List.of("database","database_name"),String.class,
DropCollectionParam.Builder::withDatabaseName);
LongFunction<DropCollectionParam.Builder> finalF = f;
return l -> finalF.apply(l).build();
}

View File

@ -27,6 +27,7 @@ import io.nosqlbench.adapters.api.templating.ParsedOp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusDropIndexOpDispenser extends MilvusBaseOpDispenser<DropIndexParam> {
@ -52,7 +53,8 @@ public class MilvusDropIndexOpDispenser extends MilvusBaseOpDispenser<DropIndexP
/**
 * Build the {@link DropIndexParam} function for a drop_index op.
 * The op target supplies the index name; the collection name is required
 * under either the {@code collection_name} or {@code collection} key.
 *
 * @param clientF unused here; present for interface symmetry
 * @param op the parsed op template
 * @param targetF yields the index name for each cycle
 * @return a per-cycle {@link DropIndexParam} constructor function
 */
public LongFunction<DropIndexParam> getParamFunc(LongFunction<MilvusServiceClient> clientF, ParsedOp op, LongFunction<String> targetF) {
    LongFunction<DropIndexParam.Builder> f =
        l -> DropIndexParam.newBuilder().withIndexName(targetF.apply(l));
    f = op.enhanceFunc(f, List.of("collection_name", "collection"), String.class,
        DropIndexParam.Builder::withCollectionName);
    LongFunction<DropIndexParam.Builder> finalF = f;
    // BUGFIX: was finalF.apply(1) — the literal 1 meant every cycle built the
    // params for cycle 1 instead of the current cycle value l.
    return l -> finalF.apply(l).build();
}

View File

@ -24,6 +24,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusDropPartitionOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusDropPartitionOpDispenser extends MilvusBaseOpDispenser<DropPartitionParam> {
@ -42,7 +43,8 @@ public class MilvusDropPartitionOpDispenser extends MilvusBaseOpDispenser<DropPa
) {
LongFunction<DropPartitionParam.Builder> ebF =
l -> DropPartitionParam.newBuilder().withPartitionName(targetF.apply(l));
ebF = op.enhanceFunc(ebF,"collection_name",String.class, DropPartitionParam.Builder::withCollectionName);
ebF = op.enhanceFunc(ebF, List.of("collection_name","collection"),String.class,
DropPartitionParam.Builder::withCollectionName);
final LongFunction<DropPartitionParam.Builder> lastF = ebF;
final LongFunction<DropPartitionParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -51,7 +51,8 @@ public class MilvusFlushOpDispenser extends MilvusBaseOpDispenser<FlushParam> {
};
LongFunction<FlushParam.Builder> finalEbF = ebF;
ebF = l -> finalEbF.apply(l).withCollectionNames(cnames.apply(l));
ebF = op.enhanceFuncOptionally(ebF, "database_name",String.class,FlushParam.Builder::withDatabaseName);
ebF = op.enhanceFuncOptionally(ebF, List.of("database_name","database"),String.class,
FlushParam.Builder::withDatabaseName);
ebF = op.enhanceFuncOptionally(ebF, "sync_flush_waiting_interval",Long.class,
FlushParam.Builder::withSyncFlushWaitingInterval);
ebF = op.enhanceFuncOptionally(ebF, "sync_flush_waiting_timeout",Long.class,

View File

@ -23,6 +23,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusGetCollectionStatisticsOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusGetCollectionStatisticsOpDispenser extends MilvusBaseOpDispenser<GetCollectionStatisticsParam> {
@ -42,7 +43,7 @@ public class MilvusGetCollectionStatisticsOpDispenser extends MilvusBaseOpDispen
LongFunction<GetCollectionStatisticsParam.Builder> ebF =
l -> GetCollectionStatisticsParam.newBuilder().withCollectionName(targetF.apply(l));
// Add enhancement functions here
ebF = op.enhanceFuncOptionally(ebF,"database_name", String.class,
ebF = op.enhanceFuncOptionally(ebF, List.of("database_name","database"), String.class,
GetCollectionStatisticsParam.Builder::withDatabaseName);
ebF = op.enhanceFuncOptionally(ebF,"flush",Boolean.class,GetCollectionStatisticsParam.Builder::withFlush);

View File

@ -52,7 +52,8 @@ public class MilvusGetFlushStateOpDispenser extends MilvusBaseOpDispenser<GetFlu
};
LongFunction<GetFlushStateParam.Builder> finalEbF = ebF;
ebF = l -> finalEbF.apply(l).withSegmentIDs(idsF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"collection_name",String.class,GetFlushStateParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF,List.of("collection","collection_name"),String.class,
GetFlushStateParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF,"flush_ts",Long.class,GetFlushStateParam.Builder::withFlushTs);
final LongFunction<GetFlushStateParam.Builder> lastF = ebF;

View File

@ -23,6 +23,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusGetIndexBuildProgressOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusGetIndexBuildProgressOpDispenser extends MilvusBaseOpDispenser<GetIndexBuildProgressParam> {
@ -41,7 +42,8 @@ public class MilvusGetIndexBuildProgressOpDispenser extends MilvusBaseOpDispense
) {
LongFunction<GetIndexBuildProgressParam.Builder> ebF =
l -> GetIndexBuildProgressParam.newBuilder().withIndexName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"collection_name",String.class,GetIndexBuildProgressParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF, List.of("collection_name","collection"),String.class,
GetIndexBuildProgressParam.Builder::withCollectionName);
final LongFunction<GetIndexBuildProgressParam.Builder> lastF = ebF;
final LongFunction<GetIndexBuildProgressParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -23,6 +23,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusGetIndexStateOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusGetIndexStateOpDispenser extends MilvusBaseOpDispenser<GetIndexStateParam> {
@ -41,7 +42,8 @@ public class MilvusGetIndexStateOpDispenser extends MilvusBaseOpDispenser<GetInd
) {
LongFunction<GetIndexStateParam.Builder> ebF =
l -> GetIndexStateParam.newBuilder().withIndexName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"collection_name",String.class,GetIndexStateParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF, List.of("collection_name","collection"),String.class,
GetIndexStateParam.Builder::withCollectionName);
final LongFunction<GetIndexStateParam.Builder> lastF = ebF;
final LongFunction<GetIndexStateParam> collectionParamF = l -> lastF.apply(l).build();
return collectionParamF;

View File

@ -25,6 +25,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusGetLoadStateOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.function.LongFunction;
@ -44,7 +45,8 @@ public class MilvusGetLoadStateOpDispenser extends MilvusBaseOpDispenser<GetLoad
) {
LongFunction<GetLoadStateParam.Builder> ebF =
l -> GetLoadStateParam.newBuilder().withCollectionName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"database_name",String.class,GetLoadStateParam.Builder::withDatabaseName);
ebF = op.enhanceFuncOptionally(ebF, List.of("database_name","database"),String.class,
GetLoadStateParam.Builder::withDatabaseName);
Optional<LongFunction<String>> partitionsF = op.getAsOptionalFunction("partition_name", String.class);
if (partitionsF.isPresent()) {

View File

@ -46,7 +46,8 @@ public class MilvusGetLoadingProgressOpDispenser extends MilvusBaseOpDispenser<G
) {
LongFunction<GetLoadingProgressParam.Builder> ebF =
l -> GetLoadingProgressParam.newBuilder().withCollectionName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"partition_names", List.class,GetLoadingProgressParam.Builder::withPartitionNames);
ebF = op.enhanceFuncOptionally(ebF, List.of("partition_names","partitions"), List.class,
GetLoadingProgressParam.Builder::withPartitionNames);
final LongFunction<GetLoadingProgressParam.Builder> lastF = ebF;
return l -> lastF.apply(l).build();
}

View File

@ -77,9 +77,12 @@ public class MilvusGetOpDispenser extends MilvusBaseOpDispenser<GetIdsParam> {
LongFunction<GetIdsParam.Builder> finalEbF2 = ebF;
ebF = l -> finalEbF2.apply(l).withPrimaryIds(pidsF.apply(l));
ebF = op.enhanceFuncOptionally(ebF, "collection_name", String.class, GetIdsParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF, List.of("collection_name","collection"), String.class,
GetIdsParam.Builder::withCollectionName);
ebF = op.enhanceEnumOptionally(ebF, "consistency_level", ConsistencyLevelEnum.class,
GetIdsParam.Builder::withConsistencyLevel);
ebF = op.enhanceEnumOptionally(ebF, "cl", ConsistencyLevelEnum.class,
GetIdsParam.Builder::withConsistencyLevel);
if (op.isDefined("output_fields")) {
LongFunction<Object> outputFieldsF = op.getAsRequiredFunction("output_fields", Object.class);

View File

@ -23,6 +23,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusGetPartitionStatisticsOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusGetPartitionStatisticsOpDispenser extends MilvusBaseOpDispenser<GetPartitionStatisticsParam> {
@ -41,7 +42,8 @@ public class MilvusGetPartitionStatisticsOpDispenser extends MilvusBaseOpDispens
) {
LongFunction<GetPartitionStatisticsParam.Builder> ebF =
l -> GetPartitionStatisticsParam.newBuilder().withPartitionName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"collection_name",String.class,GetPartitionStatisticsParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF, List.of("collection_name","collection"),String.class,
GetPartitionStatisticsParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF, "flush",Boolean.class, GetPartitionStatisticsParam.Builder::withFlush);
final LongFunction<GetPartitionStatisticsParam.Builder> lastF = ebF;

View File

@ -23,6 +23,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusGetPartitionStatisticsOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusGetPersistentSegmentInfoOpDispenser extends MilvusBaseOpDispenser<GetPartitionStatisticsParam> {
@ -43,7 +44,8 @@ public class MilvusGetPersistentSegmentInfoOpDispenser extends MilvusBaseOpDispe
l -> GetPartitionStatisticsParam.newBuilder().withPartitionName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"flush",Boolean.class,GetPartitionStatisticsParam.Builder::withFlush);
ebF = op.enhanceFuncOptionally(ebF,"collection_name",String.class,GetPartitionStatisticsParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF, List.of("collection_name","collection"),String.class,
GetPartitionStatisticsParam.Builder::withCollectionName);
final LongFunction<GetPartitionStatisticsParam.Builder> lastF = ebF;
final LongFunction<GetPartitionStatisticsParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -23,6 +23,7 @@ import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusHasPartitionOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import java.util.List;
import java.util.function.LongFunction;
public class MilvusHasPartitionOpDispenser extends MilvusBaseOpDispenser<HasPartitionParam> {
@ -41,7 +42,8 @@ public class MilvusHasPartitionOpDispenser extends MilvusBaseOpDispenser<HasPart
) {
LongFunction<HasPartitionParam.Builder> ebF =
l -> HasPartitionParam.newBuilder().withPartitionName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"collection_name",String.class,HasPartitionParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF, List.of("collection_name","collection"),String.class,
HasPartitionParam.Builder::withCollectionName);
final LongFunction<HasPartitionParam.Builder> lastF = ebF;
final LongFunction<HasPartitionParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -25,12 +25,14 @@ import io.nosqlbench.adapter.milvus.MilvusDriverAdapter;
import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusInsertOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.errors.OpConfigError;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.LongFunction;
public class MilvusInsertOpDispenser extends MilvusBaseOpDispenser<InsertParam> {
@ -53,18 +55,40 @@ public class MilvusInsertOpDispenser extends MilvusBaseOpDispenser<InsertParam>
public LongFunction<InsertParam> getParamFunc(LongFunction<MilvusServiceClient> clientF, ParsedOp op, LongFunction<String> targetF) {
LongFunction<InsertParam.Builder> f =
l -> InsertParam.newBuilder().withCollectionName(targetF.apply(l));
f = op.enhanceFuncOptionally(f, "partition", String.class, InsertParam.Builder::withPartitionName);
f = op.enhanceFuncOptionally(f, "database", String.class, InsertParam.Builder::withDatabaseName);
if (op.isDefined("rows")) {
LongFunction<List<JSONObject>> rowsF = createRowsF(op);
LongFunction<InsertParam.Builder> finalF = f;
f = l -> finalF.apply(l).withRows(rowsF.apply(l));
f = op.enhanceFuncOptionally(f, "rows", List.class, InsertParam.Builder::withRows);
f = op.enhanceFuncOptionally(
f, List.of("partition_name","partition"), String.class,
InsertParam.Builder::withPartitionName
);
f = op.enhanceFuncOptionally(
f, List.of("database_name","database"), String.class,
InsertParam.Builder::withDatabaseName
);
Optional<LongFunction<List<JSONObject>>> optionalRowsF = MilvusOpUtils.getHighLevelRowsFunction(op, "rows");
Optional<LongFunction<List<InsertParam.Field>>> optionalFieldsF = MilvusOpUtils.getFieldsFunction(op, "fields");
if (optionalFieldsF.isPresent() && optionalRowsF.isPresent()) {
throw new OpConfigError("Must provide either rows or fields, but not both.");
}
LongFunction<InsertParam.Builder> finalF1 = f;
LongFunction<List<InsertParam.Field>> fieldsF = createFieldsF(op);
LongFunction<InsertParam> insertParamsF = l -> finalF1.apply(l).withFields(fieldsF.apply(l)).build();
return insertParamsF;
if (optionalFieldsF.isEmpty() && optionalRowsF.isEmpty()) {
throw new OpConfigError("Must provide either rows or fields");
}
if (optionalRowsF.isPresent()) {
var rf = optionalRowsF.get();
LongFunction<InsertParam.Builder> finalF2 = f;
f = l -> finalF2.apply(l).withRows(rf.apply(l));
}
if (optionalFieldsF.isPresent()) {
var ff = optionalFieldsF.get();
LongFunction<InsertParam.Builder> finalF3 = f;
f = l -> finalF3.apply(l).withFields(ff.apply(l));
}
LongFunction<InsertParam.Builder> finalF = f;
return l -> finalF.apply(l).build();
}
@Override
@ -76,21 +100,4 @@ public class MilvusInsertOpDispenser extends MilvusBaseOpDispenser<InsertParam>
return l -> new MilvusInsertOp(clientF.apply(l), paramF.apply(l));
}
private LongFunction<List<InsertParam.Field>> createFieldsF(ParsedOp op) {
LongFunction<Map> fieldDataF = op.getAsRequiredFunction("fields", Map.class);
LongFunction<List<InsertParam.Field>> fieldsF = l -> {
Map<String, Object> fieldmap = fieldDataF.apply(l);
List<InsertParam.Field> fields = new ArrayList<>();
fieldmap.forEach((name, value) -> {
fields.add(new InsertParam.Field(name, (List) value));
});
return fields;
};
return fieldsF;
}
private LongFunction<List<JSONObject>> createRowsF(ParsedOp op) {
throw new RuntimeException("This is not implemented yet");
}
}

View File

@ -0,0 +1,85 @@
package io.nosqlbench.adapter.milvus.opdispensers;
/*
* Copyright (c) 2022 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import io.milvus.client.MilvusServiceClient;
import io.milvus.param.dml.InsertParam;
import io.milvus.param.highlevel.dml.InsertRowsParam;
import io.nosqlbench.adapter.milvus.MilvusDriverAdapter;
import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp;
import io.nosqlbench.adapter.milvus.ops.MilvusInsertOp;
import io.nosqlbench.adapter.milvus.ops.MilvusInsertRowsOp;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.errors.OpConfigError;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.LongFunction;
public class MilvusInsertRowsOpDispenser extends MilvusBaseOpDispenser<InsertRowsParam> {
    private static final Logger logger = LogManager.getLogger(MilvusInsertRowsOpDispenser.class);

    /**
     * Create a new MilvusInsertRowsOpDispenser subclassed from {@link MilvusBaseOpDispenser}.
     * This dispenser services the high-level, row-by-row (tabular) JSONObject insert mode.
     *
     * @param adapter
     *     The associated {@link MilvusDriverAdapter}
     * @param op
     *     The {@link ParsedOp} encapsulating the activity for this cycle
     * @param targetFunction
     *     A LongFunction that returns the target Milvus collection name for this Op
     */
    public MilvusInsertRowsOpDispenser(MilvusDriverAdapter adapter,
                                       ParsedOp op,
                                       LongFunction<String> targetFunction) {
        super(adapter, op, targetFunction);
    }

    /**
     * Build the per-cycle {@link InsertRowsParam} function. The 'rows' op field
     * is mandatory for this op type; its absence is a template configuration
     * error, so it is reported as an {@link OpConfigError} rather than a
     * generic runtime failure.
     */
    @Override
    public LongFunction<InsertRowsParam> getParamFunc(LongFunction<MilvusServiceClient> clientF, ParsedOp op,
                                                      LongFunction<String> targetF) {
        // Bind the builder to the target collection resolved for each cycle.
        LongFunction<InsertRowsParam.Builder> f =
            l -> InsertRowsParam.newBuilder().withCollectionName(targetF.apply(l));
        LongFunction<List<JSONObject>> rowsF = MilvusOpUtils
            .getHighLevelRowsFunction(op, "rows")
            .orElseThrow(() -> new OpConfigError(
                "rows must be provided for op template '" + op.getName() + "'"));
        LongFunction<InsertRowsParam.Builder> finalF1 = f;
        f = l -> finalF1.apply(l).withRows(rowsF.apply(l));
        LongFunction<InsertRowsParam.Builder> finalF = f;
        return l -> finalF.apply(l).build();
    }

    /**
     * Wrap the resolved parameters and client into a per-cycle
     * {@link MilvusInsertRowsOp}.
     */
    @Override
    public LongFunction<MilvusBaseOp<InsertRowsParam>> createOpFunc(
        LongFunction<InsertRowsParam> paramF,
        LongFunction<MilvusServiceClient> clientF,
        ParsedOp op, LongFunction<String> targetF
    ) {
        return l -> new MilvusInsertRowsOp(clientF.apply(l), paramF.apply(l));
    }
}

View File

@ -43,7 +43,8 @@ public class MilvusLoadCollectionOpDispenser extends MilvusBaseOpDispenser<LoadC
LongFunction<LoadCollectionParam.Builder> ebF =
l -> LoadCollectionParam.newBuilder().withCollectionName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"database_name",String.class,LoadCollectionParam.Builder::withDatabaseName);
ebF = op.enhanceFuncOptionally(ebF,List.of("database_name","database"),String.class,
LoadCollectionParam.Builder::withDatabaseName);
ebF = op.enhanceFuncOptionally(ebF,"refresh",Boolean.class,LoadCollectionParam.Builder::withRefresh);
ebF = op.enhanceFuncOptionally(ebF,"sync_load",Boolean.class,LoadCollectionParam.Builder::withSyncLoad);

View File

@ -50,11 +50,17 @@ public class MilvusLoadPartitionsOpDispenser extends MilvusBaseOpDispenser<LoadP
LongFunction<LoadPartitionsParam.Builder> ebF =
l -> LoadPartitionsParam.newBuilder().withCollectionName(targetF.apply(l));
ebF = op.enhanceFunc(ebF, "partition_names", List.class, LoadPartitionsParam.Builder::withPartitionNames);
ebF = op.enhanceFuncOptionally(ebF, "resource_groups", List.class, LoadPartitionsParam.Builder::withResourceGroups);
ebF = op.enhanceFuncOptionally(ebF, "database_name", String.class, LoadPartitionsParam.Builder::withDatabaseName);
ebF = op.enhanceFunc(ebF, List.of("partition_names","partitions"), List.class,
LoadPartitionsParam.Builder::withPartitionNames);
ebF = op.enhanceFuncOptionally(
ebF, "resource_groups", List.class,
LoadPartitionsParam.Builder::withResourceGroups
);
ebF = op.enhanceFuncOptionally(
ebF, List.of("database_name","database"), String.class,
LoadPartitionsParam.Builder::withDatabaseName
);
ebF = op.enhanceFuncOptionally(ebF, "refresh", Boolean.class, LoadPartitionsParam.Builder::withRefresh);
ebF = op.enhanceFuncOptionally(ebF, "database_name", String.class, LoadPartitionsParam.Builder::withDatabaseName);
ebF = op.enhanceFuncOptionally(ebF, "replica_number", Integer.class, LoadPartitionsParam.Builder::withReplicaNumber);
ebF = op.enhanceFuncOptionally(ebF,"sync_load",Boolean.class,LoadPartitionsParam.Builder::withSyncLoad);
ebF = op.enhanceFuncOptionally(ebF,"sync_load_waiting_interval",Long.class,LoadPartitionsParam.Builder::withSyncLoadWaitingInterval);

View File

@ -42,7 +42,8 @@ public class MilvusLoadingProgressOpDispenser extends MilvusBaseOpDispenser<GetL
) {
LongFunction<GetLoadingProgressParam.Builder> ebF =
l -> GetLoadingProgressParam.newBuilder().withCollectionName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"partition_names", List.class,GetLoadingProgressParam.Builder::withPartitionNames);
ebF = op.enhanceFuncOptionally(ebF,List.of("partition_names","partitions"), List.class,
GetLoadingProgressParam.Builder::withPartitionNames);
final LongFunction<GetLoadingProgressParam.Builder> lastF = ebF;
final LongFunction<GetLoadingProgressParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -0,0 +1,99 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.milvus.opdispensers;
import com.alibaba.fastjson.JSONObject;
import io.milvus.param.dml.InsertParam;
import io.nosqlbench.adapters.api.templating.ParsedOp;
import io.nosqlbench.nb.api.errors.OpConfigError;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.LongFunction;
public class MilvusOpUtils {

    /**
     * Resolve the given op field into a per-cycle function producing the row
     * list used by the high-level (tabular) insert API. A Milvus row list can
     * be created from:
     * 1) a JSONObject (1 row),
     * 2) a Map (1 row),
     * 3) a List of JSONObject (multiple rows), or
     * 4) a List of Maps (multiple rows).
     *
     * @param op      the parsed op template
     * @param opfield the op field name holding the row data
     * @return a row-list function, or empty if the field is not defined or the
     *         probed value matches none of the supported shapes
     * @throws OpConfigError if an empty list is probed, since the element type
     *                       cannot be determined from it
     */
    public static Optional<LongFunction<List<JSONObject>>> getHighLevelRowsFunction(ParsedOp op, String opfield) {
        if (!op.isDefined(opfield)) {
            return Optional.empty();
        }
        LongFunction<Object> rowF = op.getAsRequiredFunction(opfield, Object.class);
        // Probe cycle 0 once to detect the shape of the bound value.
        Object testObject = rowF.apply(0L);
        LongFunction<List<JSONObject>> rowsF = null;
        if (testObject instanceof JSONObject) {
            rowsF = l -> List.of((JSONObject) rowF.apply(l));
        } else if (testObject instanceof Map) {
            rowsF = l -> List.of(new JSONObject((Map<String, Object>) rowF.apply(l)));
        } else if (testObject instanceof List list) {
            if (list.isEmpty()) {
                throw new OpConfigError("Unable to detect type of list object for empty list for op named '" + op.getName() + "'");
            } else if (list.get(0) instanceof JSONObject) {
                rowsF = l -> (List<JSONObject>) rowF.apply(l);
            } else if (list.get(0) instanceof Map) {
                // Each list element is its own row; wrap every Map separately
                // instead of casting the whole List to a single Map.
                rowsF = l -> {
                    List<Map<String, Object>> maps = (List<Map<String, Object>>) rowF.apply(l);
                    List<JSONObject> rows = new ArrayList<>(maps.size());
                    for (Map<String, Object> map : maps) {
                        rows.add(new JSONObject(map));
                    }
                    return rows;
                };
            }
        }
        return Optional.ofNullable(rowsF);
    }

    /**
     * Resolve the given op field into a per-cycle function producing the
     * columnar {@link InsertParam.Field} list used by the collection-level
     * insert API. Each sub-key of the field template names a column, and every
     * column value must bind to a List.
     *
     * @param op      the parsed op template
     * @param opfield the op field name holding the per-field data
     * @return a field-list function, or empty if the field is not defined
     * @throws OpConfigError if any probed field value is not a List
     */
    public static Optional<LongFunction<List<InsertParam.Field>>> getFieldsFunction(ParsedOp op, String opfield) {
        if (!op.isDefined(opfield)) {
            return Optional.empty();
        }
        ParsedOp valueTemplate = op.getAsSubOp(opfield, ParsedOp.SubOpNaming.SubKey);
        // Probe cycle 0 to validate up front that every named field is a List.
        Map<String, Object> testFieldsValues = valueTemplate.apply(0L);
        for (Map.Entry<String, Object> entry : testFieldsValues.entrySet()) {
            Object testFieldValue = entry.getValue();
            if (!(testFieldValue instanceof List<?>)) {
                throw new OpConfigError("Every value provided to a named field must be a List, not " +
                    testFieldValue.getClass().getSimpleName());
            }
        }
        return Optional.of(new FieldsFuncFromMap(valueTemplate::apply));
    }

    /**
     * Adapts a map-producing generator into the field list form required by
     * {@link InsertParam}, converting each (name, values) entry into a Field.
     */
    private record FieldsFuncFromMap(
        LongFunction<Map<String, Object>> generator
    ) implements LongFunction<List<InsertParam.Field>> {
        @Override
        public List<InsertParam.Field> apply(long value) {
            Map<String, Object> datamap = generator.apply(value);
            ArrayList<InsertParam.Field> fields = new ArrayList<>(datamap.size());
            datamap.forEach((k, v) -> {
                fields.add(new InsertParam.Field(k, (List) v));
            });
            return fields;
        }
    }
}

View File

@ -44,7 +44,8 @@ public class MilvusQueryOpDispenser extends MilvusBaseOpDispenser<QueryParam> {
LongFunction<QueryParam.Builder> ebF =
l -> QueryParam.newBuilder().withCollectionName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"partition_names", List.class,QueryParam.Builder::withPartitionNames);
ebF = op.enhanceFuncOptionally(ebF,List.of("partition_names","partitions"), List.class,
QueryParam.Builder::withPartitionNames);
ebF = op.enhanceEnumOptionally(ebF,"consistency_level", ConsistencyLevelEnum.class, QueryParam.Builder::withConsistencyLevel);
ebF = op.enhanceFuncOptionally(ebF,"expr",String.class,QueryParam.Builder::withExpr);
ebF = op.enhanceFuncOptionally(ebF,"limit",Long.class,QueryParam.Builder::withLimit);

View File

@ -47,7 +47,8 @@ public class MilvusReleasePartitionsOpDispenser extends MilvusBaseOpDispenser<Re
LongFunction<ReleasePartitionsParam.Builder> finalEbF = ebF;
ebF = l -> finalEbF.apply(l).withPartitionNames(partNamesF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"collection_name",String.class,ReleasePartitionsParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF,List.of("collection_name","collection"),String.class,
ReleasePartitionsParam.Builder::withCollectionName);
final LongFunction<ReleasePartitionsParam.Builder> lastF = ebF;
final LongFunction<ReleasePartitionsParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -47,7 +47,8 @@ public class MilvusSearchOpDispenser extends MilvusBaseOpDispenser<SearchParam>
LongFunction<SearchParam.Builder> ebF =
l -> SearchParam.newBuilder().withCollectionName(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"partition_names",List.class,SearchParam.Builder::withPartitionNames);
ebF = op.enhanceFuncOptionally(ebF,List.of("partition_names","partitions"),List.class,
SearchParam.Builder::withPartitionNames);
ebF = op.enhanceFuncOptionally(ebF,"out_fields",List.class,SearchParam.Builder::withOutFields);

View File

@ -47,7 +47,8 @@ public class MilvusShowCollectionsOpDispenser extends MilvusBaseOpDispenser<Show
LongFunction<List<String>> collectionsF = l -> MilvusUtils.splitNames(targetF.apply(l));
LongFunction<ShowCollectionsParam.Builder> finalEbF = ebF;
ebF = l -> finalEbF.apply(l).withCollectionNames(collectionsF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"database_name",String.class,ShowCollectionsParam.Builder::withDatabaseName);
ebF = op.enhanceFuncOptionally(ebF,List.of("database_name","database"),String.class,
ShowCollectionsParam.Builder::withDatabaseName);
ebF = op.enhanceEnumOptionally(ebF,"show_type", ShowType.class,ShowCollectionsParam.Builder::withShowType);
logger.warn(this.getClass().getSimpleName() + " is deprecated, use get_loading_progress instead");

View File

@ -48,7 +48,8 @@ public class MilvusShowPartitionsOpDispenser extends MilvusBaseOpDispenser<ShowP
LongFunction<List<String>> partitionsF = l -> MilvusUtils.splitNames(targetF.apply(l));
LongFunction<ShowPartitionsParam.Builder> finalEbF = ebF;
ebF = l -> finalEbF.apply(l).withPartitionNames(partitionsF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"collection_name",String.class,ShowPartitionsParam.Builder::withCollectionName);
ebF = op.enhanceFuncOptionally(ebF,List.of("collection_name","collection"),String.class,
ShowPartitionsParam.Builder::withCollectionName);
final LongFunction<ShowPartitionsParam.Builder> lastF = ebF;
final LongFunction<ShowPartitionsParam> collectionParamF = l -> lastF.apply(l).build();
return collectionParamF;

View File

@ -42,7 +42,7 @@ public class MilvusUpdateCredentialOpDispenser extends MilvusBaseOpDispenser<Upd
LongFunction<UpdateCredentialParam.Builder> ebF =
l -> UpdateCredentialParam.newBuilder().withUsername(targetF.apply(l));
ebF = op.enhanceFuncOptionally(ebF,"old_password",String.class,UpdateCredentialParam.Builder::withOldPassword);
ebF = op.enhanceFuncOptionally(ebF,"nwe_password",String.class,UpdateCredentialParam.Builder::withNewPassword);
ebF = op.enhanceFuncOptionally(ebF,"new_password",String.class,UpdateCredentialParam.Builder::withNewPassword);
final LongFunction<UpdateCredentialParam.Builder> lastF = ebF;
final LongFunction<UpdateCredentialParam> collectionParamF = l -> lastF.apply(l).build();

View File

@ -0,0 +1,42 @@
/*
* Copyright (c) 2024 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.adapter.milvus.ops;
import io.milvus.client.MilvusServiceClient;
import io.milvus.param.R;
import io.milvus.param.dml.InsertParam;
import io.milvus.param.highlevel.dml.InsertRowsParam;
import io.milvus.param.highlevel.dml.response.InsertResponse;
import io.nosqlbench.adapters.api.templating.ParsedOp;
public class MilvusInsertRowsOp extends MilvusBaseOp<InsertRowsParam> {
    /**
     * Create a new {@link ParsedOp} encapsulating a call to the Milvus/Zilliz client's
     * high-level, row-oriented insert method
     *
     * @param client The associated {@link MilvusServiceClient} used to communicate with the database
     * @param request The {@link InsertRowsParam} built for this operation
     */
    public MilvusInsertRowsOp(MilvusServiceClient client, InsertRowsParam request) {
        super(client,request);
    }

    // Executes the row-oriented insert via the client; the cycle value is
    // unused here because the request was fully built by the dispenser.
    @Override
    public R<InsertResponse> applyOp(long value) {
        return client.insert(request);
    }
}

View File

@ -45,7 +45,8 @@ public enum MilvusOpType {
get_persistent_segment_info(Collection),
get_query_segment_info(Collection),
get_replicas(Collection),
insert(Collection),
insert_fields(Collection),
insert_rows(Collection), // Added because of generic signature causing ambiguous patterns on insert(...)
load_collection(Collection),
release_collection(Collection),
show_collections(Collection),

View File

@ -3,6 +3,10 @@ description: |
This is a template for live vector search testing.
Template Variables:
TEMPLATE(milvushost,localhost)
TEMPLATE(datafile)
TEMPLATE(database,baselines)
TEMPLATE(trainsize)
TEMPLATE(testsize)
schema: Install the schema required to run the test
rampup: Measure how long it takes to load a set of embeddings
@ -15,19 +19,19 @@ description: |
scenarios:
default:
drop: >-
run tags==block:drop errors===stop cycles===UNDEF threads===UNDEF
database_name=baselines uri="http://TEMPLATE(milvushost):19530" token=root:Milvus
run tags==block:drop errors===warn cycles===UNDEF threads===UNDEF
uri=http://TEMPLATE(milvushost):19530 database=TEMPLATE(database) token=root:Milvus
schema: >-
run tags==block:schema errors===stop cycles===2 threads===1
database_name=baselines uri="http://TEMPLATE(milvushost):19530" token=root:Milvus
run tags==block:schema errors===stop cycles===UNDEF threads===1
uri=http://TEMPLATE(milvushost):19530 database=TEMPLATE(database) token=root:Milvus
rampup: >-
run tags==block:rampup errors=counter,warn
cycles===TEMPLATE(trainsize,100) threads===TEMPLATE(rampup_threads,10)
database_name=baselines uri="http://TEMPLATE(milvushost):19530" token=root:Milvus
cycles===TEMPLATE(rampup_cycles,TEMPLATE(trainsize,100)) threads===TEMPLATE(rampup_threads,10)
uri=http://TEMPLATE(milvushost):19530 database=TEMPLATE(database) token=root:Milvus
search: >-
run tags==block:search errors=counter,warn
cycles===TEMPLATE(testsize,100) threads===TEMPLATE(search_threads,10)
database_name=baselines uri="http://TEMPLATE(milvushost):19530" token=root:Milvus
cycles===TEMPLATE(search_cycles,TEMPLATE(testsize,100)) threads===TEMPLATE(search_threads,10)
uri=http://TEMPLATE(milvushost):19530 database=TEMPLATE(database) token=root:Milvus
params:
driver: milvus
@ -36,15 +40,15 @@ params:
bindings:
row_key: ToString()
# filetype=hdf5 for TEMPLATE(filetype,hdf5)
test_floatlist_hdf5: HdfFileToFloatList("testdata/TEMPLATE(datafile).hdf5", "/test"); ToCqlVector();
test_floatlist_hdf5: HdfFileToFloatList("testdata/TEMPLATE(datafile).hdf5", "/test");
relevant_indices_hdf5: HdfFileToIntArray("testdata/TEMPLATE(datafile).hdf5", "/neighbors")
distance_floatlist_hdf5: HdfFileToFloatList("testdata/TEMPLATE(datafile).hdf5", "/distance")
train_floatlist_hdf5: HdfFileToFloatList("testdata/TEMPLATE(datafile).hdf5", "/train"); ToCqlVector();
train_floatlist_hdf5: HdfFileToFloatList("testdata/TEMPLATE(datafile).hdf5", "/train");
# filetype=fvec for TEMPLATE(filetype,fvec)
test_floatlist_fvec: FVecReader("testdata/TEMPLATE(datafile)_TEMPLATE(trainsize)_query_vectors.fvec"); ToCqlVector();
test_floatlist_fvec: FVecReader("testdata/TEMPLATE(datafile)_TEMPLATE(trainsize)_query_vectors.fvec");
relevant_indices_fvec: IVecReader("testdata/TEMPLATE(datafile)_TEMPLATE(trainsize)_indices_query.ivec");
distance_floatlist_fvec: FVecReader("testdata/TEMPLATE(datafile)_TEMPLATE(testsize)_distances_count.fvec",TEMPLATE(dimensions),0);
train_floatlist_fvec: FVecReader("testdata/TEMPLATE(datafile)_TEMPLATE(trainsize)_base_vectors.fvec",TEMPLATE(dimensions),0); ToCqlVector();
train_floatlist_fvec: FVecReader("testdata/TEMPLATE(datafile)_TEMPLATE(trainsize)_base_vectors.fvec",TEMPLATE(dimensions),0);
# synthetic
# synthetic_vectors: HashedFloatVectors(TEMPLATE(dimensions));
@ -54,33 +58,36 @@ blocks:
# https://milvus.io/api-reference/java/v2.3.x/Collection/dropCollection().md
drop_col_op:
drop_collection: "TEMPLATE(collection,vector)"
database_name: "TEMPLATE(database)"
# https://milvus.io/api-reference/java/v2.3.x/Index/dropIndex().md
drop_index_op:
drop_index: "TEMPLATE(collection,vector)_TEMPLATE(vector_field,value)_idx"
collection_name: "TEMPLATE(collection,vector)"
database_name: "TEMPLATE(database)"
drop_db_op:
drop_database: "TEMPLATE(database)"
schema:
ops:
create_db_op:
create_database: "TEMPLATE(database)"
# https://milvus.io/api-reference/java/v2.3.x/Collection/createCollection().md
create_col_op:
create_collection: "TEMPLATE(collection,vector)"
shards_num: TEMPLATE(shards_number,0)
description: "TEMPLATE(desc,a simple milvus/zilliz vector collection)"
consistency_level: "BOUNDED"
field_types:
- field_1:
name: "key"
primary_key: true
description: "primary/part key of the collection"
data_type: "VarChar"
auto_id: false
partition_key: true
- field_2:
name: "value"
primary_key: false
description: "vector column/field"
data_type: "FloatVector"
dimension: TEMPLATE(vec_dimension,25)
key:
primary_key: true
description: "primary/part key of the collection"
data_type: "VarChar"
max_length: 1024
auto_id: false
# partition_key: true
value:
primary_key: false
description: "vector column/field"
data_type: "FloatVector"
dimension: TEMPLATE(vec_dimension,25)
# https://milvus.io/api-reference/java/v2.3.x/Index/createIndex().md
create_index_op:

View File

@ -172,7 +172,8 @@ public abstract class BaseOpDispenser<T extends Op, S> extends NBBaseComponent i
return verifierFunctions;
}
String getOpName() {
@Override
public String getOpName() {
return this.opName;
}
@ -225,4 +226,6 @@ public abstract class BaseOpDispenser<T extends Op, S> extends NBBaseComponent i
return this.labels;
}
}

View File

@ -86,4 +86,5 @@ public interface OpDispenser<T> extends LongFunction<T>, OpResultTracker {
CycleFunction<Boolean> getVerifier();
String getOpName();
}

View File

@ -193,7 +193,11 @@ public interface DriverAdapter<OPTYPE extends Op, SPACETYPE> extends NBComponent
}
default String getAdapterName() {
return this.getClass().getAnnotation(Service.class).selector();
Service svc = this.getClass().getAnnotation(Service.class);
if (svc==null) {
throw new RuntimeException("The Service annotation for adapter of type " + this.getClass().getCanonicalName() + " is missing.");
}
return svc.selector();
}
default Maturity getAdapterMaturity() {

View File

@ -60,7 +60,7 @@ public class ActivityMetricProgressMeter implements ProgressMeterDisplay, Comple
@Override
public double getCurrentValue() {
return activity.getInstrumentation().getOrCreateCyclesServiceTimer().getCount();
return activity.getInstrumentation().getOrCreateBindTimer().getCount();
}
@Override

View File

@ -0,0 +1,35 @@
/*
* Copyright (c) 2022-2023 nosqlbench
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nosqlbench.engine.api.activityapi.errorhandling.modular.handlers;
import io.nosqlbench.engine.api.activityapi.errorhandling.modular.ErrorDetail;
import io.nosqlbench.engine.api.activityapi.errorhandling.modular.ErrorHandler;
import io.nosqlbench.nb.annotations.Service;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@Service(value= ErrorHandler.class, selector="error")
public class ErrorErrorHandler implements ErrorHandler {
    private final static Logger logger = LogManager.getLogger("ERRORS");

    /**
     * Log the error at ERROR level on the dedicated "ERRORS" logger and pass
     * the error detail through unchanged, leaving disposition to other handlers.
     *
     * @param name             the name of the error category
     * @param t                the thrown error
     * @param cycle            the cycle during which the error occurred
     * @param durationInNanos  how long the failing operation ran
     * @param detail           the classified error detail, returned as-is
     * @return the unmodified error detail
     */
    @Override
    public ErrorDetail handleError(String name, Throwable t, long cycle, long durationInNanos, ErrorDetail detail) {
        // Parameterized logging avoids eager string concatenation when the
        // ERROR level is disabled on this logger.
        logger.error("error with cycle {} errmsg: {}", cycle, t.getMessage());
        return detail;
    }
}

View File

@ -442,31 +442,36 @@ public class SimpleActivity extends NBStatusComponent implements Activity, Invok
for (int i = 0; i < pops.size(); i++) {
long ratio = ratios.get(i);
ParsedOp pop = pops.get(i);
if (0 == ratio) {
logger.info(() -> "skipped mapping op '" + pop.getName() + '\'');
continue;
}
DriverAdapter<?,?> adapter = adapters.get(i);
OpMapper<? extends Op> opMapper = adapter.getOpMapper();
OpDispenser<? extends Op> dispenser = opMapper.apply(pop);
try {
if (0 == ratio) {
logger.info(() -> "skipped mapping op '" + pop.getName() + '\'');
continue;
}
String dryrunSpec = pop.takeStaticConfigOr("dryrun", "none");
if ("op".equalsIgnoreCase(dryrunSpec)) {
dispenser = new DryRunOpDispenserWrapper((DriverAdapter<Op,Object>)adapter, pop, dispenser);
dryrunCount++;
} else if ("emit".equalsIgnoreCase(dryrunSpec)) {
dispenser = new EmitterOpDispenserWrapper(
(DriverAdapter<Op,Object>)adapter,
pop,
(OpDispenser<? extends CycleOp<?>>) dispenser
);
}
DriverAdapter<?, ?> adapter = adapters.get(i);
OpMapper<? extends Op> opMapper = adapter.getOpMapper();
OpDispenser<? extends Op> dispenser = opMapper.apply(pop);
String dryrunSpec = pop.takeStaticConfigOr("dryrun", "none");
if ("op".equalsIgnoreCase(dryrunSpec)) {
dispenser = new DryRunOpDispenserWrapper((DriverAdapter<Op, Object>) adapter, pop, dispenser);
dryrunCount++;
} else if ("emit".equalsIgnoreCase(dryrunSpec)) {
dispenser = new EmitterOpDispenserWrapper(
(DriverAdapter<Op, Object>) adapter,
pop,
(OpDispenser<? extends CycleOp<?>>) dispenser
);
}
// if (strict) {
// optemplate.assertConsumed();
// }
planner.addOp((OpDispenser<? extends O>) dispenser, ratio);
planner.addOp((OpDispenser<? extends O>) dispenser, ratio);
} catch (Exception e) {
throw new OpConfigError("Error while mapping op from template named '" + pop.getName() + "': " + e.getMessage(),e);
}
}
if (0 < dryrunCount) {
logger.warn("initialized {} op templates for dry run only. These ops will be synthesized for each cycle, but will not be executed.", dryrunCount);
@ -475,7 +480,11 @@ public class SimpleActivity extends NBStatusComponent implements Activity, Invok
return planner.resolve();
} catch (Exception e) {
throw new OpConfigError(e.getMessage(), workloadSource, e);
if (e instanceof OpConfigError oce) {
throw oce;
} else {
throw new OpConfigError(e.getMessage(), workloadSource, e);
}
}

View File

@ -73,14 +73,15 @@ public class StandardAction<A extends StandardActivity<R, ?>, R extends Op> impl
@Override
public int runCycle(long cycle) {
OpDispenser<? extends Op> dispenser;
OpDispenser<? extends Op> dispenser=null;
Op op = null;
try (Timer.Context ct = bindTimer.time()) {
dispenser = opsequence.apply(cycle);
op = dispenser.apply(cycle);
} catch (Exception e) {
throw new RuntimeException("while binding request in cycle " + cycle + ": " + e.getMessage(), e);
throw new RuntimeException("while binding request in cycle " + cycle + " for op template named '" + (dispenser!=null?dispenser.getOpName():"NULL")+
"': " + e.getMessage(), e);
}
int code = 0;

View File

@ -633,34 +633,6 @@
</configuration>
</plugin>
<!-- Javadoc -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<release>21</release>
<doctitle>${javadoc.name}</doctitle>
<windowtitle>${javadoc.name}</windowtitle>
<detectJavaApiLink>false</detectJavaApiLink>
<detectLinks>false</detectLinks>
<detectOfflineLinks>false</detectOfflineLinks>
<!-- <additionalparam>-Xdoclint:none</additionalparam>-->
<additionalOptions>
<additionalOption>-Xdoclint:none</additionalOption>
</additionalOptions>
<!-- <additionalJOption>-Xdoclint:none</additionalJOption>-->
<doclint>none</doclint>
</configuration>
<executions>
<execution>
<id>attach-javadoc</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Sources -->
<plugin>
@ -896,5 +868,59 @@
</build>
</profile>
<profile>
<id>javadoc</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<build>
<plugins>
<!-- Javadoc -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<release>21</release>
<doctitle>${javadoc.name}</doctitle>
<windowtitle>${javadoc.name}</windowtitle>
<detectJavaApiLink>false</detectJavaApiLink>
<detectLinks>false</detectLinks>
<detectOfflineLinks>false</detectOfflineLinks>
<!-- <additionalparam>-Xdoclint:none</additionalparam>-->
<additionalOptions>
<additionalOption>-Xdoclint:none</additionalOption>
</additionalOptions>
<!-- <additionalJOption>-Xdoclint:none</additionalJOption>-->
<doclint>none</doclint>
</configuration>
<executions>
<execution>
<id>attach-javadoc</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- <plugin>-->
<!-- <groupId>org.apache.maven.plugins</groupId>-->
<!-- <artifactId>maven-javadoc-plugin</artifactId>-->
<!-- <version>3.4.1</version>-->
<!-- <configuration>-->
<!-- </configuration>-->
<!-- &lt;!&ndash; NO Executions by default for main project builds.-->
<!-- This plugin should be called directly on the command line-->
<!-- The site plugin appears to be broken or at least not properly maintained,-->
<!-- so invoking via the site phase is more trouble than it is worth.-->
<!-- &ndash;&gt;-->
<!-- &lt;!&ndash; <executions>&ndash;&gt;-->
<!-- &lt;!&ndash; </executions>&ndash;&gt;-->
<!-- </plugin>-->
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@ -49,13 +49,7 @@
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-tcp</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-mongodb</artifactId>
<artifactId>adapter-diag</artifactId>
<version>${revision}</version>
</dependency>
@ -65,80 +59,10 @@
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-diag</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-dynamodb</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-cqld4</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-http</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-pulsar</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-s4j</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-kafka</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-amqp</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-jdbc</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-pinecone</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-aws-opensearch</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-milvus</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
<build>
<resources>
<resource>
@ -200,19 +124,6 @@
</build>
<profiles>
<profile>
<id>with-mongodb</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-mongodb</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>build-nb5-appimage</id>
<activation>
@ -271,5 +182,175 @@
</plugins>
</build>
</profile>
<profile>
<id>adapter-opensearch</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-aws-opensearch</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-cqld4</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-cqld4</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-s4j</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-s4j</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-pinecone</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-pinecone</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-mongodb</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-mongodb</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-tcp</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-tcp</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-dynamodb</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-dynamodb</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-http</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-http</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-pulsar</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-pulsar</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-kafka</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-kafka</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-amqp</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-amqp</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-jdbc</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-jdbc</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>adapter-milvus</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapter-milvus</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</profile>
</profiles>
</project>

155
pom.xml
View File

@ -14,7 +14,9 @@
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>nosqlbench</artifactId>
@ -40,7 +42,7 @@
<!-- TODO - consider using properties in the module defs. -->
<!-- CORE MODULES -->
<module>mvn-defaults</module>
<!-- <module>nb5-proof</module>-->
<!-- <module>nb5-proof</module>-->
<module>nb5</module>
<module>nbr</module>
<module>nbr-examples</module>
@ -56,19 +58,6 @@
<!-- driver modules -->
<module>adapter-diag</module>
<module>adapter-stdout</module>
<module>adapter-cqld4</module>
<module>adapter-http</module>
<module>adapter-tcp</module>
<module>adapter-dynamodb</module>
<module>adapter-mongodb</module>
<module>adapter-pulsar</module>
<module>adapter-s4j</module>
<module>adapter-kafka</module>
<module>adapter-amqp</module>
<module>adapter-jdbc</module>
<module>adapter-pinecone</module>
<module>adapter-aws-opensearch</module>
<module>adapter-milvus</module>
<!-- VIRTDATA MODULES -->
<module>virtdata-api</module>
@ -86,6 +75,127 @@
<module>docsys</module>
</modules>
<profiles>
<profile>
<id>adapter-http</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-http</module>
</modules>
</profile>
<profile>
<id>adapter-tcp</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-tcp</module>
</modules>
</profile>
<profile>
<id>adapter-dynamodb</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-dynamodb</module>
</modules>
</profile>
<profile>
<id>adapter-pulsar</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-pulsar</module>
</modules>
</profile>
<profile>
<id>adapter-kafka</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-kafka</module>
</modules>
</profile>
<profile>
<id>adapter-amqp</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-amqp</module>
</modules>
</profile>
<profile>
<id>adapter-jdbc</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-jdbc</module>
</modules>
</profile>
<profile>
<id>adapter-milvus</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-milvus</module>
</modules>
</profile>
<profile>
<id>adapter-pinecone</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-pinecone</module>
</modules>
</profile>
<profile>
<id>adapter-mongodb</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-mongodb</module>
</modules>
</profile>
<profile>
<id>adapter-opensearch</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-aws-opensearch</module>
</modules>
</profile>
<profile>
<id>adapter-cqld4</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-cqld4</module>
</modules>
</profile>
<profile>
<id>adapter-s4j</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>adapter-s4j</module>
</modules>
</profile>
</profiles>
<licenses>
<license>
<name>The Apache License, Version 2.0</name>
@ -105,21 +215,6 @@
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.4.1</version>
<configuration>
</configuration>
<!-- NO Executions by default for main project builds.
This plugin should be called directly on the command line
The site plugin appears to be broken or at least not properly maintained,
so invoking via the site phase is more trouble than it is worth.
-->
<!-- <executions>-->
<!-- </executions>-->
</plugin>
</plugins>
</build>