Add unit tests, correct NBConfiguration creation, and update to a single filter only

Mark Wolters 2023-05-16 22:43:16 +00:00 committed by Madhavan
parent de7ef54cf2
commit 8b30d6251a
5 changed files with 82 additions and 56 deletions

File: PineconeSpace.java

@@ -3,6 +3,7 @@ package io.nosqlbench.adapter.pinecone;
 import io.nosqlbench.api.config.standard.ConfigModel;
 import io.nosqlbench.api.config.standard.NBConfigModel;
 import io.nosqlbench.api.config.standard.NBConfiguration;
+import io.nosqlbench.api.config.standard.Param;
 import io.pinecone.PineconeClient;
 import io.pinecone.PineconeClientConfig;
 import io.pinecone.PineconeConnection;
@@ -65,7 +66,21 @@ public class PineconeSpace {
     }
     public static NBConfigModel getConfigModel() {
-        return ConfigModel.of(PineconeSpace.class);
+        return ConfigModel.of(PineconeSpace.class)
+            .add(
+                Param.required("apiKey",String.class)
+                    .setDescription("the Pinecone API key to use to connect to the database")
+            )
+            .add(
+                Param.defaultTo("environment","us-east-1-aws")
+                    .setDescription("the environment in which the desired index is running.")
+            )
+            .add(
+                Param.defaultTo("projectName","default")
+                    .setDescription("the project name associated with the desired index")
+            )
+            .asReadOnly();
     }
 }
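For reference, a minimal sketch of how the resulting config model might be exercised: apply() takes the supplied map (apiKey is required; environment and projectName fall back to their defaults), and the values can then be read from the NBConfiguration. The get(String, Class) accessor shown is an assumption for illustration, not part of this commit.

    // Illustrative only; accessor signature assumed, key value is a placeholder.
    NBConfiguration cfg = PineconeSpace.getConfigModel().apply(Map.of(
        "apiKey", "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"   // required, no default
    ));
    String environment = cfg.get("environment", String.class);  // "us-east-1-aws" by default
    String projectName = cfg.get("projectName", String.class);  // "default" by default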

File: PineconeDeleteOpDispenser.java

@@ -1,5 +1,7 @@
 package io.nosqlbench.adapter.pinecone.opdispensers;
+import com.google.protobuf.Struct;
+import com.google.protobuf.Value;
 import io.nosqlbench.adapter.pinecone.PineconeDriverAdapter;
 import io.nosqlbench.adapter.pinecone.PineconeSpace;
 import io.nosqlbench.adapter.pinecone.ops.PineconeDeleteOp;
@@ -9,9 +11,7 @@ import io.pinecone.proto.DeleteRequest;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Optional;
+import java.util.*;
 import java.util.function.LongFunction;
@@ -41,13 +41,6 @@ public class PineconeDeleteOpDispenser extends PineconeOpDispenser {
             deleteRequestFunc.apply(value));
     }
-    /*
-     * return DeleteRequest.newBuilder()
-     *     .setNamespace(namespace)
-     *     .addAllIds(Arrays.asList(idsToDelete))
-     *     .setDeleteAll(false)
-     *     .build();
-     */
     private LongFunction<DeleteRequest> createDeleteRequestFunction(ParsedOp op) {
         LongFunction<DeleteRequest.Builder> rFunc = l -> DeleteRequest.newBuilder();
@@ -76,22 +69,22 @@ public class PineconeDeleteOpDispenser extends PineconeOpDispenser {
             rFunc = l -> finalFunc.apply(l).setDeleteAll(af.apply(l));
         }
-        //TODO: Add filters
+        Optional<LongFunction<String>> filterFunction = op.getAsOptionalFunction("filter", String.class);
+        if (filterFunction.isPresent()) {
+            LongFunction<DeleteRequest.Builder> finalFunc = rFunc;
+            LongFunction<Struct> builtFilter = l -> {
+                String[] filterFields = filterFunction.get().apply(l).split(" ");
+                return Struct.newBuilder().putFields(filterFields[0],
+                    Value.newBuilder().setStructValue(Struct.newBuilder().putFields(filterFields[1],
+                        Value.newBuilder().setNumberValue(Integer.valueOf(filterFields[2])).build()))
+                        .build()).build();
+            };
+            rFunc = l -> finalFunc.apply(l).setFilter(builtFilter.apply(l));
+        }
         LongFunction<DeleteRequest.Builder> finalRFunc = rFunc;
         return l -> finalRFunc.apply(l).build();
     }
-    /* private LongFunction<Collection<AttributeDefinition>> resolveAttributeDefinitionFunction(ParsedOp cmd) {
-        LongFunction<? extends Map> attrsmap = cmd.getAsRequiredFunction("Attributes", Map.class);
-        return (long l) -> {
-            List<AttributeDefinition> defs = new ArrayList<>();
-            attrsmap.apply(l).forEach((k, v) -> {
-                defs.add(new AttributeDefinition(k.toString(), ScalarAttributeType.valueOf(v.toString())));
-            });
-            return defs;
-        };
-    }*/
 }
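To make the new single-filter form concrete: the op's "filter" value is a whitespace-delimited "field operator value" string, which the code above splits into three tokens and folds into a nested protobuf Struct. A minimal sketch of the request produced for the filter "value $lt 2" (namespace and ids are arbitrary example values, not from this commit):

    // Sketch of what the builder chain above yields for "value $lt 2".
    Struct filter = Struct.newBuilder()
        .putFields("value", Value.newBuilder()
            .setStructValue(Struct.newBuilder()
                .putFields("$lt", Value.newBuilder().setNumberValue(2).build()))
            .build())
        .build();
    DeleteRequest request = DeleteRequest.newBuilder()
        .setNamespace("example_namespace")
        .addAllIds(Arrays.asList("id1", "id2"))
        .setDeleteAll(false)
        .setFilter(filter)
        .build();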

File: PineconeDescribeIndexStatsOpDispenser.java

@@ -1,5 +1,7 @@
 package io.nosqlbench.adapter.pinecone.opdispensers;
+import com.google.protobuf.Struct;
+import com.google.protobuf.Value;
 import io.nosqlbench.adapter.pinecone.PineconeDriverAdapter;
 import io.nosqlbench.adapter.pinecone.PineconeSpace;
 import io.nosqlbench.adapter.pinecone.ops.PineconeDescribeIndexStatsOp;
@@ -10,6 +12,7 @@ import jakarta.ws.rs.NotSupportedException;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
+import java.util.Optional;
 import java.util.function.LongFunction;
 public class PineconeDescribeIndexStatsOpDispenser extends PineconeOpDispenser {
@@ -34,7 +37,18 @@ public class PineconeDescribeIndexStatsOpDispenser extends PineconeOpDispenser {
     private LongFunction<DescribeIndexStatsRequest> createDescribeIndexStatsRequestFunction(ParsedOp op) {
         LongFunction<DescribeIndexStatsRequest.Builder> rFunc = l -> DescribeIndexStatsRequest.newBuilder();
-        //TODO: Add filters
+        Optional<LongFunction<String>> filterFunction = op.getAsOptionalFunction("filter", String.class);
+        if (filterFunction.isPresent()) {
+            LongFunction<DescribeIndexStatsRequest.Builder> finalFunc = rFunc;
+            LongFunction<Struct> builtFilter = l -> {
+                String[] filterFields = filterFunction.get().apply(l).split(" ");
+                return Struct.newBuilder().putFields(filterFields[0],
+                    Value.newBuilder().setStructValue(Struct.newBuilder().putFields(filterFields[1],
+                        Value.newBuilder().setNumberValue(Integer.valueOf(filterFields[2])).build()))
+                        .build()).build();
+            };
+            rFunc = l -> finalFunc.apply(l).setFilter(builtFilter.apply(l));
+        }
         LongFunction<DescribeIndexStatsRequest.Builder> finalRFunc = rFunc;
         return l -> finalRFunc.apply(l).build();
     }
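The same three-token parsing applies here, attached to the stats request instead of the delete request. Note that the comparison value passes through Integer.valueOf, so this form only supports integer comparands. A minimal sketch of the request produced for a filter string such as "year $eq 2019" (the field name is chosen for illustration only):

    // Sketch of the request produced for the filter string "year $eq 2019".
    Struct filter = Struct.newBuilder()
        .putFields("year", Value.newBuilder()
            .setStructValue(Struct.newBuilder()
                .putFields("$eq", Value.newBuilder().setNumberValue(2019).build()))
            .build())
        .build();
    DescribeIndexStatsRequest request = DescribeIndexStatsRequest.newBuilder()
        .setFilter(filter)
        .build();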

File: Pinecone ops examples (YAML)

@@ -64,25 +64,13 @@ ops:
    namespace: delete_namespace
    ids: csv_list_of_vectors_to_delete
    deleteall: [true,false]
-   filters:
-     - filter_field: delete_filter_field
-       operator: [$lt, $gt, $eq, ...]
-       comparator: query_compval
-     - filter_field: delete_filter_field
-       operator: [$lt, $gt, $eq, ...]
-       comparator: query_compval
+   filter: <field operator compval>
 # A describe index stats op. Specify metadata filters to narrow the range of indices described.
  describe-index-stats-example:
    type: describe-index-stats
    index: describe_index
-   filters:
-     - filter_field: delete_filter_field
-       operator: [$lt, $gt, $eq, ...]
-       comparator: query_compval
-     - filter_field: delete_filter_field
-       operator: [$lt, $gt, $eq, ...]
-       comparator: query_compval
+   filter: <field operator compval>
 # A pinecone fetch op
  fetch-example:

File: PineconeOpMapperTest.java

@@ -1,5 +1,7 @@
 package io.nosqlbench.adapter.pinecone;
+import io.nosqlbench.adapter.pinecone.opdispensers.PineconeDeleteOpDispenser;
+import io.nosqlbench.adapter.pinecone.opdispensers.PineconeQueryOpDispenser;
 import io.nosqlbench.adapter.pinecone.ops.PineconeOp;
 import io.nosqlbench.api.config.standard.NBConfiguration;
 import io.nosqlbench.engine.api.activityconfig.OpsLoader;
@@ -26,7 +28,10 @@ public class PineconeOpMapperTest {
     @BeforeAll
     public static void initializeTestMapper() {
-        cfg = PineconeSpace.getConfigModel().apply(Map.of());
+        Map<String,String> configMap = Map.of("apiKey","2f55b2f0-670f-4c51-9073-4d37142b761a",
+            "environment","us-east-1-aws",
+            "projectName","default");
+        cfg = PineconeSpace.getConfigModel().apply(configMap);
         adapter = new PineconeDriverAdapter();
         adapter.applyConfig(cfg);
         DriverSpaceCache<? extends PineconeSpace> cache = adapter.getSpaceCache();
@@ -36,34 +41,45 @@ public class PineconeOpMapperTest {
     private static ParsedOp parsedOpFor(String yaml) {
         OpsDocList docs = OpsLoader.loadString(yaml, OpTemplateFormat.yaml, Map.of(), null);
         OpTemplate opTemplate = docs.getOps().get(0);
-        ParsedOp parsedOp = new ParsedOp(opTemplate, cfg, List.of(adapter.getPreprocessor()));
-        return parsedOp;
+        return new ParsedOp(opTemplate, cfg, List.of(adapter.getPreprocessor()));
     }
     @Test
     public void testQueryOpDispenserSimple() {
         ParsedOp pop = parsedOpFor("""
             ops:
-              query-op1:
-                query: "test-index"
-                vector: "1.0,2.0,3.0"
-                namespace: "test-namespace"
-                top_k: 10
-                filters:
-                  - filter_field: "value"
-                    operator: "$lt"
-                    comparator: 2
+              op1:
+                type: "query"
+                index: "test-index"
+                vector: "1.0,2.0,3.0"
+                namespace: "test-namespace"
+                top_k: 10
+                filters:
+                  - "value $lt 2"
+                  - "value $gt 10"
                 include_values: true
                 include_metadata: true
             """);
         OpDispenser<? extends PineconeOp> dispenser = mapper.apply(pop);
-        //assertions go here...
+        assert(dispenser instanceof PineconeQueryOpDispenser);
     }
     @Test
     public void testDeleteOpDispenser() {
+        ParsedOp pop = parsedOpFor("""
+            ops:
+              op1:
+                type: "delete"
+                index: "test-index"
+                ids: "1.0,2.0,3.0"
+                namespace: "test-namespace"
+                deleteall: true
+                filters:
+                  - "value $lt 2"
+                  - "value $gt 10"
+            """);
+        OpDispenser<? extends PineconeOp> dispenser = mapper.apply(pop);
+        assert(dispenser instanceof PineconeDeleteOpDispenser);
     }
     @Test