diff --git a/.github/workflows/preview.yml b/.github/workflows/preview.yml index 334887968..a1a923350 100644 --- a/.github/workflows/preview.yml +++ b/.github/workflows/preview.yml @@ -79,7 +79,7 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} - name: docker test build - uses: docker/build-push-action@v5.1.0 + uses: docker/build-push-action@v5.3.0 with: context: . file: Dockerfile @@ -130,7 +130,7 @@ jobs: scripts/bump-minor-version - name: docker push to hub - uses: docker/build-push-action@v5.1.0 + uses: docker/build-push-action@v5.3.0 with: context: . platforms: linux/amd64,linux/arm64 @@ -141,7 +141,7 @@ jobs: # https://github.com/softprops/action-gh-release - name: create github release - uses: softprops/action-gh-release@v0.1.15 + uses: softprops/action-gh-release@v2.0.4 if: startsWith(github.ref, 'refs/tags/') with: # body: ${{ steps.prepare_summary.outputs.release_summary }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ca97ccbc8..2eef099fa 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -74,7 +74,7 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} - name: docker test build - uses: docker/build-push-action@v5.1.0 + uses: docker/build-push-action@v5.3.0 with: context: . file: Dockerfile @@ -115,7 +115,7 @@ jobs: scripts/bump-minor-version - name: docker push to hub - uses: docker/build-push-action@v5.1.0 + uses: docker/build-push-action@v5.3.0 with: context: . platforms: linux/amd64,linux/arm64 @@ -126,7 +126,7 @@ jobs: # https://github.com/softprops/action-gh-release - name: create github release - uses: softprops/action-gh-release@v0.1.15 + uses: softprops/action-gh-release@v2.0.4 if: startsWith(github.ref, 'refs/tags/') with: # body: ${{ steps.prepare_summary.outputs.release_summary }} diff --git a/.run/await_index local.run.xml b/.run/await_index local.run.xml new file mode 100644 index 000000000..d2d5f42a7 --- /dev/null +++ b/.run/await_index local.run.xml @@ -0,0 +1,14 @@ + + + + + + \ No newline at end of file diff --git a/.run/drop local.run.xml b/.run/drop local.run.xml new file mode 100644 index 000000000..f04c2ae4c --- /dev/null +++ b/.run/drop local.run.xml @@ -0,0 +1,14 @@ + + + + + + \ No newline at end of file diff --git a/.run/load_collection local.run.xml b/.run/load_collection local.run.xml new file mode 100644 index 000000000..78dd5b77f --- /dev/null +++ b/.run/load_collection local.run.xml @@ -0,0 +1,14 @@ + + + + + + \ No newline at end of file diff --git a/.run/rampup local.run.xml b/.run/rampup local.run.xml new file mode 100644 index 000000000..e3141bbe9 --- /dev/null +++ b/.run/rampup local.run.xml @@ -0,0 +1,14 @@ + + + + + + \ No newline at end of file diff --git a/.run/rampup_batch 100x local.run.xml b/.run/rampup_batch 100x local.run.xml new file mode 100644 index 000000000..919c2f317 --- /dev/null +++ b/.run/rampup_batch 100x local.run.xml @@ -0,0 +1,14 @@ + + + + + + \ No newline at end of file diff --git a/.run/rampup_batch 2x local.run.xml b/.run/rampup_batch 2x local.run.xml new file mode 100644 index 000000000..32de70792 --- /dev/null +++ b/.run/rampup_batch 2x local.run.xml @@ -0,0 +1,14 @@ + + + + + + \ No newline at end of file diff --git a/.run/schema local.run.xml b/.run/schema local.run.xml new file mode 100644 index 000000000..a66c0e50a --- /dev/null +++ b/.run/schema local.run.xml @@ -0,0 +1,14 @@ + + + + + + \ No newline at end of file diff --git a/.run/schema_index local.run.xml b/.run/schema_index local.run.xml new file mode 100644 index 000000000..698ce6f39 --- 
/dev/null +++ b/.run/schema_index local.run.xml @@ -0,0 +1,14 @@ + + + + + + \ No newline at end of file diff --git a/.run/search_and_verify threads=1 local.run.xml b/.run/search_and_verify threads=1 local.run.xml new file mode 100644 index 000000000..1269f7d95 --- /dev/null +++ b/.run/search_and_verify threads=1 local.run.xml @@ -0,0 +1,14 @@ + + + + + + \ No newline at end of file diff --git a/.run/search_and_verify threads=100 local.run.xml b/.run/search_and_verify threads=100 local.run.xml new file mode 100644 index 000000000..fc4a0bbe9 --- /dev/null +++ b/.run/search_and_verify threads=100 local.run.xml @@ -0,0 +1,14 @@ + + + + + + \ No newline at end of file diff --git a/adapter-amqp/src/main/java/io/nosqlbench/adapter/amqp/dispensers/AmqpMsgRecvOpDispenser.java b/adapter-amqp/src/main/java/io/nosqlbench/adapter/amqp/dispensers/AmqpMsgRecvOpDispenser.java index c5583a619..6255d4142 100644 --- a/adapter-amqp/src/main/java/io/nosqlbench/adapter/amqp/dispensers/AmqpMsgRecvOpDispenser.java +++ b/adapter-amqp/src/main/java/io/nosqlbench/adapter/amqp/dispensers/AmqpMsgRecvOpDispenser.java @@ -120,7 +120,7 @@ public class AmqpMsgRecvOpDispenser extends AmqpBaseOpDispenser { @Override - public AmqpTimeTrackOp apply(long cycle) { + public AmqpTimeTrackOp getOp(long cycle) { Channel channel = getAmqpChannelForReceiver(cycle); if (channel == null) { throw new AmqpAdapterUnexpectedException( diff --git a/adapter-amqp/src/main/java/io/nosqlbench/adapter/amqp/dispensers/AmqpMsgSendOpDispenser.java b/adapter-amqp/src/main/java/io/nosqlbench/adapter/amqp/dispensers/AmqpMsgSendOpDispenser.java index 90fabd50b..ec483b2c5 100644 --- a/adapter-amqp/src/main/java/io/nosqlbench/adapter/amqp/dispensers/AmqpMsgSendOpDispenser.java +++ b/adapter-amqp/src/main/java/io/nosqlbench/adapter/amqp/dispensers/AmqpMsgSendOpDispenser.java @@ -175,7 +175,7 @@ public class AmqpMsgSendOpDispenser extends AmqpBaseOpDispenser { } @Override - public AmqpTimeTrackOp apply(long cycle) { + public AmqpTimeTrackOp getOp(long cycle) { String msgPayload = msgPayloadFunc.apply(cycle); if (StringUtils.isBlank(msgPayload)) { throw new AmqpAdapterInvalidParamException("Message payload must be specified and can't be empty!"); diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchAdapter.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSAdapter.java similarity index 74% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchAdapter.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSAdapter.java index d959d3434..02fa55b27 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchAdapter.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSAdapter.java @@ -19,7 +19,6 @@ package io.nosqlbench.adapter.opensearch; import io.nosqlbench.adapters.api.activityimpl.OpMapper; import io.nosqlbench.adapters.api.activityimpl.uniform.BaseDriverAdapter; import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter; -import io.nosqlbench.adapters.api.activityimpl.uniform.DriverSpaceCache; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; import io.nosqlbench.nb.annotations.Service; import io.nosqlbench.nb.api.components.core.NBComponent; @@ -30,23 +29,23 @@ import io.nosqlbench.nb.api.labels.NBLabels; import java.util.function.Function; @Service(value= DriverAdapter.class, selector = "opensearch") -public class 
OpenSearchAdapter extends BaseDriverAdapter { - public OpenSearchAdapter(NBComponent parentComponent, NBLabels labels) { +public class AOSAdapter extends BaseDriverAdapter { + public AOSAdapter(NBComponent parentComponent, NBLabels labels) { super(parentComponent, labels); } @Override - public Function getSpaceInitializer(NBConfiguration cfg) { - return (String spaceName) -> new OpenSearchSpace(cfg); + public Function getSpaceInitializer(NBConfiguration cfg) { + return (String spaceName) -> new AOSSpace(cfg); } @Override public OpMapper getOpMapper() { - return new OpenSearchOpMapper(this); + return new AOSOpMapper(this); } @Override public NBConfigModel getConfigModel() { - return super.getConfigModel().add(OpenSearchSpace.getConfigModel()); + return super.getConfigModel().add(AOSSpace.getConfigModel()); } } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchAdapterLoader.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSAdapterLoader.java similarity index 81% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchAdapterLoader.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSAdapterLoader.java index f7deb5f97..08ce8d938 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchAdapterLoader.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSAdapterLoader.java @@ -22,9 +22,9 @@ import io.nosqlbench.nb.api.components.core.NBComponent; import io.nosqlbench.nb.api.labels.NBLabels; @Service(value = DriverAdapterLoader.class,selector = "opensearch") -public class OpenSearchAdapterLoader implements DriverAdapterLoader { +public class AOSAdapterLoader implements DriverAdapterLoader { @Override - public OpenSearchAdapter load(NBComponent parent, NBLabels childLabels) { - return new OpenSearchAdapter(parent, childLabels); + public AOSAdapter load(NBComponent parent, NBLabels childLabels) { + return new AOSAdapter(parent, childLabels); } } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchOpMapper.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSOpMapper.java similarity index 51% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchOpMapper.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSOpMapper.java index 392eee93d..714cfa3d8 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchOpMapper.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSOpMapper.java @@ -17,35 +17,31 @@ package io.nosqlbench.adapter.opensearch; import io.nosqlbench.adapter.opensearch.dispensers.*; -import io.nosqlbench.adapter.opensearch.ops.UpdateOp; import io.nosqlbench.adapters.api.activityimpl.OpDispenser; import io.nosqlbench.adapters.api.activityimpl.OpMapper; -import io.nosqlbench.adapters.api.activityimpl.uniform.DriverSpaceCache; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; import io.nosqlbench.adapters.api.templating.ParsedOp; import io.nosqlbench.engine.api.templating.TypeAndTarget; -import io.nosqlbench.nb.api.config.standard.NBConfiguration; -import org.opensearch.client.opensearch.OpenSearchClient; -public class OpenSearchOpMapper implements OpMapper { - private final OpenSearchAdapter adapter; +public class AOSOpMapper implements OpMapper { + private final 
AOSAdapter adapter; - public OpenSearchOpMapper(OpenSearchAdapter openSearchAdapter) { - this.adapter = openSearchAdapter; + public AOSOpMapper(AOSAdapter AOSAdapter) { + this.adapter = AOSAdapter; } @Override public OpDispenser apply(ParsedOp op) { - TypeAndTarget typeAndTarget = - op.getTypeAndTarget(OpenSearchOpTypes.class, String.class, "verb", "index"); + TypeAndTarget typeAndTarget = + op.getTypeAndTarget(AOSOpTypes.class, String.class, "verb", "index"); return switch (typeAndTarget.enumId) { - case create_index -> new CreateIndexOpDispenser(adapter, op, typeAndTarget.targetFunction); - case delete_index -> new DeleteIndexOpDispenser(adapter, op, typeAndTarget.targetFunction); - case index -> new IndexOpDispenser(adapter,op, typeAndTarget.targetFunction); - case update -> new UpdateOpDispenser(adapter,op, typeAndTarget.targetFunction); - case delete -> new DeleteOpDispenser(adapter,op, typeAndTarget.targetFunction); - case knn_search -> new KnnSearchOpDispenser(adapter,op, typeAndTarget.targetFunction); - case bulk -> new BulkOpDispenser(adapter, op, typeAndTarget.targetFunction); + case create_index -> new AOSCreateIndexOpDispenser(adapter, op, typeAndTarget.targetFunction); + case delete_index -> new AOSDeleteIndexOpDispenser(adapter, op, typeAndTarget.targetFunction); + case index -> new AOSIndexOpDispenser(adapter,op, typeAndTarget.targetFunction); + case update -> new AOSUpdateOpDispenser(adapter,op, typeAndTarget.targetFunction); + case delete -> new AOSDeleteOpDispenser(adapter,op, typeAndTarget.targetFunction); + case knn_search -> new AOSKnnSearchOpDispenser(adapter,op, typeAndTarget.targetFunction); + case bulk -> new AOSBulkOpDispenser(adapter, op, typeAndTarget.targetFunction); default -> throw new RuntimeException("Unrecognized op type '" + typeAndTarget.enumId.name() + "' while " + "mapping parsed op " + op); }; diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchOpTypes.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSOpTypes.java similarity index 95% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchOpTypes.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSOpTypes.java index 92a1beb09..5e98ad9cd 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchOpTypes.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSOpTypes.java @@ -16,7 +16,7 @@ package io.nosqlbench.adapter.opensearch; -public enum OpenSearchOpTypes { +public enum AOSOpTypes { create_index, delete_index, index, diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AwsOsServiceType.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSServiceType.java similarity index 95% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AwsOsServiceType.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSServiceType.java index 4e8f84f1d..88ddd1850 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AwsOsServiceType.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSServiceType.java @@ -16,7 +16,7 @@ package io.nosqlbench.adapter.opensearch; -public enum AwsOsServiceType { +public enum AOSServiceType { aoss, es } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchSpace.java 
b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSSpace.java similarity index 89% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchSpace.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSSpace.java index 0cadc55d0..6d83433c7 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/OpenSearchSpace.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSSpace.java @@ -17,10 +17,6 @@ package io.nosqlbench.adapter.opensearch; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.auth.profile.internal.BasicProfile; -import com.amazonaws.auth.profile.internal.ProfileKeyConstants; -import com.amazonaws.auth.profile.internal.ProfileStaticCredentialsProvider; import io.nosqlbench.nb.api.config.standard.ConfigModel; import io.nosqlbench.nb.api.config.standard.NBConfigModel; import io.nosqlbench.nb.api.config.standard.NBConfiguration; @@ -35,14 +31,13 @@ import software.amazon.awssdk.http.crt.AwsCrtAsyncHttpClient; import software.amazon.awssdk.regions.Region; import java.io.IOException; -import java.util.Map; -public class OpenSearchSpace implements AutoCloseable { +public class AOSSpace implements AutoCloseable { private final NBConfiguration cfg; protected OpenSearchClient client; - public OpenSearchSpace(NBConfiguration cfg) { + public AOSSpace(NBConfiguration cfg) { this.cfg = cfg; } @@ -75,7 +70,7 @@ public class OpenSearchSpace implements AutoCloseable { AwsSdk2TransportOptions transportOptions = transportOptionsBuilder.build(); - AwsOsServiceType svctype = AwsOsServiceType.valueOf(cfg.get("svctype")); + AOSServiceType svctype = AOSServiceType.valueOf(cfg.get("svctype")); AwsSdk2Transport awsSdk2Transport = new AwsSdk2Transport( @@ -101,7 +96,7 @@ public class OpenSearchSpace implements AutoCloseable { } public static NBConfigModel getConfigModel() { - return ConfigModel.of(OpenSearchSpace.class) + return ConfigModel.of(AOSSpace.class) .add(Param.required("region", String.class).setDescription("The region to connect to")) .add(Param.required("host", String.class).setDescription("The Open Search API endpoint host")) .add(Param.optional("profile") diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/Utils.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSUtils.java similarity index 97% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/Utils.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSUtils.java index 9d8dacc04..5e3fb7d74 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/Utils.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/AOSUtils.java @@ -20,7 +20,7 @@ import io.nosqlbench.adapter.opensearch.pojos.Doc; import org.opensearch.client.opensearch.core.SearchResponse; import org.opensearch.client.opensearch.core.search.Hit; -public class Utils { +public class AOSUtils { public static int[] DocHitsToIntIndicesArray(SearchResponse response) { int[] indices = response.hits().hits() diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/BaseOpenSearchOpDispenser.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSBaseOpDispenser.java similarity index 78% rename from 
adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/BaseOpenSearchOpDispenser.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSBaseOpDispenser.java index a58da603c..aa4f185e5 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/BaseOpenSearchOpDispenser.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSBaseOpDispenser.java @@ -16,8 +16,8 @@ package io.nosqlbench.adapter.opensearch.dispensers; -import io.nosqlbench.adapter.opensearch.OpenSearchAdapter; -import io.nosqlbench.adapter.opensearch.OpenSearchSpace; +import io.nosqlbench.adapter.opensearch.AOSAdapter; +import io.nosqlbench.adapter.opensearch.AOSSpace; import io.nosqlbench.adapters.api.activityimpl.BaseOpDispenser; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; import io.nosqlbench.adapters.api.templating.ParsedOp; @@ -25,12 +25,12 @@ import org.opensearch.client.opensearch.OpenSearchClient; import java.util.function.LongFunction; -public abstract class BaseOpenSearchOpDispenser extends BaseOpDispenser { - protected final LongFunction spaceF; +public abstract class AOSBaseOpDispenser extends BaseOpDispenser { + protected final LongFunction spaceF; protected final LongFunction clientF; private final LongFunction opF; - protected BaseOpenSearchOpDispenser(OpenSearchAdapter adapter, ParsedOp op, LongFunction targetF) { + protected AOSBaseOpDispenser(AOSAdapter adapter, ParsedOp op, LongFunction targetF) { super(adapter, op); this.spaceF =adapter.getSpaceFunc(op); this.clientF = (long l) -> this.spaceF.apply(l).getClient(); @@ -44,7 +44,7 @@ public abstract class BaseOpenSearchOpDispenser extends BaseOpDispenser targetF) { + public AOSBulkOpDispenser(AOSAdapter adapter, ParsedOp op, LongFunction targetF) { super(adapter, op, targetF); } @@ -55,8 +40,8 @@ public class BulkOpDispenser extends BaseOpenSearchOpDispenser { ParsedOp op, LongFunction targetF ) { - LongFunction func = OpenSearchRequests.bulk(op,targetF); - return l -> new BulkOp(clientF.apply(l), func.apply(l)); + LongFunction func = AOSRequests.bulk(op,targetF); + return l -> new AOSBulkOp(clientF.apply(l), func.apply(l)); } } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/BulkOpTypes.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSBulkOpTypes.java similarity index 96% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/BulkOpTypes.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSBulkOpTypes.java index 06b17b048..25a0fea52 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/BulkOpTypes.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSBulkOpTypes.java @@ -16,7 +16,7 @@ package io.nosqlbench.adapter.opensearch.dispensers; -public enum BulkOpTypes { +public enum AOSBulkOpTypes { create, index, delete, diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/CreateIndexOpDispenser.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSCreateIndexOpDispenser.java similarity index 84% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/CreateIndexOpDispenser.java rename to 
adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSCreateIndexOpDispenser.java index 0768f2d24..bdf528a0f 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/CreateIndexOpDispenser.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSCreateIndexOpDispenser.java @@ -16,26 +16,25 @@ package io.nosqlbench.adapter.opensearch.dispensers; -import io.nosqlbench.adapter.opensearch.OpenSearchAdapter; -import io.nosqlbench.adapter.opensearch.ops.CreateIndexOp; +import io.nosqlbench.adapter.opensearch.AOSAdapter; +import io.nosqlbench.adapter.opensearch.ops.AOSCreateIndexOp; import io.nosqlbench.adapters.api.templating.ParsedOp; import org.opensearch.client.json.JsonData; import org.opensearch.client.opensearch.OpenSearchClient; import org.opensearch.client.opensearch._types.mapping.*; import org.opensearch.client.opensearch.indices.CreateIndexRequest; -import org.opensearch.client.opensearch.indices.IndexSettings; import java.util.Map; import java.util.function.LongFunction; -public class CreateIndexOpDispenser extends BaseOpenSearchOpDispenser { +public class AOSCreateIndexOpDispenser extends AOSBaseOpDispenser { private final ParsedOp pop; private final int dimensions; private final int ef_construction; private final int m; - public CreateIndexOpDispenser(OpenSearchAdapter adapter, ParsedOp op, LongFunction targetF) { + public AOSCreateIndexOpDispenser(AOSAdapter adapter, ParsedOp op, LongFunction targetF) { super(adapter, op, targetF); this.pop = op; this.dimensions = pop.getStaticValue("dimensions",Integer.class).intValue(); @@ -44,8 +43,8 @@ public class CreateIndexOpDispenser extends BaseOpenSearchOpDispenser { } @Override - public LongFunction createOpFunc(LongFunction clientF, ParsedOp op, - LongFunction targetF) { + public LongFunction createOpFunc(LongFunction clientF, ParsedOp op, + LongFunction targetF) { CreateIndexRequest.Builder eb = new CreateIndexRequest.Builder(); LongFunction bfunc = l -> new CreateIndexRequest.Builder() @@ -54,7 +53,7 @@ public class CreateIndexOpDispenser extends BaseOpenSearchOpDispenser { bfunc = op.enhanceFunc(bfunc, "mappings", Map.class, this::resolveTypeMapping); LongFunction finalBfunc = bfunc; - return (long l) -> new CreateIndexOp(clientF.apply(l), finalBfunc.apply(l).build()); + return (long l) -> new AOSCreateIndexOp(clientF.apply(l), finalBfunc.apply(l).build()); } // https://opensearch.org/docs/latest/search-plugins/knn/knn-index/ diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/DeleteIndexOpDispenser.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSDeleteIndexOpDispenser.java similarity index 68% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/DeleteIndexOpDispenser.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSDeleteIndexOpDispenser.java index f852e99aa..ddd0e50b7 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/DeleteIndexOpDispenser.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSDeleteIndexOpDispenser.java @@ -16,26 +16,25 @@ package io.nosqlbench.adapter.opensearch.dispensers; -import io.nosqlbench.adapter.opensearch.OpenSearchAdapter; -import io.nosqlbench.adapter.opensearch.ops.DeleteIndexOp; +import io.nosqlbench.adapter.opensearch.AOSAdapter; +import 
io.nosqlbench.adapter.opensearch.ops.AOSDeleteIndexOp; import io.nosqlbench.adapters.api.templating.ParsedOp; import org.opensearch.client.opensearch.OpenSearchClient; import org.opensearch.client.opensearch.indices.DeleteIndexRequest; -import java.util.Map; import java.util.function.LongFunction; -public class DeleteIndexOpDispenser extends BaseOpenSearchOpDispenser { +public class AOSDeleteIndexOpDispenser extends AOSBaseOpDispenser { - public DeleteIndexOpDispenser(OpenSearchAdapter adapter, ParsedOp op, LongFunction targetF) { + public AOSDeleteIndexOpDispenser(AOSAdapter adapter, ParsedOp op, LongFunction targetF) { super(adapter, op, targetF); } @Override - public LongFunction createOpFunc(LongFunction clientF, ParsedOp op, LongFunction targetF) { + public LongFunction createOpFunc(LongFunction clientF, ParsedOp op, LongFunction targetF) { DeleteIndexRequest.Builder eb = new DeleteIndexRequest.Builder(); LongFunction f = l -> new DeleteIndexRequest.Builder().index(targetF.apply(l)); - return l -> new DeleteIndexOp(clientF.apply(l),f.apply(1).build()); + return l -> new AOSDeleteIndexOp(clientF.apply(l),f.apply(1).build()); } } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/DeleteOpDispenser.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSDeleteOpDispenser.java similarity index 59% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/DeleteOpDispenser.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSDeleteOpDispenser.java index 4383a9459..f7fab2f23 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/DeleteOpDispenser.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSDeleteOpDispenser.java @@ -16,30 +16,25 @@ package io.nosqlbench.adapter.opensearch.dispensers; -import io.nosqlbench.adapter.opensearch.OpenSearchAdapter; -import io.nosqlbench.adapter.opensearch.ops.CreateIndexOp; -import io.nosqlbench.adapter.opensearch.ops.DeleteOp; +import io.nosqlbench.adapter.opensearch.AOSAdapter; +import io.nosqlbench.adapter.opensearch.ops.AOSDeleteOp; import io.nosqlbench.adapters.api.templating.ParsedOp; -import org.opensearch.client.json.JsonData; import org.opensearch.client.opensearch.OpenSearchClient; -import org.opensearch.client.opensearch._types.mapping.*; import org.opensearch.client.opensearch.core.DeleteRequest; -import org.opensearch.client.opensearch.indices.CreateIndexRequest; -import java.util.Map; import java.util.function.LongFunction; -public class DeleteOpDispenser extends BaseOpenSearchOpDispenser { +public class AOSDeleteOpDispenser extends AOSBaseOpDispenser { - public DeleteOpDispenser(OpenSearchAdapter adapter, ParsedOp op, LongFunction targetF) { + public AOSDeleteOpDispenser(AOSAdapter adapter, ParsedOp op, LongFunction targetF) { super(adapter, op, targetF); } @Override - public LongFunction createOpFunc(LongFunction clientF, ParsedOp op, LongFunction targetF) { + public LongFunction createOpFunc(LongFunction clientF, ParsedOp op, LongFunction targetF) { DeleteRequest.Builder eb = new DeleteRequest.Builder(); LongFunction bfunc = l -> new DeleteRequest.Builder().index(targetF.apply(l)); - return (long l) -> new DeleteOp(clientF.apply(l), bfunc.apply(l).build()); + return (long l) -> new AOSDeleteOp(clientF.apply(l), bfunc.apply(l).build()); } } diff --git 
a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/IndexOpDispenser.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSIndexOpDispenser.java similarity index 74% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/IndexOpDispenser.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSIndexOpDispenser.java index d47cdae5e..e52a1354c 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/IndexOpDispenser.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSIndexOpDispenser.java @@ -18,8 +18,8 @@ package io.nosqlbench.adapter.opensearch.dispensers; import com.google.gson.Gson; import com.google.gson.GsonBuilder; -import io.nosqlbench.adapter.opensearch.OpenSearchAdapter; -import io.nosqlbench.adapter.opensearch.ops.IndexOp; +import io.nosqlbench.adapter.opensearch.AOSAdapter; +import io.nosqlbench.adapter.opensearch.ops.AOSIndexOp; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; import io.nosqlbench.adapters.api.templating.ParsedOp; import org.apache.logging.log4j.LogManager; @@ -29,20 +29,20 @@ import org.opensearch.client.opensearch.core.IndexRequest; import java.util.function.LongFunction; -public class IndexOpDispenser extends BaseOpenSearchOpDispenser { - private final static Logger logger = LogManager.getLogger(IndexOpDispenser.class); +public class AOSIndexOpDispenser extends AOSBaseOpDispenser { + private final static Logger logger = LogManager.getLogger(AOSIndexOpDispenser.class); private static Gson gson = new GsonBuilder().setPrettyPrinting().create(); private final String diag; - public IndexOpDispenser(OpenSearchAdapter adapter, ParsedOp op, LongFunction targetF) { + public AOSIndexOpDispenser(AOSAdapter adapter, ParsedOp op, LongFunction targetF) { super(adapter, op, targetF); this.diag = op.getStaticConfigOr("daig","false"); } @Override public LongFunction createOpFunc(LongFunction clientF, ParsedOp op, LongFunction targetF) { - LongFunction irqF = OpenSearchRequests.index(op); - return l -> new IndexOp(clientF.apply(l), irqF.apply(l)); + LongFunction irqF = AOSRequests.index(op); + return l -> new AOSIndexOp(clientF.apply(l), irqF.apply(l)); } } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/KnnSearchOpDispenser.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSKnnSearchOpDispenser.java similarity index 88% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/KnnSearchOpDispenser.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSKnnSearchOpDispenser.java index c4a7b6dfe..60de1800c 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/KnnSearchOpDispenser.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSKnnSearchOpDispenser.java @@ -16,9 +16,8 @@ package io.nosqlbench.adapter.opensearch.dispensers; -import io.nosqlbench.adapter.opensearch.OpenSearchAdapter; -import io.nosqlbench.adapter.opensearch.ops.KnnSearchOp; -import io.nosqlbench.adapter.opensearch.pojos.Doc; +import io.nosqlbench.adapter.opensearch.AOSAdapter; +import io.nosqlbench.adapter.opensearch.ops.AOSKnnSearchOp; import io.nosqlbench.adapters.api.templating.ParsedOp; import 
org.opensearch.client.json.JsonData; import org.opensearch.client.opensearch.OpenSearchClient; @@ -32,10 +31,10 @@ import java.util.Map; import java.util.Optional; import java.util.function.LongFunction; -public class KnnSearchOpDispenser extends BaseOpenSearchOpDispenser { +public class AOSKnnSearchOpDispenser extends AOSBaseOpDispenser { private Class schemaClass; - public KnnSearchOpDispenser(OpenSearchAdapter adapter, ParsedOp op, LongFunction targetF) { + public AOSKnnSearchOpDispenser(AOSAdapter adapter, ParsedOp op, LongFunction targetF) { super(adapter, op, targetF); String schemaClassStr = op.getStaticConfigOr("schema", "io.nosqlbench.adapter.opensearch.pojos.Doc"); try { @@ -46,7 +45,7 @@ public class KnnSearchOpDispenser extends BaseOpenSearchOpDispenser { } @Override - public LongFunction createOpFunc(LongFunction clientF, ParsedOp op,LongFunction targetF) { + public LongFunction createOpFunc(LongFunction clientF, ParsedOp op, LongFunction targetF) { LongFunction knnfunc = l -> new KnnQuery.Builder(); knnfunc = op.enhanceFuncOptionally(knnfunc, "k",Integer.class, KnnQuery.Builder::k); knnfunc = op.enhanceFuncOptionally(knnfunc, "vector", List.class, this::convertVector); @@ -64,7 +63,7 @@ public class KnnSearchOpDispenser extends BaseOpenSearchOpDispenser { .index(targetF.apply(l)) .query(new Query.Builder().knn(finalKnnfunc.apply(l).build()).build()); - return (long l) -> new KnnSearchOp(clientF.apply(l), bfunc.apply(l).build(), schemaClass); + return (long l) -> new AOSKnnSearchOp(clientF.apply(l), bfunc.apply(l).build(), schemaClass); } private LongFunction buildFilterQuery(LongFunction mapLongFunction) { diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/OpenSearchRequests.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSRequests.java similarity index 93% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/OpenSearchRequests.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSRequests.java index 5b3ac0d29..cf253f5f3 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/OpenSearchRequests.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSRequests.java @@ -35,13 +35,11 @@ import org.opensearch.client.opensearch.core.bulk.IndexOperation; import java.util.ArrayList; import java.util.List; -import java.util.Map; -import java.util.Optional; import java.util.function.LongFunction; -public class OpenSearchRequests { +public class AOSRequests { - private final static Logger logger = LogManager.getLogger(IndexOpDispenser.class); + private final static Logger logger = LogManager.getLogger(AOSIndexOpDispenser.class); private static Gson gson = new GsonBuilder().setPrettyPrinting().create(); public static LongFunction bulk(ParsedOp op, LongFunction targetF) { @@ -57,12 +55,12 @@ public class OpenSearchRequests { ParsedOp subop = op.getAsSubOp("op_template", ParsedOp.SubOpNaming.ParentAndSubKey); int repeat = subop.getStaticConfigOr("repeat", 1); - TypeAndTarget typeinfo = - subop.getTypeAndTarget(BulkOpTypes.class, String.class); + TypeAndTarget typeinfo = + subop.getTypeAndTarget(AOSBulkOpTypes.class, String.class); LongFunction bop = switch (typeinfo.enumId) { - case create -> OpenSearchRequests.createOperation(subop); - case index -> OpenSearchRequests.indexOperation(subop); + case create -> AOSRequests.createOperation(subop); + 
case index -> AOSRequests.indexOperation(subop); default -> throw new OpConfigError("Unsupported type in bulk operation: '" + typeinfo.enumId + "'"); }; @@ -116,7 +114,7 @@ public class OpenSearchRequests { func = op.enhanceFuncOptionally(func, "version", long.class, IndexRequest.Builder::version); func = op.enhanceEnumOptionally(func, "opType", OpType.class, IndexRequest.Builder::opType); func = op.enhanceEnumOptionally(func, "versionType", VersionType.class, IndexRequest.Builder::versionType); - func = op.enhanceFuncPivot(func, "document", Object.class, OpenSearchRequests::bindDocument); + func = op.enhanceFuncPivot(func, "document", Object.class, AOSRequests::bindDocument); LongFunction finalFunc1 = func; return l -> finalFunc1.apply(l).build(); } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/UpdateOpDispenser.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSUpdateOpDispenser.java similarity index 58% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/UpdateOpDispenser.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSUpdateOpDispenser.java index 39b8d6de0..5e51f7fee 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/UpdateOpDispenser.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/dispensers/AOSUpdateOpDispenser.java @@ -16,30 +16,25 @@ package io.nosqlbench.adapter.opensearch.dispensers; -import io.nosqlbench.adapter.opensearch.OpenSearchAdapter; -import io.nosqlbench.adapter.opensearch.ops.CreateIndexOp; -import io.nosqlbench.adapter.opensearch.ops.UpdateOp; +import io.nosqlbench.adapter.opensearch.AOSAdapter; +import io.nosqlbench.adapter.opensearch.ops.AOSUpdateOp; import io.nosqlbench.adapters.api.templating.ParsedOp; -import org.opensearch.client.json.JsonData; import org.opensearch.client.opensearch.OpenSearchClient; -import org.opensearch.client.opensearch._types.mapping.*; import org.opensearch.client.opensearch.core.UpdateRequest; -import org.opensearch.client.opensearch.indices.CreateIndexRequest; -import java.util.Map; import java.util.function.LongFunction; -public class UpdateOpDispenser extends BaseOpenSearchOpDispenser { +public class AOSUpdateOpDispenser extends AOSBaseOpDispenser { - public UpdateOpDispenser(OpenSearchAdapter adapter, ParsedOp op, LongFunction targetF) { + public AOSUpdateOpDispenser(AOSAdapter adapter, ParsedOp op, LongFunction targetF) { super(adapter, op, targetF); } @Override - public LongFunction createOpFunc(LongFunction clientF, ParsedOp op, LongFunction targetF) { + public LongFunction createOpFunc(LongFunction clientF, ParsedOp op, LongFunction targetF) { LongFunction bfunc = l -> new UpdateRequest.Builder().index(targetF.apply(l)); // TODO: add details here - return l -> new UpdateOp(clientF.apply(l),bfunc.apply(l).build(),Object.class); + return l -> new AOSUpdateOp(clientF.apply(l),bfunc.apply(l).build(),Object.class); } } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/BaseOpenSearchOp.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSBaseOp.java similarity index 91% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/BaseOpenSearchOp.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSBaseOp.java index a54c2e67e..1e265f286 100644 --- 
a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/BaseOpenSearchOp.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSBaseOp.java @@ -19,10 +19,10 @@ package io.nosqlbench.adapter.opensearch.ops; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp; import org.opensearch.client.opensearch.OpenSearchClient; -public abstract class BaseOpenSearchOp implements CycleOp { +public abstract class AOSBaseOp implements CycleOp { protected final OpenSearchClient client; - public BaseOpenSearchOp(OpenSearchClient client) { + public AOSBaseOp(OpenSearchClient client) { this.client = client; } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/BulkOp.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSBulkOp.java similarity index 74% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/BulkOp.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSBulkOp.java index 50db60124..b814c8cd4 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/BulkOp.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSBulkOp.java @@ -20,17 +20,14 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.client.opensearch.OpenSearchClient; import org.opensearch.client.opensearch.core.BulkRequest; -import org.opensearch.client.opensearch.core.BulkResponse; -import org.opensearch.client.opensearch.core.IndexRequest; -import org.opensearch.client.opensearch.core.IndexResponse; import java.io.IOException; -public class BulkOp extends BaseOpenSearchOp { - private final static Logger logger = LogManager.getLogger(BulkOp.class); +public class AOSBulkOp extends AOSBaseOp { + private final static Logger logger = LogManager.getLogger(AOSBulkOp.class); private final BulkRequest rq; - public BulkOp(OpenSearchClient client, BulkRequest rq) { + public AOSBulkOp(OpenSearchClient client, BulkRequest rq) { super(client); this.rq = rq; } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/CreateIndexOp.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSCreateIndexOp.java similarity index 89% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/CreateIndexOp.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSCreateIndexOp.java index b8aebb9ba..36f6710f4 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/CreateIndexOp.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSCreateIndexOp.java @@ -20,10 +20,10 @@ import org.opensearch.client.opensearch.OpenSearchClient; import org.opensearch.client.opensearch.indices.CreateIndexRequest; import org.opensearch.client.opensearch.indices.CreateIndexResponse; -public class CreateIndexOp extends BaseOpenSearchOp { +public class AOSCreateIndexOp extends AOSBaseOp { private final CreateIndexRequest rq; - public CreateIndexOp(OpenSearchClient client, CreateIndexRequest rq) { + public AOSCreateIndexOp(OpenSearchClient client, CreateIndexRequest rq) { super(client); this.rq = rq; } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/DeleteIndexOp.java 
b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSDeleteIndexOp.java similarity index 83% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/DeleteIndexOp.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSDeleteIndexOp.java index 2ed88188c..ea64cb21d 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/DeleteIndexOp.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSDeleteIndexOp.java @@ -17,15 +17,14 @@ package io.nosqlbench.adapter.opensearch.ops; import org.opensearch.client.opensearch.OpenSearchClient; -import org.opensearch.client.opensearch.indices.CreateIndexRequest; import org.opensearch.client.opensearch.indices.DeleteIndexRequest; import java.io.IOException; -public class DeleteIndexOp extends BaseOpenSearchOp { +public class AOSDeleteIndexOp extends AOSBaseOp { private final DeleteIndexRequest rq; - public DeleteIndexOp(OpenSearchClient client, DeleteIndexRequest rq) { + public AOSDeleteIndexOp(OpenSearchClient client, DeleteIndexRequest rq) { super(client); this.rq = rq; } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/DeleteOp.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSDeleteOp.java similarity index 85% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/DeleteOp.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSDeleteOp.java index 3d50ee44f..e501cc4e1 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/DeleteOp.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSDeleteOp.java @@ -18,14 +18,13 @@ package io.nosqlbench.adapter.opensearch.ops; import org.opensearch.client.opensearch.OpenSearchClient; import org.opensearch.client.opensearch.core.DeleteRequest; -import org.opensearch.client.opensearch.core.IndexRequest; import java.io.IOException; -public class DeleteOp extends BaseOpenSearchOp { +public class AOSDeleteOp extends AOSBaseOp { private final DeleteRequest rq; - public DeleteOp(OpenSearchClient client, DeleteRequest rq) { + public AOSDeleteOp(OpenSearchClient client, DeleteRequest rq) { super(client); this.rq = rq; } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/IndexOp.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSIndexOp.java similarity index 82% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/IndexOp.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSIndexOp.java index a8f0d56bc..6adc6723c 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/IndexOp.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSIndexOp.java @@ -21,15 +21,14 @@ import org.apache.logging.log4j.Logger; import org.opensearch.client.opensearch.OpenSearchClient; import org.opensearch.client.opensearch.core.IndexRequest; import org.opensearch.client.opensearch.core.IndexResponse; -import org.opensearch.client.opensearch.core.UpdateRequest; import java.io.IOException; -public class IndexOp extends BaseOpenSearchOp { - private final static Logger logger = LogManager.getLogger(IndexOp.class); +public class AOSIndexOp extends AOSBaseOp { + private final static Logger logger = 
LogManager.getLogger(AOSIndexOp.class); private final IndexRequest rq; - public IndexOp(OpenSearchClient client, IndexRequest rq) { + public AOSIndexOp(OpenSearchClient client, IndexRequest rq) { super(client); this.rq = rq; } diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/KnnSearchOp.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSKnnSearchOp.java similarity index 81% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/KnnSearchOp.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSKnnSearchOp.java index 16ca011e0..39c6dcd15 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/KnnSearchOp.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSKnnSearchOp.java @@ -19,14 +19,12 @@ package io.nosqlbench.adapter.opensearch.ops; import org.opensearch.client.opensearch.OpenSearchClient; import org.opensearch.client.opensearch.core.SearchRequest; import org.opensearch.client.opensearch.core.SearchResponse; -import org.opensearch.client.opensearch.indices.GetIndexRequest; -import org.opensearch.client.opensearch.indices.GetIndexResponse; -public class KnnSearchOp extends BaseOpenSearchOp { +public class AOSKnnSearchOp extends AOSBaseOp { private final SearchRequest rq; private final Class doctype; - public KnnSearchOp(OpenSearchClient client, SearchRequest rq, Class doctype) { + public AOSKnnSearchOp(OpenSearchClient client, SearchRequest rq, Class doctype) { super(client); this.rq = rq; this.doctype = doctype; diff --git a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/UpdateOp.java b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSUpdateOp.java similarity index 89% rename from adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/UpdateOp.java rename to adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSUpdateOp.java index 4b2e18534..a271e0e86 100644 --- a/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/UpdateOp.java +++ b/adapter-aws-opensearch/src/main/java/io/nosqlbench/adapter/opensearch/ops/AOSUpdateOp.java @@ -21,11 +21,11 @@ import org.opensearch.client.opensearch.core.UpdateRequest; import java.io.IOException; -public class UpdateOp extends BaseOpenSearchOp { +public class AOSUpdateOp extends AOSBaseOp { private final UpdateRequest rq; private final Class doctype; - public UpdateOp(OpenSearchClient client, UpdateRequest rq, Class doctype) { + public AOSUpdateOp(OpenSearchClient client, UpdateRequest rq, Class doctype) { super(client); this.rq = rq; this.doctype = doctype; diff --git a/adapter-cqld4/pom.xml b/adapter-cqld4/pom.xml index 145210a05..953be5201 100644 --- a/adapter-cqld4/pom.xml +++ b/adapter-cqld4/pom.xml @@ -98,12 +98,12 @@ com.fasterxml.jackson.core jackson-databind - 2.16.1 + 2.17.0 com.fasterxml.jackson.core jackson-core - 2.16.1 + 2.17.0 diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/CqlD4BatchStmtDispenser.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/CqlD4BatchStmtDispenser.java new file mode 100644 index 000000000..ea33ec599 --- /dev/null +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/CqlD4BatchStmtDispenser.java @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.adapter.cqld4.opdispensers; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.*; +import io.nosqlbench.adapter.cqld4.optionhelpers.BatchTypeEnum; +import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlBatchStatement; +import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlOp; +import io.nosqlbench.adapters.api.activityimpl.OpDispenser; +import io.nosqlbench.adapters.api.activityimpl.OpMapper; +import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter; +import io.nosqlbench.adapters.api.templating.ParsedOp; +import org.jetbrains.annotations.NotNull; + +import java.util.function.LongFunction; + +public class CqlD4BatchStmtDispenser extends Cqld4BaseOpDispenser { + private final int repeat; + private final ParsedOp subop; + private final OpMapper submapper; + private LongFunction opfunc; + + public CqlD4BatchStmtDispenser( + DriverAdapter adapter, + LongFunction sessionFunc, + ParsedOp op, + int repeat, + ParsedOp subop, + OpDispenser subopDispenser + ) { + super(adapter, sessionFunc, op); + this.repeat = repeat; + this.subop = subop; + this.opfunc = createStmtFunc(op, subopDispenser); + this.submapper = adapter.getOpMapper(); + subopDispenser = submapper.apply(subop); + + } + + private LongFunction createStmtFunc(ParsedOp topOp, OpDispenser subopDispenser) { + Cqld4CqlOp exampleOp = subopDispenser.apply(0L); + Statement example = exampleOp.getStmt(); + if (!(example instanceof BatchableStatement b)) { + throw new RuntimeException("Statement type '" + example.getClass().getCanonicalName() + " is not " + + "batchable. 
query=" + exampleOp.getQueryString()); + } + BatchTypeEnum bte = topOp.getEnumFromFieldOr(BatchTypeEnum.class, BatchTypeEnum.unlogged, "batchtype"); + LongFunction bsbf = l -> new BatchStatementBuilder(bte.batchtype); + LongFunction bsf = getBatchAccumulator(bsbf, subopDispenser); + bsf = getEnhancedStmtFunc(bsf,topOp); + return bsf; + } + + @NotNull + private LongFunction getBatchAccumulator(LongFunction bsb, OpDispenser subopDispenser) { + LongFunction f = l -> { + BatchStatementBuilder bsa = bsb.apply(l); + for (int i = 0; i < repeat; i++) { + Cqld4CqlOp op = subopDispenser.apply(i+l); + BatchableStatement stmt = (BatchableStatement) op.getStmt(); + bsa= bsa.addStatement(stmt); + } + return bsa; + }; + + LongFunction bsf = (long l) -> f.apply(l).build(); + return bsf; + } + + @Override + public Cqld4CqlOp getOp(long value) { + Statement bstmt = opfunc.apply(value); + return new Cqld4CqlBatchStatement( + getSessionFunc().apply(value), + (BatchStatement) bstmt, + getMaxPages(), + getMaxLwtRetries(), + isRetryReplace(), + this + ); + } +} diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/CqlD4RainbowTableDispenser.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/CqlD4RainbowTableDispenser.java index f5b6aa735..41e05e134 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/CqlD4RainbowTableDispenser.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/CqlD4RainbowTableDispenser.java @@ -35,7 +35,7 @@ public class CqlD4RainbowTableDispenser extends Cqld4BaseOpDispenser { } @Override - public Cqld4CqlOp apply(long cycle) { + public Cqld4CqlOp getOp(long cycle) { throw new RuntimeException("implement me"); // return new Cqld4RainbowTableOp( // getSessionFunc().apply(value), diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4FluentGraphOpDispenser.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4FluentGraphOpDispenser.java index c1d3c9378..145cd07cb 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4FluentGraphOpDispenser.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4FluentGraphOpDispenser.java @@ -57,7 +57,7 @@ public class Cqld4FluentGraphOpDispenser extends BaseOpDispenser } @Override - public Op apply(long value) { + public Op getOp(long value) { String graphname = graphnameFunc.apply(value); Script script = tlScript.get(); Map allMap = virtdataBindings.getAllMap(value); diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4GremlinOpDispenser.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4GremlinOpDispenser.java index 44799e2f0..8f3c74686 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4GremlinOpDispenser.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4GremlinOpDispenser.java @@ -55,7 +55,7 @@ public class Cqld4GremlinOpDispenser extends BaseOpDispenser0L) { System.out.println("## GREMLIN DIAG: ScriptGraphStatement on graphname(" + stmt.getGraphName() + "):\n" + stmt.getScript()); diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4PreparedStmtDispenser.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4PreparedStmtDispenser.java index c0684c7eb..acdfa6b65 100644 --- 
a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4PreparedStmtDispenser.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4PreparedStmtDispenser.java @@ -85,7 +85,7 @@ public class Cqld4PreparedStmtDispenser extends Cqld4BaseOpDispenser { } @Override - public Cqld4CqlOp apply(long cycle) { + public Cqld4CqlOp getOp(long cycle) { BoundStatement boundStatement; try { diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4RawStmtDispenser.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4RawStmtDispenser.java index bafa43270..9c5139c0f 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4RawStmtDispenser.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4RawStmtDispenser.java @@ -44,7 +44,7 @@ public class Cqld4RawStmtDispenser extends Cqld4BaseOpDispenser { } @Override - public Cqld4CqlOp apply(long value) { + public Cqld4CqlOp getOp(long value) { return new Cqld4CqlSimpleStatement( getSessionFunc().apply(value), (SimpleStatement) stmtFunc.apply(value), diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4SimpleCqlStmtDispenser.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4SimpleCqlStmtDispenser.java index 3d4ad02f7..c4c3fbfea 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4SimpleCqlStmtDispenser.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4SimpleCqlStmtDispenser.java @@ -41,7 +41,7 @@ public class Cqld4SimpleCqlStmtDispenser extends Cqld4BaseOpDispenser { } @Override - public Cqld4CqlSimpleStatement apply(long value) { + public Cqld4CqlSimpleStatement getOp(long value) { return new Cqld4CqlSimpleStatement( getSessionFunc().apply(value), (SimpleStatement) stmtFunc.apply(value), diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4SsTableDispenser.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4SsTableDispenser.java index 2967ca3e8..2d2f2b5a3 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4SsTableDispenser.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opdispensers/Cqld4SsTableDispenser.java @@ -40,7 +40,7 @@ public class Cqld4SsTableDispenser extends Cqld4BaseOpDispenser { } @Override - public Cqld4CqlOp apply(long cycle) { + public Cqld4CqlOp getOp(long cycle) { // return new CqlD4SsTable( // getSessionFunc().apply(value), // (SsTable) stmtFunc.apply(value), diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/CqlD4BatchStmtMapper.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/CqlD4BatchStmtMapper.java new file mode 100644 index 000000000..e20f153a8 --- /dev/null +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/CqlD4BatchStmtMapper.java @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2022-2023 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.adapter.cqld4.opmappers; + +import com.datastax.oss.driver.api.core.CqlSession; +import io.nosqlbench.adapter.cqld4.opdispensers.CqlD4BatchStmtDispenser; +import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlBatchStatement; +import io.nosqlbench.adapter.cqld4.optypes.Cqld4CqlOp; +import io.nosqlbench.adapters.api.activityimpl.OpDispenser; +import io.nosqlbench.adapters.api.activityimpl.OpMapper; +import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter; +import io.nosqlbench.adapters.api.templating.ParsedOp; +import io.nosqlbench.engine.api.templating.TypeAndTarget; + +import java.util.function.LongFunction; + +public class CqlD4BatchStmtMapper implements OpMapper { + + private final LongFunction sessionFunc; + private final TypeAndTarget target; + private final DriverAdapter adapter; + + + public CqlD4BatchStmtMapper(DriverAdapter adapter, LongFunction sessionFunc, TypeAndTarget target) { + this.sessionFunc=sessionFunc; + this.target = target; + this.adapter = adapter; + } + + /** + * TODO: Make this not require a sub-op element for "uniform batches", + * but allow a sub-op sequence for custom batches. + * @param op the function argument + * @return + */ + public OpDispenser apply(ParsedOp op) { + + ParsedOp subop = op.getAsSubOp("op_template", ParsedOp.SubOpNaming.ParentAndSubKey); + int repeat = op.getStaticValue("repeat"); + OpMapper subopMapper = adapter.getOpMapper(); + OpDispenser subopDispenser = subopMapper.apply(subop); + return new CqlD4BatchStmtDispenser(adapter, sessionFunc, op,repeat, subop, subopDispenser); + + } +} diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/CqlD4OpType.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/CqlD4OpType.java index 828f8ea1e..b37a986cf 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/CqlD4OpType.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/CqlD4OpType.java @@ -43,6 +43,14 @@ public enum CqlD4OpType { */ prepared, + /** + * Allows for a statement template to be used to create a batch statement. + * The fields 'op_template', and 'repeat' are required, and all fields below + * the op_template field are a nested version of the other op types here, but + * supports only the simple and prepared forms for historic compatibility reasons. + */ + batch, + /** * uses {@link com.datastax.dse.driver.api.core.graph.ScriptGraphStatement} * This is the "raw" mode of using gremlin. 
It is not as efficient, and thus diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/Cqld4CoreOpMapper.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/Cqld4CoreOpMapper.java index df8c1854f..a8f87abbd 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/Cqld4CoreOpMapper.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/opmappers/Cqld4CoreOpMapper.java @@ -72,6 +72,7 @@ public class Cqld4CoreOpMapper implements OpMapper { case raw -> new CqlD4RawStmtMapper(adapter, sessionFunc, target.targetFunction).apply(op); case simple -> new CqlD4CqlSimpleStmtMapper(adapter, sessionFunc, target.targetFunction).apply(op); case prepared -> new CqlD4PreparedStmtMapper(adapter, sessionFunc, target).apply(op); + case batch -> new CqlD4BatchStmtMapper(adapter, sessionFunc, target).apply(op); case gremlin -> new Cqld4GremlinOpMapper(adapter, sessionFunc, target.targetFunction).apply(op); case fluent -> new Cqld4FluentGraphOpMapper(adapter, sessionFunc, target).apply(op); case rainbow -> new CqlD4RainbowTableMapper(adapter, sessionFunc, target.targetFunction).apply(op); diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/optionhelpers/BatchTypeEnum.java b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/optionhelpers/BatchTypeEnum.java new file mode 100644 index 000000000..063e69ad2 --- /dev/null +++ b/adapter-cqld4/src/main/java/io/nosqlbench/adapter/cqld4/optionhelpers/BatchTypeEnum.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.adapter.cqld4.optionhelpers; + +import com.datastax.oss.driver.api.core.cql.BatchType; + +public enum BatchTypeEnum { + + logged(BatchType.LOGGED), + unlogged(BatchType.UNLOGGED), + counter(BatchType.COUNTER); + + public final BatchType batchtype; + + BatchTypeEnum(BatchType batchtype) { + this.batchtype = batchtype; + } +} diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/datamappers/functions/to_cqlvector/LoadCqlVector.java b/adapter-cqld4/src/main/java/io/nosqlbench/datamappers/functions/to_cqlvector/LoadCqlVector.java new file mode 100644 index 000000000..ead1868e6 --- /dev/null +++ b/adapter-cqld4/src/main/java/io/nosqlbench/datamappers/functions/to_cqlvector/LoadCqlVector.java @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2022 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.nosqlbench.datamappers.functions.to_cqlvector; + +import com.datastax.oss.driver.api.core.data.CqlVector; +import io.nosqlbench.virtdata.api.annotations.Categories; +import io.nosqlbench.virtdata.api.annotations.Category; +import io.nosqlbench.virtdata.api.annotations.Example; +import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper; +import io.nosqlbench.virtdata.library.basics.core.threadstate.SharedState; + +import java.util.HashMap; +import java.util.function.Function; + +@Categories(Category.state) +@ThreadSafeMapper +public class LoadCqlVector implements Function { + + private final String name; + private final Function nameFunc; + private final com.datastax.oss.driver.api.core.data.CqlVector defaultValue; + + @Example({"LoadDouble('foo')","for the current thread, load a double value from the named variable."}) + public LoadCqlVector(String name) { + this.name = name; + this.nameFunc=null; + this.defaultValue=com.datastax.oss.driver.api.core.data.CqlVector.newInstance(0.0f); + } + + @Example({"LoadDouble('foo',23D)","for the current thread, load a double value from the named variable," + + "or the default value if the named variable is not defined."}) + public LoadCqlVector(String name, int len) { + this.name = name; + this.nameFunc=null; + Double[] ary = new Double[len]; + for (int i = 0; i < len; i++) { + ary[i]=(double)i; + } + this.defaultValue=com.datastax.oss.driver.api.core.data.CqlVector.newInstance(ary); + } + + @Override + public com.datastax.oss.driver.api.core.data.CqlVector apply(Object o) { + HashMap map = SharedState.tl_ObjectMap.get(); + String varname=(nameFunc!=null) ? String.valueOf(nameFunc.apply(o)) : name; + Object value = map.getOrDefault(varname, defaultValue); + if (value instanceof CqlVector cqlvector) { + return cqlvector; + } else if (value instanceof float[] fa) { + Float[] ary = new Float[fa.length]; + for (int i = 0; i < fa.length; i++) { + ary[i]=fa[i]; + } + return com.datastax.oss.driver.api.core.data.CqlVector.newInstance(ary); + } else if (value instanceof double[] da) { + Double[] ary = new Double[da.length]; + for (int i = 0; i < da.length; i++) { + ary[i]=da[i]; + } + return com.datastax.oss.driver.api.core.data.CqlVector.newInstance(ary); + } else { + return (com.datastax.oss.driver.api.core.data.CqlVector) value; + } + + } +} diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/datamappers/functions/to_cqlvector/LoadCqlVectorFromArray.java b/adapter-cqld4/src/main/java/io/nosqlbench/datamappers/functions/to_cqlvector/LoadCqlVectorFromArray.java new file mode 100644 index 000000000..00a095cda --- /dev/null +++ b/adapter-cqld4/src/main/java/io/nosqlbench/datamappers/functions/to_cqlvector/LoadCqlVectorFromArray.java @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2022 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.nosqlbench.datamappers.functions.to_cqlvector; + +import com.datastax.oss.driver.api.core.data.CqlVector; +import io.nosqlbench.virtdata.api.annotations.Categories; +import io.nosqlbench.virtdata.api.annotations.Category; +import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper; +import io.nosqlbench.virtdata.library.basics.core.threadstate.SharedState; + +import java.lang.reflect.Array; +import java.util.HashMap; +import java.util.List; +import java.util.function.Function; +import java.util.function.LongFunction; + +@Categories(Category.state) +@ThreadSafeMapper +public class LoadCqlVectorFromArray implements LongFunction { + + private final String name; + private final Function nameFunc; + private final CqlVector[] defaultValue; + private final int len; + private final int batchsize; + + public LoadCqlVectorFromArray(String name, int len, int batchsize) { + this.name = name; + this.nameFunc = null; + Float[] ary = new Float[len]; + for (int i = 0; i < len; i++) { + ary[i] = (float)i; + } + this.defaultValue = new CqlVector[]{CqlVector.newInstance(ary)}; + this.len = len; + this.batchsize = batchsize; + } + + @Override + public CqlVector apply(long cycle) { + int offset = (int) (cycle % batchsize); + HashMap map = SharedState.tl_ObjectMap.get(); + String varname = (nameFunc != null) ? String.valueOf(nameFunc.apply(cycle)) : name; + Object object = map.getOrDefault(varname, defaultValue); + if (object.getClass().isArray()) { + object = Array.get(object,offset); + } else if (object instanceof double[][] dary) { + object = dary[offset]; + } else if (object instanceof float[][] fary) { + object = fary[offset]; + } else if (object instanceof Double[][] dary) { + object = dary[offset]; + } else if (object instanceof Float[][] fary) { + object = fary[offset]; + } else if (object instanceof CqlVector[] cary) { + object = cary[offset]; + } else if (object instanceof List list) { + object = list.get(offset); + } else { + throw new RuntimeException("Unrecognized type for ary of ary:" + object.getClass().getCanonicalName()); + } + + if (object instanceof CqlVector cqlvector) { + return cqlvector; + } else if (object instanceof float[] fa) { + Float[] ary = new Float[fa.length]; + for (int i = 0; i < fa.length; i++) { + ary[i] = fa[i]; + } + return CqlVector.newInstance(ary); + } else if (object instanceof double[] da) { + Double[] ary = new Double[da.length]; + for (int i = 0; i < da.length; i++) { + ary[i] = da[i]; + } + return CqlVector.newInstance(ary); + } else { + return (CqlVector) object; + } + } +} diff --git a/adapter-cqld4/src/main/java/io/nosqlbench/engine/extensions/vectormath/CqlUtils.java b/adapter-cqld4/src/main/java/io/nosqlbench/engine/extensions/vectormath/CqlUtils.java index ddcc692b4..be26e682c 100644 --- a/adapter-cqld4/src/main/java/io/nosqlbench/engine/extensions/vectormath/CqlUtils.java +++ b/adapter-cqld4/src/main/java/io/nosqlbench/engine/extensions/vectormath/CqlUtils.java @@ -45,5 +45,4 @@ public class CqlUtils extends NBBaseComponent { return rows.stream().mapToInt(r -> Integer.parseInt(Objects.requireNonNull(r.getString(fieldName)))).toArray(); } - } diff --git a/adapter-cqld4/src/main/resources/activities/baselinesv2/cql_keyvalue2.yaml b/adapter-cqld4/src/main/resources/activities/baselinesv2/cql_keyvalue2.yaml index ba39a6bce..3767a66f7 100644 --- a/adapter-cqld4/src/main/resources/activities/baselinesv2/cql_keyvalue2.yaml +++ b/adapter-cqld4/src/main/resources/activities/baselinesv2/cql_keyvalue2.yaml @@ -9,11 +9,17 @@ description: | 
population are replaced with new values which never repeat. During the main phase, random partitions are selected for upsert, with row values never repeating. + TEMPLATE(batchsize,100) + scenarios: default: schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto + batch: + schema: run driver=cql tags==block:schema threads==1 cycles==UNDEF + rampup: run driver=cql tags==block:rampup_batch cycles===TEMPLATE(rampup-cycles,10000000) threads=auto + main: run driver=cql tags==block:"main.*" cycles===TEMPLATE(main-cycles,10000000) threads=auto astra: schema: run driver=cql tags==block:schema-astra threads==1 cycles==UNDEF rampup: run driver=cql tags==block:rampup cycles===TEMPLATE(rampup-cycles,10000000) threads=auto @@ -26,6 +32,7 @@ scenarios: bindings: seq_key: Mod(TEMPLATE(keycount,1000000000)); ToString() -> String seq_value: Hash(); Mod(TEMPLATE(valuecount,1000000000)); ToString() -> String + batch_seq_value: Mul(TEMPLATE(batchsize,100)L); Hash(); Mod(TEMPLATE(valuecount,1000000000)); ToString() -> String rw_key: TEMPLATE(keydist,Uniform(0,1000000000)); ToString() -> String rw_value: Hash(); TEMPLATE(valdist,Uniform(0,1000000000)); ToString() -> String @@ -62,6 +69,18 @@ blocks: insert into TEMPLATE(keyspace,baselines).TEMPLATE(table,keyvalue) (key, value) values ({seq_key},{seq_value}); + rampup_batch: + params: + cl: TEMPLATE(write_cl,LOCAL_QUORUM) + ops: + rampup_insert: + batch: testing + repeat: 100 + op_template: + prepared: | + insert into TEMPLATE(keyspace,baselines).TEMPLATE(table,keyvalue) + (key, value) + values ({seq_key},{seq_value}); verify: params: cl: TEMPLATE(read_cl,LOCAL_QUORUM) diff --git a/adapter-cqld4/src/main/resources/cqld4.md b/adapter-cqld4/src/main/resources/cqld4.md index e614d9bb9..256ee7fca 100644 --- a/adapter-cqld4/src/main/resources/cqld4.md +++ b/adapter-cqld4/src/main/resources/cqld4.md @@ -69,6 +69,8 @@ nb5 ... driverconfig='http://gist.github.com...' * **showstmt** - enable per-statement diagnostics which show as much of the statement as possible for the given statement type. *WARNING* - Do not use this for performance testing, only for diagnostics. +* **diag** - a set of options for advanced diagnostics for CQL. Defaults to `diag=none`. + Valid values are none, addr, mid, all. Presently, only none and all are supported. * **maxpages** - configure the maximum number of pages allowed in a CQL result set. This is configured to `maxpages=1` by default, so that users will be aware of any paging that occurs by default. If you expect and want to allow paging in your operation, then set this number @@ -140,6 +142,13 @@ ops: raw: | create table if not exist {ksname}.{tblname} ... + example-batch-stmt: + batch: + repeat: 50 + op_template: + prepared: | + select three, four from knock.onthedoor where ... 
+ # gremlin statement using the fluent API, as it would be written in a client application example-fluent-graph-stmt: fluent: >- diff --git a/adapter-diag/src/main/java/io/nosqlbench/adapter/diag/DiagOpDispenser.java b/adapter-diag/src/main/java/io/nosqlbench/adapter/diag/DiagOpDispenser.java index 6c3c72bc4..7719660d2 100644 --- a/adapter-diag/src/main/java/io/nosqlbench/adapter/diag/DiagOpDispenser.java +++ b/adapter-diag/src/main/java/io/nosqlbench/adapter/diag/DiagOpDispenser.java @@ -129,7 +129,7 @@ public class DiagOpDispenser extends BaseOpDispenser implement } @Override - public DiagOp apply(long value) { + public DiagOp getOp(long value) { return opFunc.apply(value); } } diff --git a/adapter-dynamodb/pom.xml b/adapter-dynamodb/pom.xml index 3ab3d65eb..a48d81e49 100644 --- a/adapter-dynamodb/pom.xml +++ b/adapter-dynamodb/pom.xml @@ -43,7 +43,7 @@ com.amazonaws aws-java-sdk-dynamodb - 1.12.658 + 1.12.681 diff --git a/adapter-dynamodb/src/main/java/io/nosqlbench/adapter/dynamodb/opdispensers/DDBCreateTableOpDispenser.java b/adapter-dynamodb/src/main/java/io/nosqlbench/adapter/dynamodb/opdispensers/DDBCreateTableOpDispenser.java index 647c38407..2a19ef7b8 100644 --- a/adapter-dynamodb/src/main/java/io/nosqlbench/adapter/dynamodb/opdispensers/DDBCreateTableOpDispenser.java +++ b/adapter-dynamodb/src/main/java/io/nosqlbench/adapter/dynamodb/opdispensers/DDBCreateTableOpDispenser.java @@ -128,7 +128,7 @@ public class DDBCreateTableOpDispenser extends BaseOpDispenser org.openapitools openapi-generator - 7.3.0 + 7.4.0 org.slf4j diff --git a/adapter-http/src/main/java/io/nosqlbench/adapter/http/JsonElementUtils.java b/adapter-http/src/main/java/io/nosqlbench/adapter/http/JsonElementUtils.java index d4423dc7e..530812dd3 100644 --- a/adapter-http/src/main/java/io/nosqlbench/adapter/http/JsonElementUtils.java +++ b/adapter-http/src/main/java/io/nosqlbench/adapter/http/JsonElementUtils.java @@ -2,13 +2,13 @@ package io.nosqlbench.adapter.http; /* * Copyright (c) 2022 nosqlbench - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY @@ -22,6 +22,7 @@ import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; +import java.sql.Array; import java.util.ArrayList; import java.util.List; @@ -61,6 +62,56 @@ public class JsonElementUtils { i++; } return keys; - } + + public static List customNumberArrayToFloatList(JsonElement element) { + JsonObject o1 = element.getAsJsonObject(); + JsonElement data = o1.get("data"); + JsonArray dary = data.getAsJsonArray(); + JsonElement element0 = dary.get(0); + JsonObject eobj1 = element0.getAsJsonObject(); + JsonElement embedding = eobj1.get("embedding"); + JsonArray ary = embedding.getAsJsonArray(); + ArrayList list = new ArrayList<>(ary.size()); + for (JsonElement jsonElement : ary) { + list.add(jsonElement.getAsFloat()); + } + return list; + } + + public static float[] customNumberArrayToFloatArray(JsonElement element) { + JsonObject o1 = element.getAsJsonObject(); + JsonElement data = o1.get("data"); + JsonArray dary = data.getAsJsonArray(); + JsonElement element0 = dary.get(0); + JsonObject eobj1 = element0.getAsJsonObject(); + JsonElement embedding = eobj1.get("embedding"); + JsonArray ary = embedding.getAsJsonArray(); + float[] floats = new float[ary.size()]; + for (int i = 0; i < floats.length; i++) { + floats[i]=ary.get(i).getAsFloat(); + } + return floats; + } + + public static float[][] customNumberArrayToFloatArrayBatch(JsonElement element) { + JsonObject o1 = element.getAsJsonObject(); + JsonElement data = o1.get("data"); + JsonArray dary = data.getAsJsonArray(); + float[][] floats2dary = new float[dary.size()][]; + for (int vector_idx = 0; vector_idx < dary.size(); vector_idx++) { + JsonElement element0 = dary.get(vector_idx); + JsonObject eobj1 = element0.getAsJsonObject(); + JsonElement embedding = eobj1.get("embedding"); + JsonArray vectorAry = embedding.getAsJsonArray(); + float[] newV = new float[vectorAry.size()]; + for (int component_idx = 0; component_idx < vectorAry.size(); component_idx++) { + newV[component_idx]=vectorAry.get(component_idx).getAsFloat(); + } + floats2dary[vector_idx]=newV; + } + return floats2dary; + } + + } diff --git a/adapter-http/src/main/java/io/nosqlbench/adapter/http/core/HttpOpDispenser.java b/adapter-http/src/main/java/io/nosqlbench/adapter/http/core/HttpOpDispenser.java index 9351bf140..d3f4559fc 100644 --- a/adapter-http/src/main/java/io/nosqlbench/adapter/http/core/HttpOpDispenser.java +++ b/adapter-http/src/main/java/io/nosqlbench/adapter/http/core/HttpOpDispenser.java @@ -128,7 +128,7 @@ public class HttpOpDispenser extends BaseOpDispenser { } @Override - public HttpOp apply(long value) { + public HttpOp getOp(long value) { HttpOp op = this.opFunc.apply(value); return op; diff --git a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDDLOpDispenser.java b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDDLOpDispenser.java index fa8712c95..67042208a 100644 --- a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDDLOpDispenser.java +++ b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDDLOpDispenser.java @@ -48,7 +48,7 @@ public class JDBCDDLOpDispenser extends JDBCBaseOpDispenser { } } @Override - public JDBCDDLOp apply(long 
cycle) { + public JDBCDDLOp getOp(long cycle) { String ddlSqlStr = ddlSqlStrFunc.apply(cycle); return new JDBCDDLOp(jdbcSpace, ddlSqlStr); } diff --git a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDMLOpDispenser.java b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDMLOpDispenser.java index d66472e64..f7899ed19 100644 --- a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDMLOpDispenser.java +++ b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDMLOpDispenser.java @@ -101,7 +101,7 @@ public class JDBCDMLOpDispenser extends JDBCBaseOpDispenser { } @Override - public JDBCDMLOp apply(long cycle) { + public JDBCDMLOp getOp(long cycle) { if (isReadStatement) { return new JDBCDMLReadOp( jdbcSpace, diff --git a/adapter-kafka/src/main/java/io/nosqlbench/adapter/kafka/dispensers/MessageConsumerOpDispenser.java b/adapter-kafka/src/main/java/io/nosqlbench/adapter/kafka/dispensers/MessageConsumerOpDispenser.java index bde8f1b60..680543092 100644 --- a/adapter-kafka/src/main/java/io/nosqlbench/adapter/kafka/dispensers/MessageConsumerOpDispenser.java +++ b/adapter-kafka/src/main/java/io/nosqlbench/adapter/kafka/dispensers/MessageConsumerOpDispenser.java @@ -155,7 +155,7 @@ public class MessageConsumerOpDispenser extends KafkaBaseOpDispenser { } @Override - public KafkaOp apply(final long cycle) { + public KafkaOp getOp(final long cycle) { final List topicNameList = this.getEffectiveTopicNameList(cycle); final String groupId = this.getEffectiveGroupId(cycle); if ((0 == topicNameList.size()) || StringUtils.isBlank(groupId)) throw new KafkaAdapterInvalidParamException( diff --git a/adapter-kafka/src/main/java/io/nosqlbench/adapter/kafka/dispensers/MessageProducerOpDispenser.java b/adapter-kafka/src/main/java/io/nosqlbench/adapter/kafka/dispensers/MessageProducerOpDispenser.java index e92c3e87d..d0dcab596 100644 --- a/adapter-kafka/src/main/java/io/nosqlbench/adapter/kafka/dispensers/MessageProducerOpDispenser.java +++ b/adapter-kafka/src/main/java/io/nosqlbench/adapter/kafka/dispensers/MessageProducerOpDispenser.java @@ -200,7 +200,7 @@ public class MessageProducerOpDispenser extends KafkaBaseOpDispenser { } @Override - public KafkaOp apply(final long cycle) { + public KafkaOp getOp(final long cycle) { final String topicName = this.topicNameStrFunc.apply(cycle); final String clientId = this.getEffectiveClientId(cycle); diff --git a/adapter-milvus/pom.xml b/adapter-milvus/pom.xml index 135437a53..f4843048f 100644 --- a/adapter-milvus/pom.xml +++ b/adapter-milvus/pom.xml @@ -47,6 +47,11 @@ ${revision} compile + + com.google.protobuf + protobuf-java-util + 3.24.0 + io.milvus milvus-sdk-java diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/MilvusOpMapper.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/MilvusOpMapper.java index 71fe50235..33dc065ad 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/MilvusOpMapper.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/MilvusOpMapper.java @@ -53,7 +53,7 @@ public class MilvusOpMapper implements OpMapper> { "type", "target" ); - logger.info(() -> "Using '" + typeAndTarget.enumId + "' statement form for '" + op.getName() + "'"); + logger.info(() -> "Using '" + typeAndTarget.enumId + "' op type for op template '" + op.getName() + "'"); return switch (typeAndTarget.enumId) { case drop_collection -> new MilvusDropCollectionOpDispenser(adapter, op, typeAndTarget.targetFunction); diff --git 
a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/exceptions/MilvusAwaitStateIncompleteError.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/exceptions/MilvusAwaitStateIncompleteError.java new file mode 100644 index 000000000..f15031ec1 --- /dev/null +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/exceptions/MilvusAwaitStateIncompleteError.java @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.adapter.milvus.exceptions; + +import io.milvus.grpc.GetLoadStateResponse; +import io.milvus.param.R; + +import java.time.Duration; + +public class MilvusAwaitStateIncompleteError extends RuntimeException { + private final R loadState; + private final Duration timeout; + private final String timeSummary; + + public MilvusAwaitStateIncompleteError(R loadState, Duration timeout, String timeSummary) { + this.loadState = loadState; + this.timeout = timeout; + this.timeSummary = timeSummary; + } + + @Override + public String getMessage() { + return super.getMessage() + ": at time " +timeSummary; + } +} diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/exceptions/MilvusIndexingIncompleteError.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/exceptions/MilvusIndexingIncompleteError.java new file mode 100644 index 000000000..dde96dbf9 --- /dev/null +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/exceptions/MilvusIndexingIncompleteError.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.nosqlbench.adapter.milvus.exceptions; + +import io.milvus.param.index.DescribeIndexParam; +import io.nosqlbench.adapter.milvus.ops.MilvusDescribeIndexOp; + +import java.util.List; + +public class MilvusIndexingIncompleteError extends RuntimeException { + private final DescribeIndexParam milvusDescribeIndexOp; + private final int tried; + private final List stats; + + public MilvusIndexingIncompleteError(DescribeIndexParam milvusDescribeIndexOp, int tried, List stats) { + this.milvusDescribeIndexOp = milvusDescribeIndexOp; + this.tried = tried; + this.stats = stats; + } + + @Override + public String getMessage() { + return super.getMessage() + ": " + + "tries:" + tried + "/" + tried + + ", index:" + milvusDescribeIndexOp.getIndexName() + + ", database:" + milvusDescribeIndexOp.getDatabaseName() + + ", collection:" + milvusDescribeIndexOp.getCollectionName(); + } +} diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusAlterCollectionOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusAlterCollectionOpDispenser.java index bdf3aaac5..9d58981ef 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusAlterCollectionOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusAlterCollectionOpDispenser.java @@ -42,7 +42,8 @@ public class MilvusAlterCollectionOpDispenser extends MilvusBaseOpDispenser ebF = l -> AlterCollectionParam.newBuilder().withCollectionName(targetF.apply(l)); - ebF = op.enhanceFuncOptionally(ebF,"ttl", Integer.class, AlterCollectionParam.Builder::withTTL); + ebF = op.enhanceFuncOptionally(ebF, "ttl", Number.class, + (AlterCollectionParam.Builder b, Number n) -> b.withTTL(n.intValue())); final LongFunction lastF = ebF; final LongFunction collectionParamF = l -> lastF.apply(l).build(); @@ -56,6 +57,6 @@ public class MilvusAlterCollectionOpDispenser extends MilvusBaseOpDispenser targetF ) { - return l -> new MilvusAlterCollectionOp(clientF.apply(l),paramF.apply(l)); + return l -> new MilvusAlterCollectionOp(clientF.apply(l), paramF.apply(l)); } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusBaseOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusBaseOpDispenser.java index fb9354ad0..c2ff65345 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusBaseOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusBaseOpDispenser.java @@ -58,7 +58,7 @@ public abstract class MilvusBaseOpDispenser extends BaseOpDispenser apply(long value) { + public MilvusBaseOp getOp(long value) { return opF.apply(value); } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusCreateCollectionOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusCreateCollectionOpDispenser.java index dc1cd9aec..30ffed7db 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusCreateCollectionOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusCreateCollectionOpDispenser.java @@ -39,12 +39,9 @@ public class MilvusCreateCollectionOpDispenser extends MilvusBaseOpDispenser ebF = l -> CreateCollectionParam.newBuilder().withCollectionName(targetF.apply(l)); - ebF = op.enhanceFuncOptionally(ebF, "shards_num", Integer.class, - 
CreateCollectionParam.Builder::withShardsNum); - ebF = op.enhanceFuncOptionally(ebF, "partition_num", Integer.class, - CreateCollectionParam.Builder::withPartitionsNum); + ebF = op.enhanceFuncOptionally(ebF, "shards_num", Number.class, + (CreateCollectionParam.Builder b, Number n) -> b.withShardsNum(n.intValue())); + ebF = op.enhanceFuncOptionally(ebF, "partition_num", Number.class, + (CreateCollectionParam.Builder b, Number n) -> b.withPartitionsNum(n.intValue())); ebF = op.enhanceFuncOptionally(ebF, "description", String.class, CreateCollectionParam.Builder::withDescription); ebF = op.enhanceEnumOptionally(ebF, "consistency_level", @@ -97,8 +94,7 @@ public class MilvusCreateCollectionOpDispenser extends MilvusBaseOpDispenser builder.withMaxLength(n.intValue())); + fieldspec.getOptionalStaticConfig("max_capacity", Number.class) + .ifPresent((Number n) -> builder.withMaxCapacity(n.intValue())); + fieldspec.getOptionalStaticValue(List.of("partition_key", "partition"), Boolean.class) .ifPresent(builder::withPartitionKey); - fieldspec.getOptionalStaticValue("dimension", Integer.class) - .ifPresent(builder::withDimension); + fieldspec.getOptionalStaticValue("dimension", Number.class) + .ifPresent((Number n) -> builder.withDimension(n.intValue())); fieldspec.getOptionalStaticConfig("data_type", String.class) .map(DataType::valueOf) .ifPresent(builder::withDataType); fieldspec.getOptionalStaticConfig("type_params", Map.class) .ifPresent(builder::withTypeParams); - fieldspec.getOptionalStaticConfig("element_type",String.class) - .map(DataType::valueOf) - .ifPresent(builder::withElementType); + fieldspec.getOptionalStaticConfig("element_type", String.class) + .map(DataType::valueOf) + .ifPresent(builder::withElementType); fieldTypes.add(builder.build()); }); diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusCreateIndexOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusCreateIndexOpDispenser.java index ef86a65ee..776f85de0 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusCreateIndexOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusCreateIndexOpDispenser.java @@ -36,12 +36,9 @@ public class MilvusCreateIndexOpDispenser extends MilvusBaseOpDispenser bF = l -> CreateIndexParam.newBuilder().withIndexName(targetF.apply(l)); - bF = op.enhanceFunc(bF, List.of("collection","collection_name"), String.class, + bF = op.enhanceFunc(bF, List.of("collection", "collection_name"), String.class, CreateIndexParam.Builder::withCollectionName); bF = op.enhanceFunc(bF, "field_name", String.class, CreateIndexParam.Builder::withFieldName); bF = op.enhanceEnumOptionally(bF, "index_type", IndexType.class, CreateIndexParam.Builder::withIndexType); bF = op.enhanceEnumOptionally(bF, "metric_type", MetricType.class, CreateIndexParam.Builder::withMetricType); bF = op.enhanceFuncOptionally(bF, "extra_param", String.class, CreateIndexParam.Builder::withExtraParam); bF = op.enhanceFuncOptionally(bF, "sync_mode", Boolean.class, CreateIndexParam.Builder::withSyncMode); - bF = op.enhanceFuncOptionally(bF, "sync_waiting_interval", Long.class, CreateIndexParam.Builder::withSyncWaitingInterval); - bF = op.enhanceFuncOptionally(bF, "sync_waiting_timeout", Long.class, CreateIndexParam.Builder::withSyncWaitingTimeout); - bF = op.enhanceFuncOptionally(bF, List.of("database","database_name"), String.class, + bF = op.enhanceFuncOptionally(bF, "sync_waiting_interval", 
Number.class, + (CreateIndexParam.Builder b, Number n) -> b.withSyncWaitingInterval(n.longValue())); + bF = op.enhanceFuncOptionally(bF, "sync_waiting_timeout", Number.class, + (CreateIndexParam.Builder b, Number n) -> b.withSyncWaitingTimeout(n.longValue())); + bF = op.enhanceFuncOptionally(bF, List.of("database", "database_name"), String.class, CreateIndexParam.Builder::withDatabaseName); LongFunction finalBF1 = bF; return l -> finalBF1.apply(l).build(); } @Override - public LongFunction> createOpFunc(LongFunction paramF, LongFunction clientF, ParsedOp op, LongFunction targetF) { + public LongFunction> createOpFunc( + LongFunction paramF, + LongFunction clientF, + ParsedOp op, LongFunction targetF) { return l -> new MilvusCreateIndexOp(clientF.apply(l), paramF.apply(l)); } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusDescribeIndexOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusDescribeIndexOpDispenser.java index ab409c7bb..01bd2ba71 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusDescribeIndexOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusDescribeIndexOpDispenser.java @@ -18,21 +18,31 @@ package io.nosqlbench.adapter.milvus.opdispensers; import io.milvus.client.MilvusServiceClient; import io.milvus.param.index.DescribeIndexParam; -import io.milvus.param.partition.CreatePartitionParam; import io.nosqlbench.adapter.milvus.MilvusDriverAdapter; import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp; import io.nosqlbench.adapter.milvus.ops.MilvusDescribeIndexOp; import io.nosqlbench.adapters.api.templating.ParsedOp; +import java.time.Duration; +import java.time.temporal.ChronoUnit; import java.util.List; import java.util.function.LongFunction; public class MilvusDescribeIndexOpDispenser extends MilvusBaseOpDispenser { + private Duration awaitTimeout = Duration.ZERO; + private Duration awaitInterval = Duration.of(10, ChronoUnit.SECONDS); + public MilvusDescribeIndexOpDispenser(MilvusDriverAdapter adapter, ParsedOp op, LongFunction targetFunction) { super(adapter, op, targetFunction); + + op.getOptionalStaticValue("await_timeout", Number.class) + .map(Number::doubleValue) + .ifPresent(v->this.awaitTimeout = Duration.of((long)(v*1000), ChronoUnit.MILLIS)); + op.getOptionalStaticValue("await_interval", Number.class) + .map(Number::doubleValue).ifPresent(v->this.awaitInterval =Duration.of((long)(v*1000),ChronoUnit.MILLIS)); } @Override @@ -43,11 +53,12 @@ public class MilvusDescribeIndexOpDispenser extends MilvusBaseOpDispenser ebF = l -> DescribeIndexParam.newBuilder().withIndexName(targetF.apply(l)); - ebF = op.enhanceFunc(ebF, List.of("collection","collection_name"),String.class, + ebF = op.enhanceFunc(ebF, List.of("collection","collection_name"), String.class, DescribeIndexParam.Builder::withCollectionName); - ebF = op.enhanceFunc(ebF,List.of("database_name","database"),String.class, + ebF = op.enhanceFuncOptionally(ebF, List.of("database_name","database"), String.class, DescribeIndexParam.Builder::withDatabaseName); + final LongFunction lastF = ebF; final LongFunction collectionParamF = l -> lastF.apply(l).build(); return collectionParamF; @@ -60,6 +71,11 @@ public class MilvusDescribeIndexOpDispenser extends MilvusBaseOpDispenser targetF ) { - return l -> new MilvusDescribeIndexOp(clientF.apply(l),paramF.apply(l)); + return l -> new MilvusDescribeIndexOp( + clientF.apply(l), + paramF.apply(l), + 
awaitTimeout, + awaitInterval + ); } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusFlushOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusFlushOpDispenser.java index 5f5e0242f..e99efdcd8 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusFlushOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusFlushOpDispenser.java @@ -51,12 +51,12 @@ public class MilvusFlushOpDispenser extends MilvusBaseOpDispenser { }; LongFunction finalEbF = ebF; ebF = l -> finalEbF.apply(l).withCollectionNames(cnames.apply(l)); - ebF = op.enhanceFuncOptionally(ebF, List.of("database_name","database"),String.class, + ebF = op.enhanceFuncOptionally(ebF, List.of("database_name", "database"), String.class, FlushParam.Builder::withDatabaseName); - ebF = op.enhanceFuncOptionally(ebF, "sync_flush_waiting_interval",Long.class, - FlushParam.Builder::withSyncFlushWaitingInterval); - ebF = op.enhanceFuncOptionally(ebF, "sync_flush_waiting_timeout",Long.class, - FlushParam.Builder::withSyncFlushWaitingTimeout); + ebF = op.enhanceFuncOptionally(ebF, "sync_flush_waiting_interval", Number.class, + (FlushParam.Builder b, Number n) -> b.withSyncFlushWaitingInterval(n.longValue())); + ebF = op.enhanceFuncOptionally(ebF, "sync_flush_waiting_timeout", Number.class, + (FlushParam.Builder b, Number n) -> b.withSyncFlushWaitingTimeout(n.longValue())); final LongFunction lastF = ebF; final LongFunction collectionParamF = l -> lastF.apply(l).build(); @@ -70,6 +70,6 @@ public class MilvusFlushOpDispenser extends MilvusBaseOpDispenser { ParsedOp op, LongFunction targetF ) { - return l -> new MilvusFlushOp(clientF.apply(l),paramF.apply(l)); + return l -> new MilvusFlushOp(clientF.apply(l), paramF.apply(l)); } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusGetFlushStateOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusGetFlushStateOpDispenser.java index cc96923ff..cfb41439e 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusGetFlushStateOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusGetFlushStateOpDispenser.java @@ -52,9 +52,10 @@ public class MilvusGetFlushStateOpDispenser extends MilvusBaseOpDispenser finalEbF = ebF; ebF = l -> finalEbF.apply(l).withSegmentIDs(idsF.apply(l)); - ebF = op.enhanceFuncOptionally(ebF,List.of("collection","collection_name"),String.class, + ebF = op.enhanceFuncOptionally(ebF, List.of("collection", "collection_name"), String.class, GetFlushStateParam.Builder::withCollectionName); - ebF = op.enhanceFuncOptionally(ebF,"flush_ts",Long.class,GetFlushStateParam.Builder::withFlushTs); + ebF = op.enhanceFuncOptionally(ebF, "flush_ts", Number.class, + (GetFlushStateParam.Builder b, Number n) -> b.withFlushTs(n.longValue())); final LongFunction lastF = ebF; final LongFunction collectionParamF = l -> lastF.apply(l).build(); @@ -68,6 +69,6 @@ public class MilvusGetFlushStateOpDispenser extends MilvusBaseOpDispenser targetF ) { - return l -> new MilvusGetFlushStateOp(clientF.apply(l),paramF.apply(l)); + return l -> new MilvusGetFlushStateOp(clientF.apply(l), paramF.apply(l)); } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusGetLoadStateOpDispenser.java 
b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusGetLoadStateOpDispenser.java index 474346ffa..601974f66 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusGetLoadStateOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusGetLoadStateOpDispenser.java @@ -17,23 +17,49 @@ package io.nosqlbench.adapter.milvus.opdispensers; import io.milvus.client.MilvusServiceClient; +import io.milvus.grpc.LoadState; import io.milvus.param.collection.GetLoadStateParam; import io.nosqlbench.adapter.milvus.MilvusDriverAdapter; import io.nosqlbench.adapter.milvus.MilvusAdapterUtils; import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp; import io.nosqlbench.adapter.milvus.ops.MilvusGetLoadStateOp; import io.nosqlbench.adapters.api.templating.ParsedOp; +import io.nosqlbench.nb.api.errors.OpConfigError; +import java.time.Duration; +import java.time.temporal.ChronoUnit; import java.util.List; import java.util.Optional; import java.util.function.LongFunction; public class MilvusGetLoadStateOpDispenser extends MilvusBaseOpDispenser { + private Duration awaitTimeout = Duration.ZERO; + private Duration awaitInterval = Duration.of(10, ChronoUnit.SECONDS); + private LoadState awaitState = LoadState.UNRECOGNIZED; + public MilvusGetLoadStateOpDispenser(MilvusDriverAdapter adapter, ParsedOp op, LongFunction targetFunction) { super(adapter, op, targetFunction); + op.getOptionalStaticValue("await_timeout", Number.class) + .map(Number::doubleValue) + .ifPresent(v->this.awaitTimeout=Duration.of((long)(v*1000),ChronoUnit.MILLIS)); + op.getOptionalStaticValue("await_interval", Number.class) + .map(Number::doubleValue).ifPresent(v->this.awaitInterval=Duration.of((long)(v*1000),ChronoUnit.MILLIS)); + op.getOptionalStaticValue("await_state", String.class).ifPresent(s -> { + var spec = s.toLowerCase(); + for (LoadState value : LoadState.values()) { + if (value.name().toLowerCase().equals(spec) || value.name().toLowerCase().equals("loadstate" + spec)) { + this.awaitState = value; + break; + } + } + if (this.awaitState == null) { + throw new OpConfigError("Unrecognizable load state to await: " + spec); + } + }); + } @Override @@ -44,7 +70,7 @@ public class MilvusGetLoadStateOpDispenser extends MilvusBaseOpDispenser ebF = l -> GetLoadStateParam.newBuilder().withCollectionName(targetF.apply(l)); - ebF = op.enhanceFuncOptionally(ebF, List.of("database_name","database"),String.class, + ebF = op.enhanceFuncOptionally(ebF, List.of("database_name", "database"), String.class, GetLoadStateParam.Builder::withDatabaseName); Optional> partitionsF = op.getAsOptionalFunction("partition_name", String.class); @@ -54,6 +80,8 @@ public class MilvusGetLoadStateOpDispenser extends MilvusBaseOpDispenser finalEbF.apply(l).withPartitionNames(MilvusAdapterUtils.splitNames(pfunc.apply(l))); } + + final LongFunction lastF = ebF; return l -> lastF.apply(l).build(); } @@ -65,6 +93,12 @@ public class MilvusGetLoadStateOpDispenser extends MilvusBaseOpDispenser targetF ) { - return l -> new MilvusGetLoadStateOp(clientF.apply(l),paramF.apply(l)); + return l -> new MilvusGetLoadStateOp( + clientF.apply(l), + paramF.apply(l), + this.awaitState, + this.awaitTimeout, + this.awaitInterval + ); } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusListBulkInsertTasksOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusListBulkInsertTasksOpDispenser.java index 0dea6425e..c584519f5 
100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusListBulkInsertTasksOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusListBulkInsertTasksOpDispenser.java @@ -41,7 +41,8 @@ public class MilvusListBulkInsertTasksOpDispenser extends MilvusBaseOpDispenser< ) { LongFunction ebF = l -> ListBulkInsertTasksParam.newBuilder().withCollectionName(targetF.apply(l)); - ebF = op.enhanceFuncOptionally(ebF,"limit",Integer.class,ListBulkInsertTasksParam.Builder::withLimit); + ebF = op.enhanceFuncOptionally(ebF, "limit", Number.class, + (ListBulkInsertTasksParam.Builder b, Number n) -> b.withLimit(n.intValue())); final LongFunction lastF = ebF; final LongFunction collectionParamF = l -> lastF.apply(l).build(); @@ -55,6 +56,6 @@ public class MilvusListBulkInsertTasksOpDispenser extends MilvusBaseOpDispenser< ParsedOp op, LongFunction targetF ) { - return l -> new MilvusListBulkInsertTasksOp(clientF.apply(l),paramF.apply(l)); + return l -> new MilvusListBulkInsertTasksOp(clientF.apply(l), paramF.apply(l)); } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusLoadCollectionOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusLoadCollectionOpDispenser.java index 8255b230f..a5490963d 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusLoadCollectionOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusLoadCollectionOpDispenser.java @@ -43,16 +43,18 @@ public class MilvusLoadCollectionOpDispenser extends MilvusBaseOpDispenser ebF = l -> LoadCollectionParam.newBuilder().withCollectionName(targetF.apply(l)); - ebF = op.enhanceFuncOptionally(ebF,List.of("database_name","database"),String.class, + ebF = op.enhanceFuncOptionally(ebF, List.of("database_name", "database"), String.class, LoadCollectionParam.Builder::withDatabaseName); - ebF = op.enhanceFuncOptionally(ebF,"refresh",Boolean.class,LoadCollectionParam.Builder::withRefresh); - ebF = op.enhanceFuncOptionally(ebF,"sync_load",Boolean.class,LoadCollectionParam.Builder::withSyncLoad); - ebF = op.enhanceFuncOptionally(ebF,"replica_number",Integer.class,LoadCollectionParam.Builder::withReplicaNumber); - ebF = op.enhanceFuncOptionally(ebF,"resource_groups", List.class,LoadCollectionParam.Builder::withResourceGroups); - ebF = op.enhanceFuncOptionally(ebF,"sync_load_waiting_interval",Long.class,LoadCollectionParam.Builder::withSyncLoadWaitingInterval); - ebF = op.enhanceFuncOptionally(ebF,"sync_load_waiting_timeout",Long.class, - LoadCollectionParam.Builder::withSyncLoadWaitingTimeout); + ebF = op.enhanceFuncOptionally(ebF, "refresh", Boolean.class, LoadCollectionParam.Builder::withRefresh); + ebF = op.enhanceFuncOptionally(ebF, "sync_load", Boolean.class, LoadCollectionParam.Builder::withSyncLoad); + ebF = op.enhanceFuncOptionally(ebF, "replica_number", Number.class, + (LoadCollectionParam.Builder b, Number n) -> b.withReplicaNumber(n.intValue())); + ebF = op.enhanceFuncOptionally(ebF, "resource_groups", List.class, LoadCollectionParam.Builder::withResourceGroups); + ebF = op.enhanceFuncOptionally(ebF, "sync_load_waiting_interval", Number.class, + (LoadCollectionParam.Builder b, Number n) -> b.withSyncLoadWaitingInterval(n.longValue())); + ebF = op.enhanceFuncOptionally(ebF, "sync_load_waiting_timeout", Number.class, + (LoadCollectionParam.Builder b, Number n) -> b.withSyncLoadWaitingTimeout(n.longValue())); final 
LongFunction lastF = ebF; final LongFunction collectionParamF = l -> lastF.apply(l).build(); @@ -66,6 +68,6 @@ public class MilvusLoadCollectionOpDispenser extends MilvusBaseOpDispenser targetF ) { - return l -> new MilvusLoadCollectionOp(clientF.apply(l),paramF.apply(l)); + return l -> new MilvusLoadCollectionOp(clientF.apply(l), paramF.apply(l)); } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusLoadPartitionsOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusLoadPartitionsOpDispenser.java index d3898aa1a..2a7d86baf 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusLoadPartitionsOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusLoadPartitionsOpDispenser.java @@ -50,22 +50,24 @@ public class MilvusLoadPartitionsOpDispenser extends MilvusBaseOpDispenser ebF = l -> LoadPartitionsParam.newBuilder().withCollectionName(targetF.apply(l)); - ebF = op.enhanceFunc(ebF, List.of("partition_names","partitions"), List.class, + ebF = op.enhanceFunc(ebF, List.of("partition_names", "partitions"), List.class, LoadPartitionsParam.Builder::withPartitionNames); ebF = op.enhanceFuncOptionally( ebF, "resource_groups", List.class, LoadPartitionsParam.Builder::withResourceGroups ); ebF = op.enhanceFuncOptionally( - ebF, List.of("database_name","database"), String.class, + ebF, List.of("database_name", "database"), String.class, LoadPartitionsParam.Builder::withDatabaseName ); ebF = op.enhanceFuncOptionally(ebF, "refresh", Boolean.class, LoadPartitionsParam.Builder::withRefresh); - ebF = op.enhanceFuncOptionally(ebF, "replica_number", Integer.class, LoadPartitionsParam.Builder::withReplicaNumber); - ebF = op.enhanceFuncOptionally(ebF,"sync_load",Boolean.class,LoadPartitionsParam.Builder::withSyncLoad); - ebF = op.enhanceFuncOptionally(ebF,"sync_load_waiting_interval",Long.class,LoadPartitionsParam.Builder::withSyncLoadWaitingInterval); - ebF = op.enhanceFuncOptionally(ebF,"sync_load_waiting_timeout",Long.class, - LoadPartitionsParam.Builder::withSyncLoadWaitingTimeout); + ebF = op.enhanceFuncOptionally(ebF, "replica_number", Number.class, + (LoadPartitionsParam.Builder b, Number n) -> b.withReplicaNumber(n.intValue())); + ebF = op.enhanceFuncOptionally(ebF, "sync_load", Boolean.class, LoadPartitionsParam.Builder::withSyncLoad); + ebF = op.enhanceFuncOptionally(ebF, "sync_load_waiting_interval", Number.class, + (LoadPartitionsParam.Builder b, Number n) -> b.withSyncLoadWaitingInterval(n.longValue())); + ebF = op.enhanceFuncOptionally(ebF, "sync_load_waiting_timeout", Number.class, + (LoadPartitionsParam.Builder b, Number n) -> b.withSyncLoadWaitingTimeout(n.longValue())); final LongFunction lastF = ebF; final LongFunction collectionParamF = l -> lastF.apply(l).build(); diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusQueryOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusQueryOpDispenser.java index b28be60ea..6bc087c7c 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusQueryOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusQueryOpDispenser.java @@ -44,14 +44,14 @@ public class MilvusQueryOpDispenser extends MilvusBaseOpDispenser { LongFunction ebF = l -> QueryParam.newBuilder().withCollectionName(targetF.apply(l)); - ebF = 
op.enhanceFuncOptionally(ebF,List.of("partition_names","partitions"), List.class, + ebF = op.enhanceFuncOptionally(ebF, List.of("partition_names", "partitions"), List.class, QueryParam.Builder::withPartitionNames); - ebF = op.enhanceEnumOptionally(ebF,"consistency_level", ConsistencyLevelEnum.class, QueryParam.Builder::withConsistencyLevel); - ebF = op.enhanceFuncOptionally(ebF,"expr",String.class,QueryParam.Builder::withExpr); - ebF = op.enhanceFuncOptionally(ebF,"limit",Long.class,QueryParam.Builder::withLimit); - ebF = op.enhanceFuncOptionally(ebF,"offset",Long.class,QueryParam.Builder::withOffset); - ebF = op.enhanceFuncOptionally(ebF,"ignore_growing",Boolean.class,QueryParam.Builder::withIgnoreGrowing); - ebF = op.enhanceFuncOptionally(ebF,"out_fields",List.class,QueryParam.Builder::withOutFields); + ebF = op.enhanceEnumOptionally(ebF, "consistency_level", ConsistencyLevelEnum.class, QueryParam.Builder::withConsistencyLevel); + ebF = op.enhanceFuncOptionally(ebF, "expr", String.class, QueryParam.Builder::withExpr); + ebF = op.enhanceFuncOptionally(ebF, "limit", Number.class, (QueryParam.Builder b, Number n) -> b.withLimit(n.longValue())); + ebF = op.enhanceFuncOptionally(ebF, "offset", Number.class, (QueryParam.Builder b, Number n) -> b.withOffset(n.longValue())); + ebF = op.enhanceFuncOptionally(ebF, "ignore_growing", Boolean.class, QueryParam.Builder::withIgnoreGrowing); + ebF = op.enhanceFuncOptionally(ebF, "out_fields", List.class, QueryParam.Builder::withOutFields); final LongFunction lastF = ebF; final LongFunction collectionParamF = l -> lastF.apply(l).build(); @@ -65,6 +65,6 @@ public class MilvusQueryOpDispenser extends MilvusBaseOpDispenser { ParsedOp op, LongFunction targetF ) { - return l -> new MilvusQueryOp(clientF.apply(l),paramF.apply(l)); + return l -> new MilvusQueryOp(clientF.apply(l), paramF.apply(l)); } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusSearchOpDispenser.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusSearchOpDispenser.java index d2e5021da..f58d1622c 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusSearchOpDispenser.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/opdispensers/MilvusSearchOpDispenser.java @@ -24,12 +24,8 @@ import io.nosqlbench.adapter.milvus.MilvusDriverAdapter; import io.nosqlbench.adapter.milvus.ops.MilvusBaseOp; import io.nosqlbench.adapter.milvus.ops.MilvusSearchOp; import io.nosqlbench.adapters.api.templating.ParsedOp; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import java.util.ArrayList; import java.util.List; -import java.util.Map; import java.util.function.LongFunction; public class MilvusSearchOpDispenser extends MilvusBaseOpDispenser { @@ -53,9 +49,11 @@ public class MilvusSearchOpDispenser extends MilvusBaseOpDispenser ebF = op.enhanceEnumOptionally(ebF, "consistency_level", ConsistencyLevelEnum.class, SearchParam.Builder::withConsistencyLevel); ebF = op.enhanceFuncOptionally(ebF, "expr", String.class, SearchParam.Builder::withExpr); - ebF = op.enhanceDefaultFunc(ebF, "top_k", Integer.class, 100, SearchParam.Builder::withTopK); + ebF = op.enhanceDefaultFunc(ebF, "top_k", Number.class, 100, + (SearchParam.Builder b, Number n) -> b.withTopK(n.intValue())); ebF = op.enhanceEnumOptionally(ebF, "metric_type", MetricType.class, SearchParam.Builder::withMetricType); - ebF = op.enhanceFuncOptionally(ebF, "round_decimal", Integer.class, 
SearchParam.Builder::withRoundDecimal); + ebF = op.enhanceFuncOptionally(ebF, "round_decimal", Number.class, + (SearchParam.Builder b, Number n) -> b.withRoundDecimal(n.intValue())); ebF = op.enhanceFuncOptionally(ebF, "ignore_growing", Boolean.class, SearchParam.Builder::withIgnoreGrowing); ebF = op.enhanceFuncOptionally(ebF, "params", String.class, SearchParam.Builder::withParams); ebF = op.enhanceFunc(ebF, List.of("vector_field_name", "vector_field"), String.class, diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusBaseOp.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusBaseOp.java index d685fabcd..6eef97bec 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusBaseOp.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusBaseOp.java @@ -17,6 +17,7 @@ package io.nosqlbench.adapter.milvus.ops; import io.milvus.client.MilvusServiceClient; +import io.milvus.param.R; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -50,6 +51,16 @@ public abstract class MilvusBaseOp implements CycleOp { try { Object result = applyOp(value); + if (result instanceof R r) { + var error = r.getException(); + if (error!=null) { + throw error; + } + } else { + logger.warn("Op '" + this.toString() + "' did not return a Result 'R' type." + + " Exception handling will be bypassed" + ); + } return result; } catch (Exception e) { if (e instanceof RuntimeException rte) { diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusCreateCollectionOp.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusCreateCollectionOp.java index 1d9fcc6ec..a8d2bddda 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusCreateCollectionOp.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusCreateCollectionOp.java @@ -17,6 +17,8 @@ package io.nosqlbench.adapter.milvus.ops; import io.milvus.client.MilvusServiceClient; +import io.milvus.param.R; +import io.milvus.param.RpcStatus; import io.milvus.param.collection.CreateCollectionParam; import io.nosqlbench.adapters.api.templating.ParsedOp; @@ -33,6 +35,7 @@ public class MilvusCreateCollectionOp extends MilvusBaseOp collection = client.createCollection(request); + return collection; } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusCreateDatabaseOp.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusCreateDatabaseOp.java index 006070fad..5474f73b8 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusCreateDatabaseOp.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusCreateDatabaseOp.java @@ -17,6 +17,8 @@ package io.nosqlbench.adapter.milvus.ops; import io.milvus.client.MilvusServiceClient; +import io.milvus.param.R; +import io.milvus.param.RpcStatus; import io.milvus.param.collection.CreateDatabaseParam; public class MilvusCreateDatabaseOp extends MilvusBaseOp { @@ -26,6 +28,7 @@ public class MilvusCreateDatabaseOp extends MilvusBaseOp { @Override public Object applyOp(long value) { - return client.createDatabase(request); + R database = client.createDatabase(request); + return database; } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusDescribeCollectionOp.java 
b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusDescribeCollectionOp.java index 50ed18e4c..07c3f6596 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusDescribeCollectionOp.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusDescribeCollectionOp.java @@ -17,6 +17,8 @@ package io.nosqlbench.adapter.milvus.ops; import io.milvus.client.MilvusServiceClient; +import io.milvus.grpc.DescribeCollectionResponse; +import io.milvus.param.R; import io.milvus.param.collection.DescribeCollectionParam; public class MilvusDescribeCollectionOp extends MilvusBaseOp { @@ -26,6 +28,7 @@ public class MilvusDescribeCollectionOp extends MilvusBaseOp describeCollectionResponseR = client.describeCollection(request); + return describeCollectionResponseR; } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusDescribeIndexOp.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusDescribeIndexOp.java index 1e5ee5618..168ad87fb 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusDescribeIndexOp.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusDescribeIndexOp.java @@ -17,15 +17,88 @@ package io.nosqlbench.adapter.milvus.ops; import io.milvus.client.MilvusServiceClient; +import io.milvus.grpc.DescribeIndexResponse; +import io.milvus.grpc.IndexDescription; +import io.milvus.param.R; import io.milvus.param.index.DescribeIndexParam; +import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; +import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.OpGenerator; +import io.nosqlbench.adapters.api.scheduling.TimeoutPredicate; -public class MilvusDescribeIndexOp extends MilvusBaseOp { - public MilvusDescribeIndexOp(MilvusServiceClient client, DescribeIndexParam request) { +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; + +public class MilvusDescribeIndexOp extends MilvusBaseOp implements OpGenerator { + private final Duration timeout; + private final Duration interval; + private final TimeoutPredicate timeoutPredicate; + private MilvusDescribeIndexOp nextOp; + private long lastAttemptAt = 0L; + + public MilvusDescribeIndexOp( + MilvusServiceClient client, + DescribeIndexParam request, + Duration timeout, + Duration interval + ) { super(client, request); + this.timeout = timeout; + this.interval = interval; + this.timeoutPredicate = TimeoutPredicate.of(p -> p>=100, timeout, interval, true); } @Override public Object applyOp(long value) { - return client.describeIndex(request); + nextOp = null; + timeoutPredicate.blockUntilNextInterval(); + + R describeIndexResponseR = client.describeIndex(request); + DescribeIndexResponse data = describeIndexResponseR.getData(); + + TimeoutPredicate.Result result = timeoutPredicate.test(getIndexStats(data).percent()); + String message = result.status().name() + " await state " + result.value() + " at time " + result.timeSummary(); + logger.info(message); + + if (result.isPending()) { + this.nextOp=this; + } + + return describeIndexResponseR; + } + + private IndexStats getIndexStats(DescribeIndexResponse data) { + var stats = new ArrayList(); + for (IndexDescription desc : data.getIndexDescriptionsList()) { + stats.add(new IndexStat(desc.getIndexName(), desc.getIndexedRows(), desc.getPendingIndexRows())); + } + return new IndexStats(stats); + } + + public static class IndexStats extends ArrayList { + public IndexStats(List stats) { + super(stats); + } + + 
public int percent() { + return stream().mapToInt(IndexStat::percent).min().orElse(0); + } + } + public record IndexStat( + String index_name, + long indexed_rows, + long pending_rows + ) { + public int percent() { + if (pending_rows == 0) { + return 100; + } + return (int) (100.0d * ((double) indexed_rows / (double) (indexed_rows + pending_rows))); + } + } + + @Override + public Op getNextOp() { + return nextOp; } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusGetIndexBuildProgressOp.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusGetIndexBuildProgressOp.java index b9407194e..a156fea5f 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusGetIndexBuildProgressOp.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusGetIndexBuildProgressOp.java @@ -16,7 +16,12 @@ package io.nosqlbench.adapter.milvus.ops; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; import io.milvus.client.MilvusServiceClient; +import io.milvus.grpc.GetIndexBuildProgressResponse; +import io.milvus.param.R; import io.milvus.param.index.GetIndexBuildProgressParam; public class MilvusGetIndexBuildProgressOp extends MilvusBaseOp { @@ -26,6 +31,13 @@ public class MilvusGetIndexBuildProgressOp extends MilvusBaseOp indexBuildProgress = client.getIndexBuildProgress(request); + GetIndexBuildProgressResponse r = indexBuildProgress.getData(); + try { + String responseJson = JsonFormat.printer().print(r); + return responseJson; + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException(e); + } } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusGetLoadStateOp.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusGetLoadStateOp.java index c76966d8d..0fb51f6d1 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusGetLoadStateOp.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusGetLoadStateOp.java @@ -17,15 +17,55 @@ package io.nosqlbench.adapter.milvus.ops; import io.milvus.client.MilvusServiceClient; +import io.milvus.grpc.GetLoadStateResponse; +import io.milvus.grpc.LoadState; +import io.milvus.param.R; import io.milvus.param.collection.GetLoadStateParam; +import io.nosqlbench.adapter.milvus.exceptions.MilvusAwaitStateIncompleteError; +import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; +import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.OpGenerator; +import io.nosqlbench.adapters.api.scheduling.TimeoutPredicate; -public class MilvusGetLoadStateOp extends MilvusBaseOp { - public MilvusGetLoadStateOp(MilvusServiceClient client, GetLoadStateParam request) { +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.concurrent.locks.LockSupport; + +public class MilvusGetLoadStateOp extends MilvusBaseOp implements OpGenerator { + private final TimeoutPredicate timeoutPredicate; + private int tried; + private MilvusGetLoadStateOp nextOp; + private long lastAttemptAt = 0L; + + public MilvusGetLoadStateOp( + MilvusServiceClient client, + GetLoadStateParam request, + LoadState awaitState, + Duration timeout, + Duration interval + ) { super(client, request); + this.timeoutPredicate = TimeoutPredicate.of(s -> s==awaitState, timeout, interval, true); } @Override public Object applyOp(long value) { - return client.getLoadState(request); + this.nextOp 
= null; + timeoutPredicate.blockUntilNextInterval(); + R getLoadStateResponse = client.getLoadState(request); + TimeoutPredicate.Result result = timeoutPredicate.test(getLoadStateResponse.getData().getState()); + + String message = result.status().name() + " await state " + result.value() + " at time " + result.timeSummary(); + logger.info(message); + + if (result.status()== TimeoutPredicate.Status.pending) { + nextOp=this; + } + + return getLoadStateResponse; + } + + @Override + public Op getNextOp() { + return this.nextOp; } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusInsertOp.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusInsertOp.java index ad8bf03c2..e33894281 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusInsertOp.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusInsertOp.java @@ -36,6 +36,7 @@ public class MilvusInsertOp extends MilvusBaseOp { @Override public R applyOp(long value) { - return client.insert(request); + R insert = client.insert(request); + return insert; } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusLoadCollectionOp.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusLoadCollectionOp.java index 4cc309e1a..35f7935a3 100644 --- a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusLoadCollectionOp.java +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/ops/MilvusLoadCollectionOp.java @@ -17,6 +17,8 @@ package io.nosqlbench.adapter.milvus.ops; import io.milvus.client.MilvusServiceClient; +import io.milvus.param.R; +import io.milvus.param.RpcStatus; import io.milvus.param.collection.LoadCollectionParam; public class MilvusLoadCollectionOp extends MilvusBaseOp { @@ -26,6 +28,7 @@ public class MilvusLoadCollectionOp extends MilvusBaseOp { @Override public Object applyOp(long value) { - return client.loadCollection(request); + R rpcStatusR = client.loadCollection(request); + return rpcStatusR; } } diff --git a/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/resultwrappers/MVGetIndexBuildProgressRespones.java b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/resultwrappers/MVGetIndexBuildProgressRespones.java new file mode 100644 index 000000000..b253179c2 --- /dev/null +++ b/adapter-milvus/src/main/java/io/nosqlbench/adapter/milvus/resultwrappers/MVGetIndexBuildProgressRespones.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.nosqlbench.adapter.milvus.resultwrappers; + +import io.milvus.grpc.GetIndexBuildProgressResponse; + +public class MVGetIndexBuildProgressRespones { + private final GetIndexBuildProgressResponse r; + + public MVGetIndexBuildProgressRespones(GetIndexBuildProgressResponse r) { + this.r = r; + } + + public int getPercent() { + if (getTotalRows()==getIndexedRows()) { + return 100; + } + + double ratio = (double) getIndexedRows() / (double) getTotalRows(); + return (int) (ratio*100.0d); + } + + public long getTotalRows() { + return r.getTotalRows(); + } + public long getIndexedRows() { + return r.getIndexedRows(); + } +} diff --git a/adapter-mongodb/src/main/java/io/nosqlbench/adapter/mongodb/dispensers/MongoCommandOpDispenser.java b/adapter-mongodb/src/main/java/io/nosqlbench/adapter/mongodb/dispensers/MongoCommandOpDispenser.java index 2be65f599..2923b6620 100644 --- a/adapter-mongodb/src/main/java/io/nosqlbench/adapter/mongodb/dispensers/MongoCommandOpDispenser.java +++ b/adapter-mongodb/src/main/java/io/nosqlbench/adapter/mongodb/dispensers/MongoCommandOpDispenser.java @@ -60,7 +60,7 @@ public class MongoCommandOpDispenser extends BaseOpDispenser { } @Override - public Op apply(long cycle) { + public Op getOp(long cycle) { return mongoOpF.apply(cycle); } } diff --git a/adapter-mongodb/src/main/java/io/nosqlbench/adapter/mongodb/dispensers/MongoDbUpdateOpDispenser.java b/adapter-mongodb/src/main/java/io/nosqlbench/adapter/mongodb/dispensers/MongoDbUpdateOpDispenser.java index 0829fd2e9..82b5af140 100644 --- a/adapter-mongodb/src/main/java/io/nosqlbench/adapter/mongodb/dispensers/MongoDbUpdateOpDispenser.java +++ b/adapter-mongodb/src/main/java/io/nosqlbench/adapter/mongodb/dispensers/MongoDbUpdateOpDispenser.java @@ -49,7 +49,7 @@ public class MongoDbUpdateOpDispenser extends BaseOpDispenser { } @Override - public Op apply(long value) { + public Op getOp(long value) { Op op = opF.apply(value); return op; } } diff --git a/adapter-pinecone/README.md b/adapter-pinecone/README.md new file mode 100644 index 000000000..42220fb95 --- /dev/null +++ b/adapter-pinecone/README.md @@ -0,0 +1,6 @@ +This driver has been disconnected from the main build and artifacts because +it has been abandoned by Pinecone. + +If it is ever needed again, do a special build that disables the +other modules, since this module and the others depend on incompatible +versions of gRPC.
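Note on the await-style ops above: the new MilvusDescribeIndexOp and MilvusGetLoadStateOp re-check index/load status on an interval using the TimeoutPredicate helper added under adapters-api later in this diff. The following is only a rough usage sketch of that helper, not part of this change set; the AwaitSketch class and its isIndexReady() check are hypothetical stand-ins for a real readiness call.

    import io.nosqlbench.adapters.api.scheduling.TimeoutPredicate;
    import java.time.Duration;

    public class AwaitSketch {
        // hypothetical readiness check standing in for a real status lookup
        static boolean isIndexReady() {
            return System.nanoTime() % 2 == 0; // placeholder only
        }

        public static void main(String[] args) {
            TimeoutPredicate<Boolean> ready = TimeoutPredicate.of(
                AwaitSketch::isIndexReady,   // value source
                state -> state,              // complete once the value is true
                Duration.ofSeconds(10),      // overall timeout
                Duration.ofMillis(500),      // minimum interval between checks
                true                         // rethrow predicate exceptions
            );

            TimeoutPredicate.Result<Boolean> result = ready.test();
            while (result.isPending()) {     // pending: not yet true, timeout not reached
                result = ready.test();       // blocks until the next interval before re-checking
            }
            System.out.println(result.status() + " after " + result.timeSummary());
        }
    }

Per the TimeoutPredicate source included below, the first test() runs immediately, each later call blocks until the next interval, and the result stays pending until the predicate passes (complete) or the timeout elapses (incomplete).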
diff --git a/adapter-pinecone/pom.xml b/adapter-pinecone/pom.xml index 91ae70b94..1587e792f 100644 --- a/adapter-pinecone/pom.xml +++ b/adapter-pinecone/pom.xml @@ -49,7 +49,7 @@ io.pinecone pinecone-client - 0.7.4 + 1.0.0-rc.1 diff --git a/adapter-pulsar/pom.xml b/adapter-pulsar/pom.xml index 711f82882..8ac7bdfac 100644 --- a/adapter-pulsar/pom.xml +++ b/adapter-pulsar/pom.xml @@ -34,7 +34,7 @@ - 3.2.0 + 3.2.1 @@ -49,6 +49,12 @@ org.apache.pulsar pulsar-client ${pulsar.version} + + + protobuf-java + com.google.protobuf + + diff --git a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminNamespaceOpDispenser.java b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminNamespaceOpDispenser.java index 2b6233a11..4d13dc0f3 100644 --- a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminNamespaceOpDispenser.java +++ b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminNamespaceOpDispenser.java @@ -37,7 +37,7 @@ public class AdminNamespaceOpDispenser extends PulsarAdminOpDispenser { } @Override - public AdminNamespaceOp apply(long cycle) { + public AdminNamespaceOp getOp(long cycle) { return new AdminNamespaceOp( pulsarAdapterMetrics, pulsarAdmin, diff --git a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminTenantOpDispenser.java b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminTenantOpDispenser.java index e05c1a588..cca2d9e67 100644 --- a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminTenantOpDispenser.java +++ b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminTenantOpDispenser.java @@ -43,7 +43,7 @@ public class AdminTenantOpDispenser extends PulsarAdminOpDispenser { } @Override - public AdminTenantOp apply(long cycle) { + public AdminTenantOp getOp(long cycle) { return new AdminTenantOp( pulsarAdapterMetrics, pulsarAdmin, diff --git a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminTopicOpDispenser.java b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminTopicOpDispenser.java index 219d22ff0..9e0d2b5c8 100644 --- a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminTopicOpDispenser.java +++ b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/AdminTopicOpDispenser.java @@ -44,7 +44,7 @@ public class AdminTopicOpDispenser extends PulsarAdminOpDispenser { } @Override - public AdminTopicOp apply(long cycle) { + public AdminTopicOp getOp(long cycle) { return new AdminTopicOp( pulsarAdapterMetrics, diff --git a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageConsumerOpDispenser.java b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageConsumerOpDispenser.java index 8cc539550..76a017d3f 100644 --- a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageConsumerOpDispenser.java +++ b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageConsumerOpDispenser.java @@ -76,7 +76,7 @@ public class MessageConsumerOpDispenser extends PulsarClientOpDispenser { } @Override - public MessageConsumerOp apply(final long cycle) { + public MessageConsumerOp getOp(final long cycle) { return new MessageConsumerOp( this.pulsarAdapterMetrics, this.pulsarClient, diff --git a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageProducerOpDispenser.java 
b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageProducerOpDispenser.java index 63ffbd1ca..7bf6320df 100644 --- a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageProducerOpDispenser.java +++ b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageProducerOpDispenser.java @@ -56,7 +56,7 @@ public class MessageProducerOpDispenser extends PulsarClientOpDispenser { } @Override - public MessageProducerOp apply(long cycle) { + public MessageProducerOp getOp(long cycle) { return new MessageProducerOp( pulsarAdapterMetrics, pulsarClient, diff --git a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageReaderOpDispenser.java b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageReaderOpDispenser.java index 47ac52c88..55c06cee2 100644 --- a/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageReaderOpDispenser.java +++ b/adapter-pulsar/src/main/java/io/nosqlbench/adapter/pulsar/dispensers/MessageReaderOpDispenser.java @@ -52,7 +52,7 @@ public class MessageReaderOpDispenser extends PulsarClientOpDispenser { } @Override - public MessageReaderOp apply(long cycle) { + public MessageReaderOp getOp(long cycle) { return new MessageReaderOp( pulsarAdapterMetrics, diff --git a/adapter-s4j/src/main/java/io/nosqlbench/adapter/s4j/dispensers/MessageConsumerOpDispenser.java b/adapter-s4j/src/main/java/io/nosqlbench/adapter/s4j/dispensers/MessageConsumerOpDispenser.java index e4ebd1d00..f028cef01 100644 --- a/adapter-s4j/src/main/java/io/nosqlbench/adapter/s4j/dispensers/MessageConsumerOpDispenser.java +++ b/adapter-s4j/src/main/java/io/nosqlbench/adapter/s4j/dispensers/MessageConsumerOpDispenser.java @@ -122,7 +122,7 @@ public class MessageConsumerOpDispenser extends S4JBaseOpDispenser { } @Override - public MessageConsumerOp apply(long cycle) { + public MessageConsumerOp getOp(long cycle) { S4JJMSContextWrapper s4JJMSContextWrapper = getS4jJmsContextWrapper(cycle, this.combinedS4jConfigObjMap); JMSContext jmsContext = s4JJMSContextWrapper.getJmsContext(); boolean commitTransact = super.commitTransaction(txnBatchNum, jmsContext.getSessionMode(), cycle); diff --git a/adapter-s4j/src/main/java/io/nosqlbench/adapter/s4j/dispensers/MessageProducerOpDispenser.java b/adapter-s4j/src/main/java/io/nosqlbench/adapter/s4j/dispensers/MessageProducerOpDispenser.java index ebbda3a75..584ebca04 100644 --- a/adapter-s4j/src/main/java/io/nosqlbench/adapter/s4j/dispensers/MessageProducerOpDispenser.java +++ b/adapter-s4j/src/main/java/io/nosqlbench/adapter/s4j/dispensers/MessageProducerOpDispenser.java @@ -269,7 +269,7 @@ public class MessageProducerOpDispenser extends S4JBaseOpDispenser { } @Override - public MessageProducerOp apply(long cycle) { + public MessageProducerOp getOp(long cycle) { String destName = destNameStrFunc.apply(cycle); String jmsMsgHeaderRawJsonStr = msgHeaderRawJsonStrFunc.apply(cycle); String jmsMsgPropertyRawJsonStr = msgPropRawJsonStrFunc.apply(cycle); diff --git a/adapter-stdout/src/main/java/io/nosqlbench/adapter/stdout/StdoutOpDispenser.java b/adapter-stdout/src/main/java/io/nosqlbench/adapter/stdout/StdoutOpDispenser.java index 5d08f151f..66f77d8f0 100644 --- a/adapter-stdout/src/main/java/io/nosqlbench/adapter/stdout/StdoutOpDispenser.java +++ b/adapter-stdout/src/main/java/io/nosqlbench/adapter/stdout/StdoutOpDispenser.java @@ -37,7 +37,7 @@ public class StdoutOpDispenser extends BaseOpDispenser { } @Override - public StdoutOp apply(long value) { + 
public StdoutOp getOp(long value) { StdoutSpace ctx = ctxfunc.apply(value); String output = outFunction.apply(value); return new StdoutOp(ctx,output); diff --git a/adapter-tcp/src/main/java/io/nosqlbench/adapter/tcpclient/TcpClientOpDispenser.java b/adapter-tcp/src/main/java/io/nosqlbench/adapter/tcpclient/TcpClientOpDispenser.java index 815ed2336..3e74578a9 100644 --- a/adapter-tcp/src/main/java/io/nosqlbench/adapter/tcpclient/TcpClientOpDispenser.java +++ b/adapter-tcp/src/main/java/io/nosqlbench/adapter/tcpclient/TcpClientOpDispenser.java @@ -35,7 +35,7 @@ public class TcpClientOpDispenser extends BaseOpDispenser org.scala-lang scala-library - 2.13.12 + 2.13.13 diff --git a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/BaseOpDispenser.java b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/BaseOpDispenser.java index f3de53694..8a7f45d2a 100644 --- a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/BaseOpDispenser.java +++ b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/BaseOpDispenser.java @@ -226,6 +226,9 @@ public abstract class BaseOpDispenser extends NBBaseComponent i return this.labels; } - - + @Override + public final T apply(long value) { + T op = getOp(value); + return op; + } } diff --git a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/OpDispenser.java b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/OpDispenser.java index 86025e429..57ff77e0e 100644 --- a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/OpDispenser.java +++ b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/OpDispenser.java @@ -82,7 +82,7 @@ public interface OpDispenser extends LongFunction, OpResultTracker { * @return an executable operation */ - T apply(long value); + T getOp(long value); CycleFunction getVerifier(); diff --git a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/DryRunOp.java b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/DryRunOp.java similarity index 89% rename from adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/DryRunOp.java rename to adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/DryRunOp.java index 9b5bdabd6..a5775f10a 100644 --- a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/DryRunOp.java +++ b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/DryRunOp.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2023 nosqlbench + * Copyright (c) 2022-2024 nosqlbench * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package io.nosqlbench.adapters.api.activityimpl.uniform; +package io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.RunnableOp; diff --git a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/DryRunOpDispenserWrapper.java b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/DryRunOpDispenserWrapper.java similarity index 82% rename from adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/DryRunOpDispenserWrapper.java rename to adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/DryRunOpDispenserWrapper.java index 13bee89fa..55a514d38 100644 --- a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/DryRunOpDispenserWrapper.java +++ b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/DryRunOpDispenserWrapper.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2023 nosqlbench + * Copyright (c) 2022-2024 nosqlbench * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,10 +14,11 @@ * limitations under the License. */ -package io.nosqlbench.adapters.api.activityimpl.uniform; +package io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers; import io.nosqlbench.adapters.api.activityimpl.BaseOpDispenser; import io.nosqlbench.adapters.api.activityimpl.OpDispenser; +import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; import io.nosqlbench.adapters.api.templating.ParsedOp; @@ -30,8 +31,8 @@ public class DryRunOpDispenserWrapper extends BaseOpDispenser { this.realDispenser = realDispenser; } @Override - public DryRunOp apply(long cycle) { - Op op = realDispenser.apply(cycle); + public DryRunOp getOp(long cycle) { + Op op = realDispenser.getOp(cycle); return new DryRunOp(op); } } diff --git a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/EmitterOp.java b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/EmitterOp.java similarity index 90% rename from adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/EmitterOp.java rename to adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/EmitterOp.java index 9abbda81a..8e03fa81c 100644 --- a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/EmitterOp.java +++ b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/EmitterOp.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2023 nosqlbench + * Copyright (c) 2022-2024 nosqlbench * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package io.nosqlbench.adapters.api.activityimpl.uniform; +package io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp; diff --git a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/EmitterOpDispenserWrapper.java b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/EmitterOpDispenserWrapper.java similarity index 82% rename from adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/EmitterOpDispenserWrapper.java rename to adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/EmitterOpDispenserWrapper.java index c82c5b1a9..292e77697 100644 --- a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/EmitterOpDispenserWrapper.java +++ b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/EmitterOpDispenserWrapper.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2023 nosqlbench + * Copyright (c) 2022-2024 nosqlbench * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,10 +14,11 @@ * limitations under the License. */ -package io.nosqlbench.adapters.api.activityimpl.uniform; +package io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers; import io.nosqlbench.adapters.api.activityimpl.BaseOpDispenser; import io.nosqlbench.adapters.api.activityimpl.OpDispenser; +import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; import io.nosqlbench.adapters.api.templating.ParsedOp; @@ -31,8 +32,8 @@ public class EmitterOpDispenserWrapper extends BaseOpDispenser { this.realDispenser = realDispenser; } @Override - public EmitterOp apply(long cycle) { - CycleOp cycleOp = realDispenser.apply(cycle); + public EmitterOp getOp(long cycle) { + CycleOp cycleOp = realDispenser.getOp(cycle); return new EmitterOp(cycleOp); } } diff --git a/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/PollingOp.java b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/PollingOp.java new file mode 100644 index 000000000..2cce02b77 --- /dev/null +++ b/adapters-api/src/main/java/io/nosqlbench/adapters/api/activityimpl/uniform/opwrappers/PollingOp.java @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers; + +import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp; +import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; +import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.OpGenerator; +import io.nosqlbench.adapters.api.evalctx.CycleFunction; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.function.Predicate; + +public class PollingOp implements CycleOp, OpGenerator { + private final static Logger logger = LogManager.getLogger(PollingOp.class); + + private final CycleOp innerOp; + private final CycleFunction untilCondition; + private PollingOp nextOp = null; + + public PollingOp(CycleOp innerOp, CycleFunction untilCondition) { + this.innerOp = innerOp; + this.untilCondition = untilCondition; + } + @Override + public synchronized T apply(long value) { + T result = this.innerOp.apply(value); + untilCondition.setVariable("result",result); + boolean conditionIsMet = untilCondition.apply(value); + if (conditionIsMet) { + onConditionMet(result); + this.nextOp=null; + } else { + this.nextOp=this; + onConditionUnmet(result); + } + return result; + } + + @Override + public synchronized Op getNextOp() { + return nextOp; + } + + protected void onConditionMet(T value) { + logger.debug("for op " + this + ": condition MET for result " + value); + } + + protected void onConditionUnmet(T value) { + logger.debug("for op " + this + ": condition UNMET for result " + value); + } +} diff --git a/adapters-api/src/main/java/io/nosqlbench/adapters/api/scheduling/TimeoutPredicate.java b/adapters-api/src/main/java/io/nosqlbench/adapters/api/scheduling/TimeoutPredicate.java new file mode 100644 index 000000000..62b7f40f2 --- /dev/null +++ b/adapters-api/src/main/java/io/nosqlbench/adapters/api/scheduling/TimeoutPredicate.java @@ -0,0 +1,172 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.nosqlbench.adapters.api.scheduling; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.time.Duration; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Objects; +import java.util.concurrent.locks.LockSupport; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public class TimeoutPredicate { + private final static Logger logger = LogManager.getLogger(TimeoutPredicate.class); + + private final Supplier source; + private final Predicate predicate; + private final long timeoutNanos; + private final long blockingNanos; + private final boolean rethrow; + private long pulseTime = 0L; + private long startNanos; + private long endNanos; + + public static TimeoutPredicate of( + Predicate o, + Duration timeout, + Duration interval, + boolean b + ) { + return new TimeoutPredicate<>(o, timeout, interval, b); + } + + public static TimeoutPredicate of( + Supplier source, Predicate predicate, Duration timeout, Duration interval, boolean rethrow + ) { + return new TimeoutPredicate<>(source, predicate, timeout, interval, rethrow); + } + + private TimeoutPredicate( + Predicate predicate, + Duration timeout, + Duration minBlockingInterval, + boolean rethrow + ) { + this(null, predicate, timeout, minBlockingInterval, rethrow); + } + + private TimeoutPredicate( + Supplier source, + Predicate predicate, + Duration timeout, + Duration minBlockingInterval, + boolean rethrow + ) { + this.source = source; + this.predicate = Objects.requireNonNull(predicate); + + timeoutNanos = Objects.requireNonNull(timeout).toNanos(); + blockingNanos = Objects.requireNonNull(minBlockingInterval).toNanos(); + startNanos = System.nanoTime(); + endNanos = startNanos + timeoutNanos; + this.rethrow = rethrow; + } + + + public Result test(T value) { + long totalNanos = blockUntilNextInterval(); + + boolean isComplete = false; + try { + isComplete = predicate.test(value); + long remaining = endNanos - pulseTime; + if (isComplete) { + return new Result<>(value, Status.complete, totalNanos, timeoutNanos, null); + } else if (remaining > 0) { + return new Result<>(value, Status.pending, totalNanos, timeoutNanos, null); + } else { + return new Result<>(value, Status.incomplete, totalNanos, timeoutNanos, null); + } + } catch (Exception e) { + logger.error("exception caught while evaluating timeout predicate:" + e, e); + if (rethrow) throw new RuntimeException(e); + return new Result<>(value, Status.error, totalNanos, timeoutNanos, new RuntimeException(e)); + } + } + + public Result test() { + Objects.requireNonNull(source); + T value = source.get(); + return test(value); + } + + public long blockUntilNextInterval() { + if (pulseTime == 0L) { // first try has no delay + pulseTime = System.nanoTime(); + return 0L; + } + + long now = System.nanoTime(); + long targetNanos = Math.max(now, Math.min(endNanos, pulseTime + blockingNanos)); + while (now <= targetNanos) { + LockSupport.parkNanos(targetNanos - now); + now = System.nanoTime(); + } + pulseTime = now; + long currentTime = pulseTime - startNanos; + return currentTime; + } + + public static enum Status { + complete, + pending, + incomplete, + error + } + + public static record Result( + T value, + Status status, + long duration_ns, + long timeout_ns, + RuntimeException exception + ) { + public String timeSummary() { + return statusDuration() + " / " + + timeoutDuration(); + } + + public Duration statusDuration() { + return Duration.of(duration_ns - (duration_ns % 
1_000_000), ChronoUnit.NANOS); + } + + public Duration timeoutDuration() { + return Duration.of(timeout_ns, ChronoUnit.NANOS); + } + + public boolean isComplete() { + return status==Status.complete; + } + public boolean isIncomplete() { + return status==Status.incomplete; + } + public boolean isPending() { + return status==Status.pending; + } + } + + @Override + public String toString() { + return "timeout:" + Duration.of(this.endNanos - this.startNanos, ChronoUnit.NANOS) + + ", current:" + Duration.of((this.endNanos - this.pulseTime), ChronoUnit.NANOS) + + ", interval:" + Duration.of(this.blockingNanos, ChronoUnit.NANOS); + } +} diff --git a/adapters-api/src/main/java/io/nosqlbench/adapters/api/templating/StrInterpolator.java b/adapters-api/src/main/java/io/nosqlbench/adapters/api/templating/StrInterpolator.java index b521a59f7..1c6fe9100 100644 --- a/adapters-api/src/main/java/io/nosqlbench/adapters/api/templating/StrInterpolator.java +++ b/adapters-api/src/main/java/io/nosqlbench/adapters/api/templating/StrInterpolator.java @@ -117,14 +117,15 @@ public class StrInterpolator implements Function { // } accesses.put(key,value); - logger.debug("Template parameter '" + key + "' applied as '" + value + "'"); + logger.trace("Template parameter '" + key + "' applied as '" + value + "'"); + // TODO summarize these to how many times return value; } public Map checkpointAccesses() { LinkedHashMap accesses = new LinkedHashMap<>(this.accesses); - logger.debug("removed template params after applying:" + accesses); + logger.trace("removed template params after applying:" + accesses); this.accesses.clear(); return accesses; diff --git a/adapters-api/src/test/java/io/nosqlbench/adapters/api/scheduling/TimeoutPredicateTest.java b/adapters-api/src/test/java/io/nosqlbench/adapters/api/scheduling/TimeoutPredicateTest.java new file mode 100644 index 000000000..24523ab60 --- /dev/null +++ b/adapters-api/src/test/java/io/nosqlbench/adapters/api/scheduling/TimeoutPredicateTest.java @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.nosqlbench.adapters.api.scheduling; + +import org.junit.jupiter.api.Test; + +import java.time.Duration; + +import static org.assertj.core.api.Assertions.assertThat; + +public class TimeoutPredicateTest { + + @Test + public void testNeverCompletablePreciate() { + int interval=10; + int timeout=500; + + TimeoutPredicate wontMakeIt = TimeoutPredicate.of( + ()->false, + l -> l, + Duration.ofMillis(timeout), + Duration.ofMillis(interval), + true + ); + + TimeoutPredicate.Result resultNow = wontMakeIt.test(); + assertThat(resultNow.duration_ns()).isEqualTo(0L); + assertThat(resultNow.value()).isFalse(); + assertThat(resultNow.status()).isEqualTo(TimeoutPredicate.Status.pending); + + resultNow = wontMakeIt.test(); + assertThat(resultNow.duration_ns()).isBetween(10*1_000_000L,50*1_000_000L); + assertThat(resultNow.value()).isFalse(); + assertThat(resultNow.status()).isEqualTo(TimeoutPredicate.Status.pending); + + while (resultNow.status()== TimeoutPredicate.Status.pending) { + resultNow=wontMakeIt.test(); + } + + assertThat(resultNow.status()).isEqualTo(TimeoutPredicate.Status.incomplete); + + } + + @Test + public void testImmediatelyCompletablePreciate() { + int interval=10; + int timeout=5000; + TimeoutPredicate canMakeIt = TimeoutPredicate.of( + ()->true, + l -> l, + Duration.ofMillis(timeout), + Duration.ofMillis(interval), + true + ); + + TimeoutPredicate.Result resultNow = canMakeIt.test(); + assertThat(resultNow.duration_ns()).isEqualTo(0L); + assertThat(resultNow.value()).isTrue(); + assertThat(resultNow.status()).isEqualTo(TimeoutPredicate.Status.complete); + + } + + @Test + public void testEventuallyCompletePredicate() { + + int interval=250; + int timeout=5000; + long now = System.currentTimeMillis(); + long inASec = now+1000; + TimeoutPredicate canMakeIt = TimeoutPredicate.of( + System::currentTimeMillis, + l -> l>inASec, + Duration.ofMillis(timeout), + Duration.ofMillis(interval), + true + ); + + TimeoutPredicate.Result result = canMakeIt.test(); + System.out.println(result); + + while (result.status()== TimeoutPredicate.Status.pending) { +// canMakeIt.blockUntilNextInterval(); + result=canMakeIt.test(); + System.out.println(canMakeIt); + System.out.println(result); + } + + assertThat(result.status()).isEqualTo(TimeoutPredicate.Status.complete); + } + +} diff --git a/docsys/pom.xml b/docsys/pom.xml index cc0b87df4..2b597bf7e 100644 --- a/docsys/pom.xml +++ b/docsys/pom.xml @@ -88,7 +88,7 @@ org.apache.commons commons-compress - 1.26.0 + 1.26.1 @@ -100,18 +100,18 @@ com.fasterxml.jackson.jaxrs jackson-jaxrs-json-provider - 2.16.1 + 2.17.0 com.sun.xml.bind jaxb-core - 4.0.4 + 4.0.5 com.sun.xml.bind jaxb-impl - 4.0.4 + 4.0.5 diff --git a/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLI.java b/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLI.java index 353e4518d..eca8b8c58 100644 --- a/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLI.java +++ b/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLI.java @@ -49,6 +49,7 @@ import io.nosqlbench.engine.core.logging.NBLoggerConfig; import io.nosqlbench.engine.core.metadata.MarkdownFinder; import io.nosqlbench.nb.annotations.Service; import io.nosqlbench.nb.annotations.ServiceSelector; +import io.nosqlbench.nb.api.nbio.ResolverForNBIOCache; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.config.ConfigurationFactory; @@ -220,6 +221,26 @@ public class NBCLI implements Function, NBLabeledElement { NBCLI.logger = 
LogManager.getLogger("NBCLI"); NBIO.addGlobalIncludes(options.wantsIncludes()); + NBIO.setUseNBIOCache(options.wantsToUseNBIOCache()); + if(options.wantsToUseNBIOCache()) { + logger.info(() -> "Configuring options for NBIO Cache"); + logger.info(() -> "Setting NBIO Cache Force Update to " + options.wantsNbioCacheForceUpdate()); + ResolverForNBIOCache.setForceUpdate(options.wantsNbioCacheForceUpdate()); + logger.info(() -> "Setting NBIO Cache Verify Checksum to " + options.wantsNbioCacheVerify()); + ResolverForNBIOCache.setVerifyChecksum(options.wantsNbioCacheVerify()); + if (options.getNbioCacheDir() != null) { + logger.info(() -> "Setting NBIO Cache directory to " + options.getNbioCacheDir()); + ResolverForNBIOCache.setCacheDir(options.getNbioCacheDir()); + } + if (options.getNbioCacheMaxRetries() != null) { + try { + ResolverForNBIOCache.setMaxRetries(Integer.parseInt(options.getNbioCacheMaxRetries())); + logger.info(() -> "Setting NBIO Cache max retries to " + options.getNbioCacheMaxRetries()); + } catch (NumberFormatException e) { + logger.error("Invalid value for nbio-cache-max-retries: " + options.getNbioCacheMaxRetries()); + } + } + } if (options.wantsBasicHelp()) { System.out.println(this.loadHelpFile("basic.md")); @@ -381,16 +402,6 @@ public class NBCLI implements Function, NBLabeledElement { // reporters.start(10, options.getReportInterval()); // } // -// for ( -// final LoggerConfigData histoLogger : options.getHistoLoggerConfigs()) -// ActivityMetrics.addHistoLogger(sessionName, histoLogger.pattern, histoLogger.file, histoLogger.interval); -// for ( -// final LoggerConfigData statsLogger : options.getStatsLoggerConfigs()) -// ActivityMetrics.addStatsLogger(sessionName, statsLogger.pattern, statsLogger.file, statsLogger.interval); -// for ( -// final LoggerConfigData classicConfigs : options.getClassicHistoConfigs()) -// ActivityMetrics.addClassicHistos(sessionName, classicConfigs.pattern, classicConfigs.file, classicConfigs.interval); -// // if (options.getConsoleLogLevel().isGreaterOrEqualTo(NBLogLevel.WARN)) { // options.setWantsStackTraces(true); // NBCLI.logger.debug(() -> "enabling stack traces since log level is " + options.getConsoleLogLevel()); @@ -446,7 +457,12 @@ public class NBCLI implements Function, NBLabeledElement { } session.create().pushReporter(uri, intervalMs, NBLabels.forKV()); }); - + for (final NBCLIOptions.LoggerConfigData histoLogger : options.getHistoLoggerConfigs()) { + session.create().histoLogger(sessionName, histoLogger.pattern, histoLogger.file, histoLogger.millis); + } + for (final NBCLIOptions.LoggerConfigData statsLogger : options.getStatsLoggerConfigs()) { + session.create().histoStatsLogger(sessionName, statsLogger.pattern, statsLogger.file, statsLogger.millis); + } ExecutionResult sessionResult = session.apply(options.getCommands()); logger.info(sessionResult); diff --git a/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLIOptions.java b/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLIOptions.java index 65cd3c61d..dd2970ee4 100644 --- a/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLIOptions.java +++ b/engine-cli/src/main/java/io/nosqlbench/engine/cli/NBCLIOptions.java @@ -121,7 +121,8 @@ public class NBCLIOptions { private static final String GRAPHITE_LOG_LEVEL = "--graphite-log-level"; private static final String REPORT_CSV_TO = "--report-csv-to"; private static final String REPORT_SUMMARY_TO = "--report-summary-to"; - private static final String REPORT_SUMMARY_TO_DEFAULT = "stdout:60,_LOGS_/_SESSION__summary.txt"; + 
private static final String SUMMARY = "--summary"; + private static final String REPORT_SUMMARY_TO_DEFAULT = "_LOGS_/_SESSION__summary.txt"; private static final String PROGRESS = "--progress"; private static final String WITH_LOGGING_PATTERN = "--with-logging-pattern"; private static final String LOGGING_PATTERN = "--logging-pattern"; @@ -136,6 +137,11 @@ public class NBCLIOptions { private static final String DEFAULT_CONSOLE_PATTERN = "TERSE"; private static final String DEFAULT_LOGFILE_PATTERN = "VERBOSE"; private final static String ENABLE_DEDICATED_VERIFICATION_LOGGER = "--enable-dedicated-verification-logging"; + private final static String USE_NBIO_CACHE = "--use-nbio-cache"; + private final static String NBIO_CACHE_FORCE_UPDATE = "--nbio-cache-force-update"; + private final static String NBIO_CACHE_NO_VERIFY = "--nbio-cache-no-verify"; + private final static String NBIO_CACHE_DIR = "--nbio-cache-dir"; + private final static String NBIO_CACHE_MAX_RETRIES = "--nbio-cache-max-retries"; // private static final String DEFAULT_CONSOLE_LOGGING_PATTERN = "%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n"; @@ -203,8 +209,13 @@ public class NBCLIOptions { private boolean wantsConsoleMetrics = true; private String annotateLabelSpec = ""; private String metricsLabelSpec = ""; - private String wantsToCatResource =""; + private String wantsToCatResource = ""; private long heartbeatIntervalMs = 10000; + private boolean useNBIOCache = false; + private boolean nbioCacheForceUpdate = false; + private boolean nbioCacheVerify = true; + private String nbioCacheDir; + private String nbioCacheMaxRetries; public boolean wantsLoggedMetrics() { return this.wantsConsoleMetrics; @@ -271,7 +282,7 @@ public class NBCLIOptions { } public boolean wantsToCatResource() { - return this.wantsToCatResource!=null && !this.wantsToCatResource.isEmpty(); + return this.wantsToCatResource != null && !this.wantsToCatResource.isEmpty(); } public enum Mode { @@ -596,6 +607,10 @@ public class NBCLIOptions { arglist.removeFirst(); this.reportCsvTo = arglist.removeFirst(); break; + case NBCLIOptions.SUMMARY: + arglist.removeFirst(); + this.reportSummaryTo = "stdout:0"; + break; case NBCLIOptions.REPORT_SUMMARY_TO: arglist.removeFirst(); this.reportSummaryTo = this.readWordOrThrow(arglist, "report summary file"); @@ -644,7 +659,27 @@ public class NBCLIOptions { case HEARTBEAT_MILLIS: arglist.removeFirst(); this.heartbeatIntervalMs = - Long.parseLong(this.readWordOrThrow(arglist, "heartbeat interval in ms")); + Long.parseLong(this.readWordOrThrow(arglist, "heartbeat interval in ms")); + break; + case USE_NBIO_CACHE: + arglist.removeFirst(); + this.useNBIOCache = true; + break; + case NBIO_CACHE_FORCE_UPDATE: + arglist.removeFirst(); + this.nbioCacheForceUpdate = true; + break; + case NBIO_CACHE_NO_VERIFY: + arglist.removeFirst(); + this.nbioCacheVerify = false; + break; + case NBCLIOptions.NBIO_CACHE_DIR: + arglist.removeFirst(); + this.nbioCacheDir = this.readWordOrThrow(arglist, "a NBIO cache directory"); + break; + case NBIO_CACHE_MAX_RETRIES: + arglist.removeFirst(); + this.nbioCacheMaxRetries = this.readWordOrThrow(arglist, "the maximum number of attempts to fetch a resource from the cache"); break; default: nonincludes.addLast(arglist.removeFirst()); @@ -673,8 +708,7 @@ public class NBCLIOptions { """ .replaceAll("ARG", cmdParam) .replaceAll("PROG", "nb5") - .replaceAll("INCLUDES", String.join(",", wantsIncludes())) - ; + .replaceAll("INCLUDES", String.join(",", wantsIncludes())); final String debugMessage = """ @@ 
-683,7 +717,7 @@ public class NBCLIOptions { COMMANDSTREAM """ .replaceAll("COMMANDSTREAM", - String.join(" ",arglist)); + String.join(" ", arglist)); if (consoleLevel.isGreaterOrEqualTo(NBLogLevel.INFO)) { System.out.println(debugMessage); } @@ -808,6 +842,21 @@ public class NBCLIOptions { public NBLogLevel getConsoleLogLevel() { return this.consoleLevel; } + public boolean wantsToUseNBIOCache() { + return this.useNBIOCache; + } + public boolean wantsNbioCacheForceUpdate() { + return nbioCacheForceUpdate; + } + public boolean wantsNbioCacheVerify() { + return nbioCacheVerify; + } + public String getNbioCacheDir() { + return nbioCacheDir; + } + public String getNbioCacheMaxRetries() { + return nbioCacheMaxRetries; + } private String readWordOrThrow(final LinkedList arglist, final String required) { if (null == arglist.peekFirst()) diff --git a/engine-cli/src/main/resources/commandline.md b/engine-cli/src/main/resources/commandline.md index 943cc9f0d..e4d8ff442 100644 --- a/engine-cli/src/main/resources/commandline.md +++ b/engine-cli/src/main/resources/commandline.md @@ -226,13 +226,21 @@ The classic metrics logging format is used to report results into the logfile fo This format is not generally human-friendly, so a better summary report is provided by default to the console and/or a specified summary file by default. +By default, summaries are always reported to a summary file in the logs directory. +It is highly recommended that you use this form in general. Users are often more interested +in seeing play-by-play high-level details on console, and more human-readable forms of metrics +summaries are easily created with other options. + Examples: + # report to auto-named summary file for every session + --report-summary-to _LOGS_/_SESSION_.summary + # report to console if session ran more than 60 seconds --report-summary-to stdout:60 - # report to auto-named summary file for every session - --report-summary-to _LOGS_/_SESSION_.summary + # simply enable reporting summary to console only, same as above + --summary # do both (the default) --report-summary-to stdout:60,_LOGS_/_SESSION_.summary diff --git a/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioPreprocessorTest.java b/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioPreprocessorTest.java index 2715dfa23..ed1269943 100644 --- a/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioPreprocessorTest.java +++ b/engine-cli/src/test/java/io/nosqlbench/engine/cli/NBCLIScenarioPreprocessorTest.java @@ -194,4 +194,11 @@ public class NBCLIScenarioPreprocessorTest { assertThat(cmds1.get(0).getArgValueOrNull("cycles_test")).isNull(); } + + @Test + public void testThatDuplicateParamInScenarioDefThrowsError() { + assertThatExceptionOfType(BasicError.class) + .isThrownBy(() -> new NBCLIOptions(new String[]{"scenario_test", "duplicate_param"}, NBCLIOptions.Mode.ParseAllOptions)) + .withMessageContaining("Duplicate occurrence of parameter \"threads\""); + } } diff --git a/engine-cli/src/test/resources/activities/scenario_test.yaml b/engine-cli/src/test/resources/activities/scenario_test.yaml index b6ebb34f5..2741b4fa7 100644 --- a/engine-cli/src/test/resources/activities/scenario_test.yaml +++ b/engine-cli/src/test/resources/activities/scenario_test.yaml @@ -11,6 +11,10 @@ scenarios: template_test: with_template: run driver=stdout cycles=TEMPLATE(cycles-test,10) + duplicate_param: + schema: run driver=stdout workload==scenario_test threads=auto tags=block:"schema.*" threads=1 doundef==undef + + blocks: schema: 
ops: diff --git a/engine-core/pom.xml b/engine-core/pom.xml index 510d22325..e1bf8a73c 100644 --- a/engine-core/pom.xml +++ b/engine-core/pom.xml @@ -55,13 +55,17 @@ - org.graalvm.js - js-scriptengine + org.graalvm.polyglot + polyglot - org.graalvm.tools - profiler - runtime + org.graalvm.polyglot + js + pom + + + org.graalvm.js + js-scriptengine @@ -95,12 +99,6 @@ ${revision} - - org.graalvm.js - js - runtime - - org.openjdk.jmh jmh-core diff --git a/engine-core/src/main/java/io/nosqlbench/engine/api/activityapi/planning/OpSource.java b/engine-core/src/main/java/io/nosqlbench/engine/api/activityapi/planning/OpSource.java index 625e7e9a8..138f16780 100644 --- a/engine-core/src/main/java/io/nosqlbench/engine/api/activityapi/planning/OpSource.java +++ b/engine-core/src/main/java/io/nosqlbench/engine/api/activityapi/planning/OpSource.java @@ -30,7 +30,7 @@ import java.util.function.LongFunction; public interface OpSource extends LongFunction { static OpSource of(OpSequence> seq) { - return (long l) -> seq.apply(l).apply(l); + return (long l) -> seq.apply(l).getOp(l); } /** diff --git a/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/SimpleActivity.java b/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/SimpleActivity.java index 5e060179e..7d153b65a 100644 --- a/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/SimpleActivity.java +++ b/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/SimpleActivity.java @@ -16,7 +16,7 @@ package io.nosqlbench.engine.api.activityimpl; -import io.nosqlbench.adapters.api.activityimpl.uniform.EmitterOpDispenserWrapper; +import io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers.EmitterOpDispenserWrapper; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.CycleOp; import io.nosqlbench.engine.core.lifecycle.scenario.container.InvokableResult; import io.nosqlbench.nb.api.components.core.NBComponent; @@ -50,7 +50,7 @@ import io.nosqlbench.adapters.api.activityconfig.yaml.OpsDocList; import io.nosqlbench.engine.api.activityapi.simrate.StrideRateSpec; import io.nosqlbench.engine.api.activityimpl.motor.RunStateTally; import io.nosqlbench.adapters.api.activityimpl.uniform.DriverAdapter; -import io.nosqlbench.adapters.api.activityimpl.uniform.DryRunOpDispenserWrapper; +import io.nosqlbench.adapters.api.activityimpl.uniform.opwrappers.DryRunOpDispenserWrapper; import io.nosqlbench.adapters.api.activityimpl.uniform.decorators.SyntheticOpTemplateProvider; import io.nosqlbench.adapters.api.activityimpl.uniform.flowtypes.Op; import io.nosqlbench.adapters.api.templating.ParsedOp; @@ -420,7 +420,7 @@ public class SimpleActivity extends NBStatusComponent implements Activity, Invok protected OpSequence> createOpSourceFromParsedOps( // Map> adapterCache, // Map> mapperCache, - List> adapters, + List> adapters, List pops ) { try { @@ -470,7 +470,7 @@ public class SimpleActivity extends NBStatusComponent implements Activity, Invok // } planner.addOp((OpDispenser) dispenser, ratio); } catch (Exception e) { - throw new OpConfigError("Error while mapping op from template named '" + pop.getName() + "': " + e.getMessage(),e); + throw new OpConfigError("Error while mapping op from template named '" + pop.getName() + "': " + e.getMessage(), e); } } if (0 < dryrunCount) { @@ -569,7 +569,8 @@ public class SimpleActivity extends NBStatusComponent implements Activity, Invok * @param opinit * A function to map an OpTemplate to the executable operation form required by * the native driver for this activity. 
- * @param defaultAdapter The adapter which will be used for any op templates with no explicit adapter + * @param defaultAdapter + * The adapter which will be used for any op templates with no explicit adapter * @return The sequence of operations as determined by filtering and ratios */ @Deprecated(forRemoval = true) diff --git a/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/motor/CoreMotor.java b/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/motor/CoreMotor.java index ddaf76329..5b150be1b 100644 --- a/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/motor/CoreMotor.java +++ b/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/motor/CoreMotor.java @@ -211,116 +211,6 @@ public class CoreMotor implements ActivityDefObserver, Motor, Stoppable { long strideDelay = 0L; long cycleDelay = 0L; - // Reviewer Note: This separate of code paths was used to avoid impacting the - // previously logic for the SyncAction type. It may be consolidated later once - // the async action is proven durable -// if (action instanceof AsyncAction) { -// -// @SuppressWarnings("unchecked") -// AsyncAction async = (AsyncAction) action; -// -// opTracker = new OpTrackerImpl<>(activity, slotId); -// opTracker.setCycleOpFunction(async.getOpInitFunction()); -// -// StrideOutputConsumer strideconsumer = null; -// if (action instanceof StrideOutputConsumer) { -// strideconsumer = (StrideOutputConsumer) async; -// } -// -// motorState.enterState(Running); -// while (motorState.get() == Running) { -// -// CycleSegment cycleSegment = null; -// -// try (Timer.Context inputTime = inputTimer.time()) { -// cycleSegment = input.getInputSegment(stride); -// } -// -// if (cycleSegment == null) { -// logger.trace(() -> "input exhausted (input " + input + ") via null segment, stopping motor thread " + slotId); -// motorState.enterState(Finished); -// continue; -// } -// -// if (strideRateLimiter != null) { -// // block for strides rate limiter -// strideDelay = strideRateLimiter.block(); -// } -// -// StrideTracker strideTracker = new StrideTracker<>( -// strideServiceTimer, -// stridesResponseTimer, -// strideDelay, -// cycleSegment.peekNextCycle(), -// stride, -// output, -// strideconsumer); -// strideTracker.start(); -// -// long strideStart = System.nanoTime(); -// -// while (!cycleSegment.isExhausted() && motorState.get() == Running) { -// long cyclenum = cycleSegment.nextCycle(); -// if (cyclenum < 0) { -// if (cycleSegment.isExhausted()) { -// logger.trace(() -> "input exhausted (input " + input + ") via negative read, stopping motor thread " + slotId); -// motorState.enterState(Finished); -// continue; -// } -// } -// -// if (motorState.get() != Running) { -// logger.trace(()->"motor stopped in cycle " + cyclenum + ", stopping motor thread " + slotId); -// continue; -// } -// -// if (cycleRateLimiter != null) { -// // Block for cycle rate limiter -// cycleDelay = cycleRateLimiter.block(); -// } -// -// try { -// TrackedOp op = opTracker.newOp(cyclenum, strideTracker); -// op.setWaitTime(cycleDelay); -// -// synchronized (opTracker) { -// while (opTracker.isFull()) { -// try { -// logger.trace(() -> "Blocking for enqueue with (" + opTracker.getPendingOps() + "/" + opTracker.getMaxPendingOps() + ") queued ops"); -// optrackerBlockCounter.inc(); -// opTracker.wait(10000); -// } catch (InterruptedException ignored) { -// } -// } -// } -// -// async.enqueue(op); -// -// } catch (Exception t) { -// logger.error(()->"Error while processing async cycle " + cyclenum + ", 
error:" + t); -// throw t; -// } -// } -// -// -// } -// -// if (motorState.get() == Finished) { -// boolean finished = opTracker.awaitCompletion(60000); -// if (finished) { -// logger.debug(() -> "slot " + this.slotId + " completed successfully"); -// } else { -// logger.warn(() -> "slot " + this.slotId + " was stopped before completing successfully"); -// } -// } -// -// if (motorState.get() == Stopping) { -// motorState.enterState(Stopped); -// } -// -// -// } else if (action instanceof SyncAction sync) { - if (action instanceof SyncAction sync) { cycleServiceTimer = activity.getInstrumentation().getOrCreateCyclesServiceTimer(); strideServiceTimer = activity.getInstrumentation().getOrCreateStridesServiceTimer(); @@ -406,7 +296,7 @@ public class CoreMotor implements ActivityDefObserver, Motor, Stoppable { } } else { - throw new RuntimeException("Valid Action implementations must implement either the SyncAction or the AsyncAction sub-interface"); + throw new RuntimeException("Valid Action implementations must implement SyncAction"); } if (motorState.get() == Stopping) { @@ -427,7 +317,7 @@ public class CoreMotor implements ActivityDefObserver, Motor, Stoppable { @Override public String toString() { - return "slot:" + this.slotId + "; state:" + motorState.get(); + return this.activity.getAlias() + ": slot:" + this.slotId + "; state:" + motorState.get(); } @Override diff --git a/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/actions/StandardAction.java b/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/actions/StandardAction.java index 6c9eb604c..05d4bd507 100644 --- a/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/actions/StandardAction.java +++ b/engine-core/src/main/java/io/nosqlbench/engine/api/activityimpl/uniform/actions/StandardAction.java @@ -78,7 +78,7 @@ public class StandardAction, R extends Op> impl try (Timer.Context ct = bindTimer.time()) { dispenser = opsequence.apply(cycle); - op = dispenser.apply(cycle); + op = dispenser.getOp(cycle); } catch (Exception e) { throw new RuntimeException("while binding request in cycle " + cycle + " for op template named '" + (dispenser!=null?dispenser.getOpName():"NULL")+ "': " + e.getMessage(), e); diff --git a/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/activity/ActivityExecutor.java b/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/activity/ActivityExecutor.java index 7ad32521b..dd7725617 100644 --- a/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/activity/ActivityExecutor.java +++ b/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/activity/ActivityExecutor.java @@ -512,7 +512,9 @@ public class ActivityExecutor implements NBLabeledElement, ParameterMap.Listener RunState maxState = state.getMaxState(); activity.setRunState(maxState); if (maxState == RunState.Errored) { - throw new RuntimeException("Error while waiting for activity completion" + (this.exception!=null ? this.exception.toString() : "")); + throw new RuntimeException("Error while waiting for activity completion with states [" + tally.toString() + "], error=" + (this.exception!=null ? 
+ this.exception.toString() : "[no error on activity executor]")); + } } diff --git a/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/session/NBSession.java b/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/session/NBSession.java index ccc77c892..f702d0ef3 100644 --- a/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/session/NBSession.java +++ b/engine-core/src/main/java/io/nosqlbench/engine/core/lifecycle/session/NBSession.java @@ -16,22 +16,23 @@ package io.nosqlbench.engine.core.lifecycle.session; +import io.nosqlbench.engine.cmdstream.Cmd; +import io.nosqlbench.engine.core.lifecycle.ExecutionResult; +import io.nosqlbench.engine.core.lifecycle.scenario.container.NBBufferedContainer; import io.nosqlbench.engine.core.lifecycle.scenario.container.NBCommandParams; +import io.nosqlbench.engine.core.lifecycle.scenario.container.NBContainer; +import io.nosqlbench.engine.core.lifecycle.scenario.execution.NBCommandResult; import io.nosqlbench.engine.core.lifecycle.scenario.execution.NBInvokableCommand; +import io.nosqlbench.nb.api.components.decorators.NBTokenWords; import io.nosqlbench.nb.api.components.status.NBHeartbeatComponent; import io.nosqlbench.nb.api.engine.activityimpl.ActivityDef; import io.nosqlbench.nb.api.engine.metrics.instruments.MetricCategory; import io.nosqlbench.nb.api.labels.NBLabeledElement; -import io.nosqlbench.nb.api.components.decorators.NBTokenWords; -import io.nosqlbench.engine.cmdstream.Cmd; -import io.nosqlbench.engine.core.lifecycle.ExecutionResult; -import io.nosqlbench.engine.core.lifecycle.scenario.container.NBBufferedContainer; -import io.nosqlbench.engine.core.lifecycle.scenario.container.NBContainer; -import io.nosqlbench.engine.core.lifecycle.scenario.execution.NBCommandResult; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import java.util.*; +import java.util.List; +import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; diff --git a/engine-core/src/main/java/io/nosqlbench/engine/core/logging/NBLoggerConfig.java b/engine-core/src/main/java/io/nosqlbench/engine/core/logging/NBLoggerConfig.java index dc9c2d73f..3579f5f8a 100644 --- a/engine-core/src/main/java/io/nosqlbench/engine/core/logging/NBLoggerConfig.java +++ b/engine-core/src/main/java/io/nosqlbench/engine/core/logging/NBLoggerConfig.java @@ -69,7 +69,8 @@ public class NBLoggerConfig extends ConfigurationFactory { */ public static Map BUILTIN_OVERRIDES = Map.of( // ERROR StatusConsoleListener Unable to locate appender "SESSION_APPENDER" for logger config "oshi.util" - "oshi.util", Level.INFO + "oshi.util", Level.INFO, + "io.jhdf", Level.INFO ); /** diff --git a/engine-core/src/test/java/io/nosqlbench/engine/core/lifecycle/session/CmdParserTest.java b/engine-core/src/test/java/io/nosqlbench/engine/core/lifecycle/session/CmdParserTest.java index 086d0dfe4..59f846fef 100644 --- a/engine-core/src/test/java/io/nosqlbench/engine/core/lifecycle/session/CmdParserTest.java +++ b/engine-core/src/test/java/io/nosqlbench/engine/core/lifecycle/session/CmdParserTest.java @@ -26,6 +26,7 @@ import java.util.LinkedList; import java.util.List; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.junit.jupiter.api.Assertions.*; // test for dots and underscores in names @@ -98,4 +99,11 @@ class CmdParserTest { assertThat(cmds.getFirst().getArgValue("param1")).isEqualTo("value1"); } + @Test + public 
void testThatDuplicateParameterThrowsBasicError() { + assertThatExceptionOfType(BasicError.class) + .isThrownBy(() -> CmdParser.parseArgvCommands(new LinkedList<>(List.of("run", "threads=auto", "threads=1")))) + .withMessageContaining("Duplicate occurrence of option: threads"); + } + } diff --git a/mvn-defaults/pom.xml b/mvn-defaults/pom.xml index be49213be..8c13ca342 100644 --- a/mvn-defaults/pom.xml +++ b/mvn-defaults/pom.xml @@ -80,7 +80,7 @@ org.apache.groovy groovy - 4.0.18 + 4.0.20 org.snakeyaml @@ -280,7 +280,7 @@ com.github.oshi oshi-core-java11 - 6.4.12 + 6.5.0 com.google.code.gson @@ -290,7 +290,7 @@ com.amazonaws aws-java-sdk-s3 - 1.12.658 + 1.12.681 com.elega9t @@ -358,33 +358,22 @@ 2.4.0-b180830.0438 + - org.graalvm.sdk - graal-sdk - 23.0.3 + org.graalvm.polyglot + polyglot + 23.1.0 - org.graalvm.js + org.graalvm.polyglot js - 23.0.3 - runtime + 23.1.2 + pom org.graalvm.js js-scriptengine - 23.0.3 - - - org.graalvm.tools - profiler - 23.0.3 - runtime - - - org.graalvm.tools - chromeinspector - 23.0.3 - runtime + 23.1.2 @@ -397,30 +386,30 @@ org.apache.logging.log4j log4j-api - 2.23.0 + 2.23.1 org.apache.logging.log4j log4j-core - 2.23.0 + 2.23.1 org.apache.logging.log4j log4j-slf4j-impl - 2.23.0 + 2.23.1 org.apache.logging.log4j log4j-slf4j2-impl - 2.23.0 + 2.23.1 org.apache.logging.log4j log4j-jcl - 2.23.0 + 2.23.1 @@ -685,6 +674,7 @@ **/generated/** **/jmh_generated/** + **/generated-sources/** true Copyright (c) 2022 nosqlbench @@ -694,6 +684,7 @@ org.apache.maven.plugins maven-enforcer-plugin + 3.2.1 enforce-java @@ -705,6 +696,7 @@ [21,) + @@ -846,7 +838,7 @@ org.apache.maven.plugins maven-gpg-plugin - 1.6 + 3.0.1 sign-artifacts @@ -904,20 +896,6 @@ - - - - - - - - - - - - - - diff --git a/nb-api/pom.xml b/nb-api/pom.xml index 61e27578a..126b0bcd0 100644 --- a/nb-api/pom.xml +++ b/nb-api/pom.xml @@ -142,8 +142,13 @@ - org.graalvm.sdk - graal-sdk + org.graalvm.polyglot + polyglot + + + org.graalvm.polyglot + js + pom diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBBaseComponent.java b/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBBaseComponent.java index c8e978394..f45dc4e48 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBBaseComponent.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBBaseComponent.java @@ -21,12 +21,16 @@ import io.nosqlbench.nb.api.components.events.ComponentOutOfScope; import io.nosqlbench.nb.api.components.events.DownEvent; import io.nosqlbench.nb.api.components.events.NBEvent; import io.nosqlbench.nb.api.components.events.UpEvent; +import io.nosqlbench.nb.api.engine.metrics.MetricsCloseable; import io.nosqlbench.nb.api.engine.metrics.instruments.NBMetric; import io.nosqlbench.nb.api.labels.NBLabels; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; public class NBBaseComponent extends NBBaseComponentMetrics implements NBComponent, NBTokenWords, NBComponentTimeline { @@ -40,6 +44,7 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone protected Exception error; protected long started_ns, teardown_ns, closed_ns, errored_ns, started_epoch_ms; protected NBInvokableState state = NBInvokableState.STARTING; + private static final List metricsCloseables = new ArrayList<>(); public NBBaseComponent(NBComponent parentComponent) { 
this(parentComponent, NBLabels.forKV()); @@ -133,6 +138,9 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone for (NBComponent child : children) { child.close(); } + for (MetricsCloseable metricsCloseable : metricsCloseables) { + metricsCloseable.closeMetrics(); + } } catch (Exception e) { onError(e); } finally { @@ -302,4 +310,9 @@ public class NBBaseComponent extends NBBaseComponentMetrics implements NBCompone public long started_epoch_ms() { return this.started_epoch_ms; } + + public void addMetricsCloseable(MetricsCloseable metric) { + metricsCloseables.add(metric); + } + } diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBBaseComponentMetrics.java b/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBBaseComponentMetrics.java index 5576d2ffc..9e5ceda89 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBBaseComponentMetrics.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBBaseComponentMetrics.java @@ -16,18 +16,22 @@ package io.nosqlbench.nb.api.components.core; +import com.codahale.metrics.*; +import com.codahale.metrics.Timer; import io.nosqlbench.nb.api.engine.metrics.instruments.MetricCategory; import io.nosqlbench.nb.api.tagging.TagFilter; import io.nosqlbench.nb.api.engine.metrics.instruments.NBMetric; import java.util.*; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; public class NBBaseComponentMetrics implements NBComponentMetrics { private final Lock lock = new ReentrantLock(false); private final Map metrics = new ConcurrentHashMap<>(); + private final static List listeners = new CopyOnWriteArrayList<>(); @Override public String addComponentMetric(NBMetric metric, MetricCategory category, String requiredDescription) { try { @@ -37,11 +41,54 @@ public class NBBaseComponentMetrics implements NBComponentMetrics { throw new RuntimeException("Can't add the same metric by label set to the same live component:" + openMetricsName); } metrics.put(openMetricsName,metric); + for (MetricRegistryListener listener : listeners) { + notifyListenerOfAddedMetric(listener, metric, openMetricsName); + } return metric.getLabels().linearizeAsMetrics(); } finally { lock.unlock(); } } + + public void addListener(MetricRegistryListener listener) { + listeners.add(listener); + + for (Map.Entry entry : metrics.entrySet()) { + notifyListenerOfAddedMetric(listener, entry.getValue(), entry.getKey()); + } + } + + public void removeListener(MetricRegistryListener listener) { + listeners.remove(listener); + } + + private void notifyListenerOfAddedMetric(MetricRegistryListener listener, NBMetric metric, String name) { + switch (metric) { + case Gauge gauge -> listener.onGaugeAdded(name, gauge); + case Counter counter -> listener.onCounterAdded(name, counter); + case Histogram histogram -> listener.onHistogramAdded(name, histogram); + case Meter meter -> listener.onMeterAdded(name, meter); + case Timer timer -> listener.onTimerAdded(name, timer); + case null, default -> throw new IllegalArgumentException("Unknown metric type: " + metric.getClass()); + } + } + + private void onMetricRemoved(String name, NBMetric metric) { + for (MetricRegistryListener listener : listeners) { + notifyListenerOfRemovedMetric(name, metric, listener); + } + } + + private void notifyListenerOfRemovedMetric(String name, NBMetric metric, MetricRegistryListener listener) { + switch (metric) { + 
case Gauge gauge -> listener.onGaugeRemoved(name); + case Counter counter -> listener.onCounterRemoved(name); + case Histogram histogram -> listener.onHistogramRemoved(name); + case Meter meter -> listener.onMeterRemoved(name); + case Timer timer -> listener.onTimerRemoved(name); + case null, default -> throw new IllegalArgumentException("Unknown metric type: " + metric.getClass()); + } + } @Override public NBMetric getComponentMetric(String name) { return metrics.get(name); diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBCreators.java b/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBCreators.java index 24adfdf36..f09a29f3f 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBCreators.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/components/core/NBCreators.java @@ -18,8 +18,7 @@ package io.nosqlbench.nb.api.components.core; import io.nosqlbench.nb.api.csvoutput.CsvOutputPluginWriter; import com.codahale.metrics.Meter; -import io.nosqlbench.nb.api.engine.metrics.DeltaHdrHistogramReservoir; -import io.nosqlbench.nb.api.engine.metrics.DoubleSummaryGauge; +import io.nosqlbench.nb.api.engine.metrics.*; import io.nosqlbench.nb.api.engine.metrics.instruments.*; import io.nosqlbench.nb.api.engine.metrics.reporters.*; import io.nosqlbench.nb.api.histo.HdrHistoLog; @@ -37,11 +36,14 @@ import com.codahale.metrics.MetricAttribute; import com.codahale.metrics.MetricFilter; import org.apache.logging.log4j.Marker; +import java.io.File; import java.io.PrintStream; import java.util.*; import java.nio.file.Path; +import java.util.concurrent.TimeUnit; import java.util.function.Supplier; +import java.util.regex.Pattern; public class NBCreators { @@ -159,6 +161,34 @@ public class NBCreators { return new NBShutdownHook(component); } + public void histoLogger(String sessionName, String pattern, String filename, long millis) { + if (filename.contains("_SESSION_")) { + filename = filename.replace("_SESSION_", sessionName); + } + Pattern compiledPattern = Pattern.compile(pattern); + File logfile = new File(filename); + + HistoIntervalLogger histoIntervalLogger = + new HistoIntervalLogger(base, sessionName, logfile, compiledPattern, millis); + logger.debug(() -> "Adding " + histoIntervalLogger + " to session " + sessionName); + base.addMetricsCloseable(histoIntervalLogger); + base.addListener(histoIntervalLogger); + } + + public void histoStatsLogger(String sessionName, String pattern, String filename, long millis) { + if (filename.contains("_SESSION_")) { + filename = filename.replace("_SESSION_", sessionName); + } + Pattern compiledPattern = Pattern.compile(pattern); + File logfile = new File(filename); + + HistoStatsLogger histoStatsLogger = + new HistoStatsLogger(base, sessionName, logfile, compiledPattern, millis, TimeUnit.NANOSECONDS); + logger.debug(() -> "Adding " + histoStatsLogger + " to session " + sessionName); + base.addMetricsCloseable(histoStatsLogger); + base.addListener(histoStatsLogger); + } + public static class Log4jReporterBuilder { private final NBComponent component; private Logger logger = LogManager.getLogger(Log4JMetricsReporter.class); diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/config/standard/ConfigModel.java b/nb-api/src/main/java/io/nosqlbench/nb/api/config/standard/ConfigModel.java index 7da373dce..27fbef95e 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/config/standard/ConfigModel.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/config/standard/ConfigModel.java @@ -236,7 +236,7 @@ public class 
ConfigModel implements NBConfigModel { private ConfigModel expand(ConfigModel configModel, Map config) { List> expanders = configModel.params.stream() - .filter(p -> p.getExpander()!=null).toList(); + .filter(p -> p.getExpander() != null).toList(); for (Param expandingParameter : expanders) { for (String name : expandingParameter.getNames()) { @@ -294,16 +294,23 @@ public class ConfigModel implements NBConfigModel { // For each provided configuration element ... for (String configkey : config.keySet()) { Param element = this.paramsByName.get(configkey); + String warning = "Unknown config parameter '" + configkey + "' in config model while configuring " + getOf().getSimpleName() + + ", possible parameter names are " + this.paramsByName.keySet() + "."; if (element == null) { - StringBuilder paramhelp = new StringBuilder( - "Unknown config parameter '" + configkey + "' in config model while configuring " + getOf().getSimpleName() - + ", possible parameter names are " + this.paramsByName.keySet() + "." - ); + String warnonly = System.getenv("NB_CONFIG_WARNINGS_ONLY"); + if (warnonly != null) { + System.out.println("WARNING: " + warning); + } else { + StringBuilder paramhelp = new StringBuilder( + "Unknown config parameter '" + configkey + "' in config model while configuring " + getOf().getSimpleName() + + ", possible parameter names are " + this.paramsByName.keySet() + "." + ); - ConfigSuggestions.getForParam(this, configkey) - .ifPresent(suggestion -> paramhelp.append(" ").append(suggestion)); + ConfigSuggestions.getForParam(this, configkey) + .ifPresent(suggestion -> paramhelp.append(" ").append(suggestion)); - throw new BasicError(paramhelp.toString()); + throw new BasicError(paramhelp.toString()); + } } Object value = config.get(configkey); } diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBIO.java b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBIO.java index 04c320c82..87badc54a 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBIO.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBIO.java @@ -43,6 +43,8 @@ public class NBIO implements NBPathsAPI.Facets { private static String[] globalIncludes = new String[0]; + private static boolean useNBIOCache; + public synchronized static void addGlobalIncludes(String[] globalIncludes) { NBIO.globalIncludes = globalIncludes; } @@ -158,12 +160,25 @@ public class NBIO implements NBPathsAPI.Facets { return this; } + /** + * {@inheritDoc} + */ + @Override + public NBPathsAPI.GetPrefixes cachedContent() { + this.resolver = URIResolvers.inNBIOCache(); + return this; + } + /** * {@inheritDoc} */ @Override public NBPathsAPI.GetPrefixes allContent() { - this.resolver = URIResolvers.inFS().inCP().inURLs(); + if (useNBIOCache) { + this.resolver = URIResolvers.inFS().inCP().inNBIOCache(); + } else { + this.resolver = URIResolvers.inFS().inCP().inURLs(); + } return this; } @@ -343,6 +358,14 @@ public class NBIO implements NBPathsAPI.Facets { return new NBIO().remoteContent(); } + /** + * Return content from the NBIO cache. If the content is not in the cache look for it in the given + * URL and put it in the cache. 
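Reviewer note: a usage sketch for the new cache entry point may help here. This is a minimal, hypothetical example; the dataset URL is made up, and the fluent calls (pathname(), one()) are assumed to behave the same for the cache resolver as they do for the other NBIO resolvers:

import io.nosqlbench.nb.api.nbio.Content;
import io.nosqlbench.nb.api.nbio.NBIO;

// resolve a remote file through the NBIO cache; the first call downloads it
// into the local cache directory, later calls read the cached copy
Content<?> dataset = NBIO.cached()
    .pathname("https://example.com/datasets/vectors.hdf5")   // hypothetical URL
    .one();

// alternatively, flip the global switch so that NBIO.all() also consults the cache resolver
NBIO.setUseNBIOCache(true);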
+ * + * @return this builder + */ + public static NBPathsAPI.GetPrefixes cached() { return new NBIO().cachedContent(); } + /** * {@inheritDoc} @@ -628,4 +651,13 @@ public class NBIO implements NBPathsAPI.Facets { ", extensionSets=" + extensionSets + '}'; } + + public boolean useNBIOCache() { + return useNBIOCache; + } + + public static void setUseNBIOCache(boolean wantsToUseNBIOCache) { + useNBIOCache = wantsToUseNBIOCache; + } + } diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBIOResolverConditions.java b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBIOResolverConditions.java new file mode 100644 index 000000000..7f3e338bc --- /dev/null +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBIOResolverConditions.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package io.nosqlbench.nb.api.nbio; + +public enum NBIOResolverConditions { + UPDATE_AND_VERIFY, + UPDATE_NO_VERIFY, + LOCAL_VERIFY, + LOCAL_NO_VERIFY +} diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBPathsAPI.java b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBPathsAPI.java index d3bb76431..75e65af02 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBPathsAPI.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/NBPathsAPI.java @@ -67,6 +67,14 @@ public interface NBPathsAPI { */ GetPrefixes fileContent(); + /** + * Return content from the NBIO cache. If the content is not in the cache look for it in the given + * URL and put it in the cache. + * + * @return this builder + */ + GetPrefixes cachedContent(); + /** * Return content from everywhere, from remote URls, or from the file system and then the internal * bundled content if not found in the file system first. diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/ResolverForClasspath.java b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/ResolverForClasspath.java index b2d659e5a..003aaa436 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/ResolverForClasspath.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/ResolverForClasspath.java @@ -101,9 +101,11 @@ public class ResolverForClasspath implements ContentResolver { public List resolveDirectory(URI uri) { List path = resolvePaths(uri); List dirs = new ArrayList<>(); - for (Path dirpath : path) { - if (Files.isDirectory(dirpath)) { - dirs.add(dirpath); + if (path != null) { + for (Path dirpath : path) { + if (Files.isDirectory(dirpath)) { + dirs.add(dirpath); + } } } return dirs; diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/ResolverForNBIOCache.java b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/ResolverForNBIOCache.java new file mode 100644 index 000000000..52355e9d3 --- /dev/null +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/ResolverForNBIOCache.java @@ -0,0 +1,329 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package io.nosqlbench.nb.api.nbio; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.*; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URL; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; +import java.util.List; + +public class ResolverForNBIOCache implements ContentResolver { + public static final ResolverForNBIOCache INSTANCE = new ResolverForNBIOCache(); + private final static Logger logger = LogManager.getLogger(ResolverForNBIOCache.class); + private static String cacheDir = System.getProperty("user.home") + "/.nosqlbench/nbio-cache/"; + private static boolean forceUpdate = false; + private static boolean verifyChecksum = true; + private static int maxRetries = 3; + + @Override + public List> resolve(URI uri) { + List> contents = new ArrayList<>(); + Path path = resolvePath(uri); + + if (path != null) { + contents.add(new PathContent(path)); + } + return contents; + } + + /** + * This method is used to resolve the path of a given URI. + * It first checks if the URI has a scheme (http or https) and if it does, it tries to resolve the path from the cache. + * If the file is not in the cache, it tries to download it from the remote URL. + * If the URI does not have a scheme, it returns null. 
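Reviewer note: a small sketch of the URI-to-cache-path mapping that resolvePath() applies, using a made-up URL; the default cache root comes from the cacheDir field above:

// https://example.com/datasets/vectors.hdf5              (hypothetical remote file)
//   -> ~/.nosqlbench/nbio-cache/datasets/vectors.hdf5           cached payload
//   -> ~/.nosqlbench/nbio-cache/datasets/vectors.hdf5.sha256    cached checksum sidecar
String cacheDir = System.getProperty("user.home") + "/.nosqlbench/nbio-cache/";
java.nio.file.Path cached = java.nio.file.Path.of(
    cacheDir + java.net.URI.create("https://example.com/datasets/vectors.hdf5").getPath());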
+ * + * @param uri the URI to resolve the path for + * @return the resolved Path object, or null if the URI does not have a scheme or the path could not be resolved + */ + private Path resolvePath(URI uri) { + if (uri.getScheme() != null && !uri.getScheme().isEmpty() && + (uri.getScheme().equalsIgnoreCase("http") || + uri.getScheme().equalsIgnoreCase("https"))) { + Path cachedFilePath = Path.of(cacheDir + uri.getPath()); + if (Files.isReadable(cachedFilePath)) { + return pathFromLocalCache(cachedFilePath, uri); + } + else { + return pathFromRemoteUrl(uri); + } + } + return null; + } + + private boolean downloadFile(URI uri, Path cachedFilePath, URLContent checksum) { + int retries = 0; + boolean success = false; + while (retries < maxRetries) { + try { + if (this.remoteFileExists(uri)) { + logger.info(() -> "Downloading remote file " + uri + " to cache at " + cachedFilePath); + ReadableByteChannel channel = Channels.newChannel(uri.toURL().openStream()); + FileOutputStream outputStream = new FileOutputStream(cachedFilePath.toFile()); + outputStream.getChannel().transferFrom(channel, 0, Long.MAX_VALUE); + outputStream.close(); + channel.close(); + logger.info(() -> "Downloaded remote file to cache at " + cachedFilePath); + if(checksum == null || verifyChecksum(cachedFilePath, checksum)) { + success = true; + break; + } + } else { + logger.error(() -> "Error downloading remote file to cache at " + cachedFilePath + ", retrying..."); + retries++; + } + } catch (IOException e) { + logger.error(() -> "Error downloading remote file to cache at " + cachedFilePath + ", retrying..."); + retries++; + } + } + return success; + } + + private boolean verifyChecksum(Path cachedFilePath, URLContent checksum) { + try { + String localChecksumStr = generateSHA256Checksum(cachedFilePath.toString()); + Path checksumPath = checksumPath(cachedFilePath); + Files.writeString(checksumPath, localChecksumStr); + logger.debug(() -> "Generated local checksum and saved to cache at " + checksumPath); + String remoteChecksum = stripControlCharacters(new String(checksum.getInputStream().readAllBytes())); + if (localChecksumStr.equals(remoteChecksum)) { + return true; + } else { + logger.warn(() -> "checksums do not match for " + checksumPath + " and " + checksum); + return false; + } + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private static String stripControlCharacters(String input) { + return input.replaceAll("[\\p{Cntrl}]+$", ""); + } + + /** + * This method is used to download a file from a remote URL and store it in a local cache. + * It first creates the cache directory if it doesn't exist. + * Then it tries to download the file and if successful, it generates a SHA256 checksum for the downloaded file. + * It then compares the generated checksum with the remote checksum. + * If the checksums match, it returns the path to the cached file. + * If the checksums don't match or if there was an error during the download, it cleans up the cache and throws a RuntimeException. 
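Reviewer note: a standalone sketch (not part of this change) of producing the .sha256 sidecar that downloadFile()/verifyChecksum() look for next to the remote file; it mirrors the lowercase-hex format produced by generateSHA256Checksum() below:

import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;

public class MakeSha256Sidecar {
    public static void main(String[] args) throws Exception {
        Path file = Path.of(args[0]);                      // e.g. vectors.hdf5 (hypothetical)
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        md.update(Files.readAllBytes(file));
        StringBuilder hex = new StringBuilder();
        for (byte b : md.digest()) hex.append(String.format("%02x", b));
        // the resolver fetches <remote-url>.sha256 and compares it to its locally computed value
        Files.writeString(Path.of(file + ".sha256"), hex.toString());
    }
}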
+ * + * @param uri the URI of the remote file to download + * @return the Path to the downloaded file in the local cache + * @throws RuntimeException if there was an error during the download or if the checksums don't match + */ + private Path pathFromRemoteUrl(URI uri) { + Path cachedFilePath = Path.of(cacheDir + uri.getPath()); + createCacheDir(cachedFilePath); + if (!verifyChecksum) { + return execute(NBIOResolverConditions.UPDATE_NO_VERIFY, cachedFilePath, uri); + } + else { + return execute(NBIOResolverConditions.UPDATE_AND_VERIFY, cachedFilePath, uri); + } + } + + private void createCacheDir(Path cachedFilePath) { + Path dir = cachedFilePath.getParent(); + if (!Files.exists(dir)) { + try { + Files.createDirectories(dir); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + private void cleanupCache(Path cachedFilePath) { + if (!cachedFilePath.toFile().delete()) + logger.warn(() -> "Could not delete cached file " + cachedFilePath); + Path checksumPath = checksumPath(cachedFilePath); + if (!checksumPath.toFile().delete()) + logger.warn(() -> "Could not delete cached checksum " + checksumPath); + } + + private Path execute(NBIOResolverConditions condition, Path cachedFilePath, URI uri) { + String remoteChecksumFileStr = uri.getPath() + ".sha256"; + URLContent checksum = resolveURI(URI.create(uri.toString().replace(uri.getPath(), remoteChecksumFileStr))); + switch(condition) { + case UPDATE_AND_VERIFY: + if (checksum == null) { + logger.warn(() -> "Remote checksum file " + remoteChecksumFileStr + " does not exist. Proceeding without verification"); + } + if (downloadFile(uri, cachedFilePath, checksum)) { + return cachedFilePath; + } else { + throw new RuntimeException("Error downloading remote file to cache at " + cachedFilePath); + } + case UPDATE_NO_VERIFY: + logger.warn(() -> "Checksum verification is disabled, downloading remote file to cache at " + cachedFilePath); + if (downloadFile(uri, cachedFilePath, null)) { + return cachedFilePath; + } else { + throw new RuntimeException("Error downloading remote file to cache at " + cachedFilePath); + } + case LOCAL_VERIFY: + if (checksum == null) { + logger.warn(() -> "Remote checksum file does not exist, returning cached file " + cachedFilePath); + return cachedFilePath; + } + try { + String localChecksum = Files.readString(getOrCreateChecksum(cachedFilePath)); + String remoteChecksum = stripControlCharacters(new String(checksum.getInputStream().readAllBytes())); + if (localChecksum.equals(remoteChecksum)) { + return cachedFilePath; + } + else { + logger.warn(() -> "Checksums do not match, rehydrating cache " + cachedFilePath); + return pathFromRemoteUrl(uri); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + case LOCAL_NO_VERIFY: + return cachedFilePath; + default: + throw new RuntimeException("Invalid NBIO Cache condition"); + } + } + + /** + * This method is used to retrieve a file from the local cache. + * It first checks if the file exists in the cache and if a checksum file is present. + * If the checksum file is not present, it generates a new one. + * If the "force update" option is enabled, it deletes the cached file and downloads it from the remote URL. + * If the "checksum verification" option is enabled, it compares the local checksum with the remote checksum. + * If the checksums match, it returns the path to the cached file. + * If the checksums don't match, it deletes the cached file and downloads it from the remote URL. 
+ * If the remote file or checksum does not exist, it returns the cached file. + * + * @param cachedFilePath the Path to the cached file + * @param uri the URI of the remote file + * @return the Path to the cached file + * @throws RuntimeException if there was an error during the checksum comparison or if the checksums don't match + */ + private Path pathFromLocalCache(Path cachedFilePath, URI uri) { + + if (forceUpdate) { + return pathFromRemoteUrl(uri); + } + if (!verifyChecksum) { + logger.warn(() -> "Checksum verification is disabled, returning cached file " + cachedFilePath); + return execute(NBIOResolverConditions.LOCAL_NO_VERIFY, cachedFilePath, uri); + } else { + return execute(NBIOResolverConditions.LOCAL_VERIFY, cachedFilePath, uri); + } + + } + + private Path getOrCreateChecksum(Path cachedFilePath) { + Path checksumPath = checksumPath(cachedFilePath); + if (!Files.isReadable(checksumPath)) { + try { + Files.writeString(checksumPath, generateSHA256Checksum(cachedFilePath.toString())); + } catch (IOException | NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + } + return checksumPath; + } + + private Path checksumPath(Path cachedFilePath) { + return Path.of(cachedFilePath + ".sha256"); + } + + private static String generateSHA256Checksum(String filePath) throws IOException, NoSuchAlgorithmException { + MessageDigest md = MessageDigest.getInstance("SHA-256"); + try (InputStream is = new FileInputStream(filePath)) { + byte[] buffer = new byte[8192]; + int bytesRead; + while ((bytesRead = is.read(buffer)) != -1) { + md.update(buffer, 0, bytesRead); + } + } + byte[] digest = md.digest(); + StringBuilder sb = new StringBuilder(); + for (byte b : digest) { + sb.append(String.format("%02x", b)); + } + return sb.toString(); + } + + private URLContent resolveURI(URI uri) { + try { + URL url = uri.toURL(); + InputStream inputStream = url.openStream(); + logger.debug(() -> "Found accessible remote file at " + url); + return new URLContent(url, inputStream); + } catch (IOException e) { + logger.error(() -> "Unable to find content at URI '" + uri + "', this often indicates a configuration error."); + return null; + } + } + + private boolean remoteFileExists(URI uri) { + try { + HttpURLConnection connection = (HttpURLConnection) uri.toURL().openConnection(); + connection.setRequestMethod("HEAD"); + int responseCode = connection.getResponseCode(); + return responseCode == HttpURLConnection.HTTP_OK; + } catch (Exception e) { + return false; // Error occurred or file does not exist + } + } + + @Override + public List resolveDirectory(URI uri) { + List dirs = new ArrayList<>(); + + Path path = Path.of(cacheDir + uri.getPath()); + if (Files.isDirectory(path)) { + dirs.add(path); + } + return dirs; + } + + public static void setCacheDir(String cacheDir) { + ResolverForNBIOCache.cacheDir = cacheDir; + } + + public static void setForceUpdate(boolean forceUpdate) { + ResolverForNBIOCache.forceUpdate = forceUpdate; + } + + public static void setVerifyChecksum(boolean verifyChecksum) { + ResolverForNBIOCache.verifyChecksum = verifyChecksum; + } + + public static void setMaxRetries(int maxRetries) { + ResolverForNBIOCache.maxRetries = maxRetries; + } + +} diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/URIResolver.java b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/URIResolver.java index 4aff35b69..90e2be2af 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/URIResolver.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/URIResolver.java @@ -36,7 +36,8 @@ 
public class URIResolver implements ContentResolver { private static final List EVERYWHERE = List.of( ResolverForURL.INSTANCE, ResolverForFilesystem.INSTANCE, - ResolverForClasspath.INSTANCE + ResolverForClasspath.INSTANCE, + ResolverForNBIOCache.INSTANCE ); private List extensions; @@ -87,6 +88,16 @@ public class URIResolver implements ContentResolver { return this; } + /** + * Include resources within the NBIO cache or download them if they are not found. + * + * @return this URISearch + */ + public URIResolver inNBIOCache() { + loaders.add(ResolverForNBIOCache.INSTANCE); + return this; + } + public List> resolve(String uri) { return resolve(URI.create(uri)); } diff --git a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/URIResolvers.java b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/URIResolvers.java index c1a4c852b..5c08d8eb5 100644 --- a/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/URIResolvers.java +++ b/nb-api/src/main/java/io/nosqlbench/nb/api/nbio/URIResolvers.java @@ -52,4 +52,8 @@ public class URIResolvers { public static URIResolver inClasspath() { return new URIResolver().inCP(); } + + public static URIResolver inNBIOCache() { + return new URIResolver().inNBIOCache(); + } } diff --git a/nb5/pom.xml b/nb5/pom.xml index cae2bd24c..218497a0c 100644 --- a/nb5/pom.xml +++ b/nb5/pom.xml @@ -63,7 +63,7 @@ io.nosqlbench - adapter-pinecone + adapter-cqld4 ${revision} @@ -101,11 +101,6 @@ adapter-jdbc ${revision} - - io.nosqlbench - adapter-milvus - ${revision} - io.nosqlbench adapter-mongodb @@ -121,18 +116,12 @@ adapter-aws-opensearch ${revision} - - io.nosqlbench - adapter-cqld4 - ${revision} - io.nosqlbench adapter-s4j ${revision} - @@ -255,230 +244,242 @@ - - adapter-opensearch - - - - ../adapter-opensearch/target - - - - - io.nosqlbench - adapter-aws-opensearch - ${revision} - - - - - adapter-cqld4 - - - - ../adapter-cqld4/target - - - - - io.nosqlbench - adapter-cqld4 - ${revision} - - - - - adapter-s4j - - - - ../adapter-s4j/target - - - - - io.nosqlbench - adapter-s4j - ${revision} - - - - - adapter-pinecone - - - - ../adapter-pinecone/target - - - - - io.nosqlbench - adapter-pinecone - ${revision} - - - - - adapter-mongodb - - - - ../adapter-mongodb/target - - - - - io.nosqlbench - adapter-mongodb - ${revision} - - - - - adapter-neo4j - - - - ../adapter-neo4j/target - - - - - io.nosqlbench - adapter-neo4j - ${revision} - - - - - adapter-tcp - - - - ../adapter-tcp/target - - - - - io.nosqlbench - adapter-tcp - ${revision} - - - - - adapter-dynamodb - - - - ../adapter-dynamodb/target - - - - - io.nosqlbench - adapter-dynamodb - ${revision} - - - - - adapter-http - - - - ../adapter-http/target - - - - - io.nosqlbench - adapter-http - ${revision} - - - - - adapter-pulsar - - - - ../adapter-pulsar/target - - - - - io.nosqlbench - adapter-pulsar - ${revision} - - - - - adapter-kafka - - - - ../adapter-kafka/target - - - - - io.nosqlbench - adapter-kafka - ${revision} - - - - - adapter-amqp - - - - ../adapter-amqp/target - - - - - io.nosqlbench - adapter-amqp - ${revision} - - - - - adapter-jdbc - - - - ../adapter-jdbc/target - - - - - io.nosqlbench - adapter-jdbc - ${revision} - - - - - adapter-milvus - - - - ../adapter-milvus/target - - - - - io.nosqlbench - adapter-milvus - ${revision} - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/CMD_findmax.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/CMD_findmax.java new file mode 100644 index 000000000..c87c04e95 --- /dev/null +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/CMD_findmax.java @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2020-2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.scenarios.simframe.optimizers.findmax; + +import io.nosqlbench.engine.api.activityapi.core.Activity; +import io.nosqlbench.engine.api.activityapi.simrate.CycleRateSpec; +import io.nosqlbench.engine.api.activityapi.simrate.SimRateSpec; +import io.nosqlbench.engine.core.lifecycle.scenario.container.ContainerActivitiesController; +import io.nosqlbench.engine.core.lifecycle.scenario.container.InvokableResult; +import io.nosqlbench.engine.core.lifecycle.scenario.container.NBBufferedContainer; +import io.nosqlbench.engine.core.lifecycle.scenario.container.NBCommandParams; +import io.nosqlbench.engine.core.lifecycle.scenario.execution.NBBaseCommand; +import io.nosqlbench.nb.api.components.events.ParamChange; +import io.nosqlbench.nb.api.components.events.SetThreads; +import io.nosqlbench.scenarios.simframe.SimFrameUtils; +import io.nosqlbench.scenarios.simframe.capture.SimFrameCapture; +import io.nosqlbench.scenarios.simframe.capture.SimFrameJournal; +import io.nosqlbench.scenarios.simframe.capture.SimFrameValueData; +import io.nosqlbench.scenarios.simframe.optimizers.CMD_optimize; +import io.nosqlbench.scenarios.simframe.planning.SimFrame; +import io.nosqlbench.scenarios.simframe.planning.SimFrameFunctionAnalyzer; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.PrintWriter; +import java.io.Reader; + +public class CMD_findmax extends NBBaseCommand { + private final static Logger logger = LogManager.getLogger(CMD_optimize.class); + + public CMD_findmax(NBBufferedContainer parentComponent, String phaseName, String targetScenario) { + super(parentComponent, phaseName, targetScenario); + } + + @Override + public Object invoke(NBCommandParams params, PrintWriter stdout, PrintWriter stderr, Reader stdin, ContainerActivitiesController controller) { + Activity flywheel = SimFrameUtils.findFlywheelActivity(controller, params.get("activity")); + stdout.println("starting analysis on activity '" + flywheel.getAlias() + "'"); + SimFrameUtils.awaitActivity(flywheel); + + SimFrameJournal journal = new SimFrameJournal<>(); + FindmaxParamModel model = new FindmaxParamModel(); + + FindmaxConfig findmaxConfig = new FindmaxConfig(params); + switch(findmaxConfig.optimization_type()) { + case "rate" -> + model.add("rate", + findmaxConfig.min_value(), // min + findmaxConfig.base_value(), // initial + 
findmaxConfig.max_value(), // max + rate -> flywheel.onEvent(ParamChange.of(new CycleRateSpec( + rate, + 1.1d, + SimRateSpec.Verb.restart))) + ); + case "threads" -> + model.add("threads", + findmaxConfig.min_value(), // min + findmaxConfig.base_value(), // initial + findmaxConfig.max_value(), // max + threads -> flywheel.onEvent(ParamChange.of(new SetThreads((int) (threads)))) + ); + default -> + throw new RuntimeException("Unsupported optimization type: " + findmaxConfig.optimization_type()); + } + + SimFrameCapture capture = new SimFrameValueData(flywheel); + FindmaxFrameFunction frameFunction = new FindmaxFrameFunction(controller, findmaxConfig, flywheel, capture, journal, model); + SimFrameFunctionAnalyzer analyzer = new FindmaxAnalyzer(frameFunction, findmaxConfig); + SimFrame best = analyzer.analyze(); + stdout.println("Best Run:\n" + best); + return best.params(); + } + + +} diff --git a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxAnalyzer.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxAnalyzer.java new file mode 100644 index 000000000..bcd7d456a --- /dev/null +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxAnalyzer.java @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2020-2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
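Reviewer note: for doc purposes, these are the two knobs the findmax model can drive, shown standalone with made-up values; the event and spec types are exactly the ones used in the switch in CMD_findmax above:

// retarget the cycle rate of the flywheel activity (rate, burst ratio, restart semantics as above)
flywheel.onEvent(ParamChange.of(new CycleRateSpec(500.0d, 1.1d, SimRateSpec.Verb.restart)));
// retarget the number of worker threads
flywheel.onEvent(ParamChange.of(new SetThreads(32)));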
+ * + */ + +package io.nosqlbench.scenarios.simframe.optimizers.findmax; + +import io.nosqlbench.scenarios.simframe.planning.SimFrame; +import io.nosqlbench.scenarios.simframe.planning.SimFrameAction; +import io.nosqlbench.scenarios.simframe.planning.SimFrameFunctionAnalyzer; + +import java.util.Comparator; + +import static io.nosqlbench.virtdata.core.bindings.VirtDataLibrary.logger; + +public class FindmaxAnalyzer extends SimFrameFunctionAnalyzer { + + public FindmaxAnalyzer(FindmaxFrameFunction function, FindmaxConfig config) { + super(function, config); + } + + @Override + protected FrameResult nextFrame() { + double newValue; + SimFrame last = function.getJournal().last(); + SimFrame best = function.getJournal().bestRun(); + if (best.index() == last.index()) { // got better consecutively + newValue = last.params().paramValues()[0] + config.step_value(); + config = new FindmaxConfig( + config.sample_time_ms(), + config.max_value(), + config.base_value(), + config.min_value(), + (config.step_value() * config.value_incr()), + config.value_incr(), + config.sample_incr(), + config.min_settling_ms(), + config.optimization_type(), + new double[]{newValue} + ); + } else if (best.index() == last.index() - 1) { + // got worse consecutively, this may be collapsed out since the general case below covers it (test first) + if (((last.params().paramValues()[0] + config.step_value()) - + (best.params().paramValues()[0] + config.step_value())) <= config.step_value()) { + logger.info("could not divide search space further, stop condition met"); + return new FrameResult(best.params().paramValues()[0], SimFrameAction.stop_run); + } else { + newValue = best.params().paramValues()[0] + config.step_value(); + config = new FindmaxConfig( + (config.sample_time_ms() * config.sample_incr()), + config.max_value(), + config.base_value(), + config.min_value(), + config.step_value(), + config.value_incr(), + config.sample_incr(), + (config.min_settling_ms() * 4), + config.optimization_type(), + new double[]{newValue} + ); + } + } else { // any other case + // find next frame with higher rate but lower value, the closest one by rate + SimFrame nextWorseFrameWithHigherRate = function.getJournal().frames().stream() + .filter(f -> f.value() < best.value()) + .filter(f -> f.params().paramValues()[0] + config.step_value() > (best.params().paramValues()[0] + config.step_value())) + .min(Comparator.comparingDouble(f -> f.params().paramValues()[0] + config.step_value())) + .orElseThrow(() -> new RuntimeException("inconsistent samples")); + if ((nextWorseFrameWithHigherRate.params().paramValues()[0] + config.step_value() - + best.params().paramValues()[0] + config.step_value()) > config.step_value()) { + newValue = best.params().paramValues()[0] + config.step_value(); + config = new FindmaxConfig( + (config.sample_time_ms() * config.sample_incr()), + config.max_value(), + config.base_value(), + config.min_value(), + config.step_value(), + config.value_incr(), + config.sample_incr(), + (config.min_settling_ms() * 2), + config.optimization_type(), + new double[]{newValue} + ); + } else { + logger.info("could not divide search space further, stop condition met"); + return new FrameResult(best.params().paramValues()[0], SimFrameAction.stop_run); + } + } + double[] point = {newValue}; + return new FrameResult(function.value(point), SimFrameAction.continue_run); + } + + @Override + protected FrameResult initialFrame() { + return new FrameResult(function.value(config.initialPoint()), SimFrameAction.continue_run); + } + +} diff --git 
a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxConfig.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxConfig.java new file mode 100644 index 000000000..f49a2d66a --- /dev/null +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxConfig.java @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2020-2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.scenarios.simframe.optimizers.findmax; + +import io.nosqlbench.engine.core.lifecycle.scenario.container.NBCommandParams; + +public record FindmaxConfig ( + double sample_time_ms, + double max_value, + double base_value, + double min_value, + double step_value, + double value_incr, + double sample_incr, + long min_settling_ms, + String optimization_type, + double[] initial_point +) { + public double[] initialPoint() { + return new double[]{base_value}; + } + + public FindmaxConfig(NBCommandParams params) { + this( + params.maybeGet("sample_time_ms").map(Double::parseDouble).orElse(4000d), + params.maybeGet("max_value").map(Double::parseDouble).orElse(10000d), + params.maybeGet("base_value").map(Double::parseDouble).orElse(10d), + params.maybeGet("min_value").map(Double::parseDouble).orElse(0d), + params.maybeGet("step_value").map(Double::parseDouble).orElse(100d), + params.maybeGet("value_incr").map(Double::parseDouble).orElse(2d), + params.maybeGet("sample_incr").map(Double::parseDouble).orElse(1.2d), + params.maybeGet("min_settling_ms").map(Long::parseLong).orElse(4000L), + params.maybeGet("optimization_type").orElse("rate"), + new double[]{params.maybeGet("base_value").map(Double::parseDouble).orElse(10d)} + ); + } +} diff --git a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxFrameFunction.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxFrameFunction.java new file mode 100644 index 000000000..3a64fd2cd --- /dev/null +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxFrameFunction.java @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2020-2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
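Reviewer note: a sketch of the effective defaults when findmax is given no tuning parameters, with the values copied verbatim from the NBCommandParams constructor above:

FindmaxConfig defaults = new FindmaxConfig(
    4000d,              // sample_time_ms
    10000d,             // max_value
    10d,                // base_value
    0d,                 // min_value
    100d,               // step_value
    2d,                 // value_incr
    1.2d,               // sample_incr
    4000L,              // min_settling_ms
    "rate",             // optimization_type
    new double[]{10d}   // initial_point, i.e. {base_value}
);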
+ */ + +package io.nosqlbench.scenarios.simframe.optimizers.findmax; + +import io.nosqlbench.engine.api.activityapi.core.Activity; +import io.nosqlbench.engine.api.activityapi.core.RunState; +import io.nosqlbench.engine.core.lifecycle.scenario.container.ContainerActivitiesController; +import io.nosqlbench.scenarios.simframe.capture.SimFrameCapture; +import io.nosqlbench.scenarios.simframe.capture.SimFrameJournal; +import io.nosqlbench.scenarios.simframe.planning.SimFrameFunction; + +public class FindmaxFrameFunction implements SimFrameFunction { + + private final Activity flywheel; + private final SimFrameCapture capture; + private final SimFrameJournal journal; + private final FindmaxConfig settings; + private final ContainerActivitiesController controller; + private final FindmaxParamModel model; + + public FindmaxFrameFunction( + ContainerActivitiesController controller, + FindmaxConfig settings, + Activity flywheel, + SimFrameCapture capture, + SimFrameJournal journal, + FindmaxParamModel model + ) { + this.controller = controller; + this.settings = settings; + this.flywheel = flywheel; + this.capture = capture; + this.journal = journal; + this.model = model; + } + + @Override + public double value(double[] point) { + System.out.println("━".repeat(40)); + FindmaxFrameParams params = model.apply(point); + System.out.println(params); + capture.startWindow(); + capture.awaitSteadyState(); + model.apply(point); + capture.restartWindow(); + System.out.println("sampling for " + settings.sample_time_ms()+"ms"); + controller.waitMillis((long) settings.sample_time_ms()); + capture.stopWindow(); + journal.record(params,capture.last()); + System.out.println(journal.last()); + if (flywheel.getRunStateTally().tallyFor(RunState.Running)==0) { + System.out.println("state:" + flywheel.getRunState()); + throw new RuntimeException("Early exit of flywheel activity '" + flywheel.getAlias() + "'. Can't continue."); + } + return journal.last().value(); + } + + @Override + public SimFrameJournal getJournal() { + return journal; + } +} diff --git a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxFrameParams.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxFrameParams.java new file mode 100644 index 000000000..e3c46e6bd --- /dev/null +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxFrameParams.java @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2020-2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.nosqlbench.scenarios.simframe.optimizers.findmax; + + +import io.nosqlbench.engine.core.lifecycle.scenario.container.InvokableResult; + +import java.util.LinkedHashMap; +import java.util.Map; + +public class FindmaxFrameParams implements InvokableResult { + + FindmaxParamModel model; + double[] paramValues; + + public FindmaxFrameParams(FindmaxParamModel model, double[] paramValues) { + this.model = model; + this.paramValues = paramValues; + } + + @Override + public String toString() { + return model.summarizeParams(paramValues); + } + + public double[] paramValues() { + return paramValues; + } + + @Override + public Map asResult() { + Map result = new LinkedHashMap<>(); + for (int i = 0; i < this.paramValues.length; i++) { + result.put(model.getParams().get(i).name(),String.valueOf(paramValues[i])); + } + return result; + } +} diff --git a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxParamModel.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxParamModel.java new file mode 100644 index 000000000..4e4f48dee --- /dev/null +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/FindmaxParamModel.java @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2020-2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.scenarios.simframe.optimizers.findmax; + +import io.nosqlbench.scenarios.simframe.planning.GenericParamModel; +import org.apache.commons.math4.legacy.optim.SimpleBounds; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.DoubleConsumer; + +public class FindmaxParamModel { + private final List params = new ArrayList<>(); + + public FindmaxParamModel add(String name, double min, double initial, double max, DoubleConsumer effector) { + if (min>initial || initial > max) { + throw new RuntimeException("parameters must be in min getParams() { + return this.params; + } +} diff --git a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/NBFindmaxInfo.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/NBFindmaxInfo.java new file mode 100644 index 000000000..c548b73ea --- /dev/null +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/findmax/NBFindmaxInfo.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2020-2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.nosqlbench.scenarios.simframe.optimizers.findmax; + +import io.nosqlbench.engine.core.lifecycle.scenario.execution.NBBaseCommand; +import io.nosqlbench.engine.core.lifecycle.scenario.execution.NBCommandInfo; +import io.nosqlbench.nb.annotations.Service; + +@Service(value = NBCommandInfo.class,selector = "findmax") +public class NBFindmaxInfo extends NBCommandInfo { + @Override + public Class getType() { + return CMD_findmax.class; + } +} diff --git a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/optimo/OptimoFrameFunction.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/optimo/OptimoFrameFunction.java index 4219f4343..b6d1ffffc 100644 --- a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/optimo/OptimoFrameFunction.java +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/optimizers/optimo/OptimoFrameFunction.java @@ -23,7 +23,7 @@ import io.nosqlbench.scenarios.simframe.capture.SimFrameCapture; import io.nosqlbench.scenarios.simframe.capture.SimFrameJournal; import io.nosqlbench.scenarios.simframe.planning.SimFrameFunction; -public class OptimoFrameFunction implements SimFrameFunction { +public class OptimoFrameFunction implements SimFrameFunction { private final Activity flywheel; private final SimFrameCapture capture; @@ -65,4 +65,9 @@ public class OptimoFrameFunction implements SimFrameFunction { } return journal.last().value(); } + + @Override + public SimFrameJournal getJournal() { + return journal; + } } diff --git a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/planning/SimFrameAction.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/planning/SimFrameAction.java new file mode 100644 index 000000000..e435f11b3 --- /dev/null +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/planning/SimFrameAction.java @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2020-2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package io.nosqlbench.scenarios.simframe.planning; + +public enum SimFrameAction { + continue_run, + stop_run +} diff --git a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/planning/SimFrameFunction.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/planning/SimFrameFunction.java index 77d08459f..7b629d660 100644 --- a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/planning/SimFrameFunction.java +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/planning/SimFrameFunction.java @@ -16,10 +16,13 @@ package io.nosqlbench.scenarios.simframe.planning; +import io.nosqlbench.engine.core.lifecycle.scenario.container.InvokableResult; +import io.nosqlbench.scenarios.simframe.capture.SimFrameJournal; import org.apache.commons.math4.legacy.analysis.MultivariateFunction; -public interface SimFrameFunction extends MultivariateFunction { +public interface SimFrameFunction

<P extends InvokableResult> extends MultivariateFunction { @Override double value(double[] point); + SimFrameJournal<P>

getJournal(); } diff --git a/nbr/src/main/java/io/nosqlbench/scenarios/simframe/planning/SimFrameFunctionAnalyzer.java b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/planning/SimFrameFunctionAnalyzer.java new file mode 100644 index 000000000..746b2906c --- /dev/null +++ b/nbr/src/main/java/io/nosqlbench/scenarios/simframe/planning/SimFrameFunctionAnalyzer.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2020-2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package io.nosqlbench.scenarios.simframe.planning; + +import io.nosqlbench.engine.core.lifecycle.scenario.container.InvokableResult; + +public abstract class SimFrameFunctionAnalyzer, C extends Record> { + protected final A function; + protected C config; + + protected SimFrameFunctionAnalyzer(A function, C config) { + this.function = function; + this.config = config; + } + + public record FrameResult(double value, SimFrameAction action) {} + + public SimFrame analyze() { + FrameResult result = initialFrame(); + while (result.action() == SimFrameAction.continue_run) { + result = nextFrame(); + } + return function.getJournal().bestRun(); + } + + protected abstract FrameResult nextFrame(); + protected abstract FrameResult initialFrame(); +} diff --git a/pom.xml b/pom.xml index ddd9ff36e..bdda03576 100644 --- a/pom.xml +++ b/pom.xml @@ -59,20 +59,23 @@ adapter-diag adapter-stdout - adapter-pinecone - adapter-dynamodb + adapter-cqld4 adapter-http adapter-tcp + adapter-dynamodb + adapter-mongodb adapter-pulsar + adapter-s4j adapter-kafka adapter-amqp adapter-jdbc + adapter-milvus adapter-mongodb adapter-neo4j adapter-aws-opensearch - adapter-cqld4 - adapter-s4j + + virtdata-api @@ -91,6 +94,151 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/virtdata-api/src/main/java/io/nosqlbench/engine/api/templating/ParsedTemplateMap.java b/virtdata-api/src/main/java/io/nosqlbench/engine/api/templating/ParsedTemplateMap.java index b71cd8316..5370ac8fb 100644 --- a/virtdata-api/src/main/java/io/nosqlbench/engine/api/templating/ParsedTemplateMap.java +++ b/virtdata-api/src/main/java/io/nosqlbench/engine/api/templating/ParsedTemplateMap.java @@ -1134,5 +1134,11 @@ public class ParsedTemplateMap implements LongFunction>, StaticFi } - + public Map getCombinedPrototype() { + Map prototype = new LinkedHashMap<>(); + prototype.putAll(getDynamicPrototype()); + prototype.putAll(getStaticPrototype()); + prototype.putAll(getConfigPrototype()); + return prototype; + } } diff --git a/virtdata-api/src/main/java/io/nosqlbench/virtdata/core/templates/StringCompositor.java b/virtdata-api/src/main/java/io/nosqlbench/virtdata/core/templates/StringCompositor.java index e2f35f285..da0f93962 100644 --- a/virtdata-api/src/main/java/io/nosqlbench/virtdata/core/templates/StringCompositor.java 
+++ b/virtdata-api/src/main/java/io/nosqlbench/virtdata/core/templates/StringCompositor.java @@ -64,9 +64,11 @@ public class StringCompositor implements LongFunction { int minsize = 0; for (int i = 0; i < 100; i++) { String result = apply(i); - minsize = Math.max(minsize, result.length()); + if (result!=null) { + minsize = Math.max(minsize,result.length()); + } } - bufsize = minsize * 2; + bufsize = spans.length*1024; } public StringCompositor(ParsedTemplateString template, Map fconfig) { @@ -78,7 +80,9 @@ public class StringCompositor implements LongFunction { StringBuilder sb = new StringBuilder(bufsize); String[] ary = new String[mappers.length]; for (int i = 0; i < ary.length; i++) { - ary[i] = stringfunc.apply(mappers[i].apply(value)); + DataMapper mapperType = mappers[i]; + Object object = mapperType.apply(value); + ary[i] = stringfunc.apply(object); } for (int i = 0; i < LUT.length; i++) { sb.append(spans[i]).append(ary[LUT[i]]); diff --git a/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/conversions/from_any/ToJSONF.java b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/conversions/from_any/ToJSONF.java new file mode 100644 index 000000000..67ef8bf23 --- /dev/null +++ b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/conversions/from_any/ToJSONF.java @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2022 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.virtdata.library.basics.shared.conversions.from_any; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import io.nosqlbench.virtdata.api.annotations.Categories; +import io.nosqlbench.virtdata.api.annotations.Category; +import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper; +import io.nosqlbench.virtdata.api.bindings.VirtDataConversions; + +import java.util.function.Function; +import java.util.function.LongFunction; + +/** + * Convert the input object to a JSON string with Gson. 
+ */ +@ThreadSafeMapper +@Categories({Category.conversion}) +public class ToJSONF implements LongFunction { + private final static Gson gson = new GsonBuilder().create(); + private final LongFunction objfunc; + + public ToJSONF(LongFunction objFunc) { + this.objfunc = VirtDataConversions.adaptFunction(objFunc, LongFunction.class, String.class); + } + + + + @Override + public String apply(long value) { + Object object = objfunc.apply(value); + return gson.toJson(object); + } +} diff --git a/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/conversions/from_any/ToJSONFPretty.java b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/conversions/from_any/ToJSONFPretty.java new file mode 100644 index 000000000..08dbd36b0 --- /dev/null +++ b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/conversions/from_any/ToJSONFPretty.java @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2022 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.virtdata.library.basics.shared.conversions.from_any; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import io.nosqlbench.virtdata.api.annotations.Categories; +import io.nosqlbench.virtdata.api.annotations.Category; +import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper; +import io.nosqlbench.virtdata.api.bindings.VirtDataConversions; + +import java.util.function.LongFunction; + +/** + * Convert the input object to a JSON string with Gson. + */ +@ThreadSafeMapper +@Categories({Category.conversion}) +public class ToJSONFPretty implements LongFunction { + private final static Gson gson = new GsonBuilder().setPrettyPrinting().create(); + private final LongFunction objfunc; + + public ToJSONFPretty(LongFunction objFunc) { + this.objfunc = VirtDataConversions.adaptFunction(objFunc, LongFunction.class, Object.class); + } + + + + @Override + public String apply(long value) { + Object object = objfunc.apply(value); + return gson.toJson(object); + } +} diff --git a/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/ConcatArray.java b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/ConcatArray.java new file mode 100644 index 000000000..d2f5b59c5 --- /dev/null +++ b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/ConcatArray.java @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.virtdata.library.basics.shared.from_long.to_string; + +import io.nosqlbench.virtdata.api.annotations.Categories; +import io.nosqlbench.virtdata.api.annotations.Category; +import io.nosqlbench.virtdata.api.annotations.Example; +import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper; + +import java.util.function.LongFunction; + +/** + * This variant of Concat allows you to apply a string concatenation to a series of + * string produced by the provided functions. Each position of a delimiter will simply contain + * all generated values, although usually, you won't need more than one. + */ +@ThreadSafeMapper +@Categories({Category.functional}) +public class ConcatArray extends Concat { + private final int size; + private final String delimiter; + + @Example({"ConcatArray(',',5,'{{}}', NumberNameToString())"}) + public ConcatArray(String delimiter, int size, String template, Object... functions) { + super(template, functions); + this.delimiter = delimiter; + this.size = size; + } + + @Override + public String apply(long cycle) { + StringBuilder buffer = new StringBuilder(1024); + for (int i = 0; i < literals.length - 1; i++) { + buffer.append(literals[i]); + for (int element = 0; element < this.size; element++) { + long value = cycleStepMapper.applyAsLong(cycle, element); + int funcIdx = Math.min(functions.length - 1, element); + LongFunction selectedFunction = functions[funcIdx]; + String string = selectedFunction.apply(value); + buffer.append(string).append(delimiter); + } + buffer.setLength(buffer.length()-delimiter.length()); + } + buffer.append(literals[literals.length - 1]); + return buffer.toString(); + } + +} diff --git a/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/DirectoryLinesStable.java b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/DirectoryLinesStable.java index a43e2a457..53b511a74 100644 --- a/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/DirectoryLinesStable.java +++ b/virtdata-lib-basics/src/main/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/DirectoryLinesStable.java @@ -91,6 +91,14 @@ public class DirectoryLinesStable implements LongFunction { this.totalSize = accumulator; } + private String getLine(int index, int offset) { + try { + IntFunction func = fileFunctions.get(index); + return func.apply(offset); + } catch (Exception e) { + throw new RuntimeException("Error while binding index=" + index + " offset=" + offset + " for " + this); + } + } @Override public synchronized String apply(long cycle) { int value = (int) (cycle % totalSize); @@ -100,9 +108,7 @@ public class DirectoryLinesStable implements LongFunction { index++; } - IntFunction func = fileFunctions.get(index); - return func.apply(value); - + return this.getLine(index,value); } @Override diff --git a/virtdata-lib-basics/src/test/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/ConcatArrayTest.java b/virtdata-lib-basics/src/test/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/ConcatArrayTest.java new file mode 100644 index 000000000..d613a54f9 --- /dev/null +++ b/virtdata-lib-basics/src/test/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/ConcatArrayTest.java @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2024 nosqlbench + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.nosqlbench.virtdata.library.basics.shared.from_long.to_string; + +import io.nosqlbench.virtdata.library.basics.shared.conversions.from_any.ToJSONFPretty; +import io.nosqlbench.virtdata.library.basics.shared.from_long.to_collection.ListSized; +import io.nosqlbench.virtdata.library.basics.shared.from_long.to_collection.ListSizedStepped; +import org.junit.jupiter.api.Test; + +import java.util.function.LongFunction; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.*; + +public class ConcatArrayTest { + @Test + public void testConcatArray() { + ConcatArray ca = new ConcatArray(",", 2, "{\n{}\n}", new NumberNameToString()); + assertThat(ca.apply(3L)).isEqualTo(""" + { + three,four + }"""); + } + + @Test + public void testConcatArrayJson() { + ToJSONFPretty jsonlist = new ToJSONFPretty( + (LongFunction) new ListSizedStepped(4, + new NumberNameToString())); + + ConcatArray ca = new ConcatArray( + ",", 2, "{\n{}\n}", jsonlist); + + assertThat(ca.apply(3L)).isEqualTo(""" + { + [ + "three", + "four", + "five", + "six" + ],[ + "four", + "five", + "six", + "seven" + ] + }"""); + } + +} diff --git a/virtdata-lib-basics/src/test/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/DirectoryLinesStableTest.java b/virtdata-lib-basics/src/test/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/DirectoryLinesStableTest.java index aebf242c0..4556c9ae6 100644 --- a/virtdata-lib-basics/src/test/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/DirectoryLinesStableTest.java +++ b/virtdata-lib-basics/src/test/java/io/nosqlbench/virtdata/library/basics/shared/from_long/to_string/DirectoryLinesStableTest.java @@ -38,5 +38,16 @@ public class DirectoryLinesStableTest { assertThat(directoryLines.apply(Long.MAX_VALUE)).isEqualTo("data2.txt-line3"); } +// @Test +// public void testOverRangeIssue() { +// DirectoryLinesStable directoryLines = new DirectoryLinesStable( +// "exampledata/local/testdirlines", ".+jsonl" +// ); +// for (long i = 0; i < 40000; i++) { +// String result = directoryLines.apply(i); +// } +// +// } + } diff --git a/virtdata-lib-vectors/pom.xml b/virtdata-lib-vectors/pom.xml index b579fb27f..cc8164079 100644 --- a/virtdata-lib-vectors/pom.xml +++ b/virtdata-lib-vectors/pom.xml @@ -47,12 +47,6 @@ ${revision} - - org.apfloat - apfloat - 1.13.0 - - org.matheclipse matheclipse-core diff --git a/virtdata-lib-vectors/src/test/java/io/nosqlbench/virtdata/lib/vectors/dnn/DNN_Symbolic_Tests.java b/virtdata-lib-vectors/src/test/java/io/nosqlbench/virtdata/lib/vectors/dnn/DNN_Symbolic_Tests.java deleted file mode 100644 index 3b7828fc2..000000000 --- a/virtdata-lib-vectors/src/test/java/io/nosqlbench/virtdata/lib/vectors/dnn/DNN_Symbolic_Tests.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2024 nosqlbench - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in 
compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.nosqlbench.virtdata.lib.vectors.dnn; - -import org.junit.jupiter.api.Test; -import org.matheclipse.core.eval.ExprEvaluator; -import org.matheclipse.core.expression.F; -import org.matheclipse.core.interfaces.IExpr; -import org.matheclipse.core.interfaces.ISymbol; -import org.matheclipse.core.interfaces.IAST; - -public class DNN_Symbolic_Tests { - - @Test - public void testExactRepresentation() { - ExprEvaluator util = new ExprEvaluator(false, (short)10); - - // Convert an expression to the internal Java form: - // Note: single character identifiers are case sensitive - // (the "D()" function identifier must be written as upper case - // character) - String javaForm = util.toJavaForm("D(sin(x)*cos(x),x)"); - // prints: D(Times(Sin(x),Cos(x)),x) - System.out.println("Out[1]: " + javaForm.toString()); - - // Use the Java form to create an expression with F.* static - // methods: - ISymbol x = F.Dummy("x"); - IAST function = F.D(F.Times(F.Sin(x), F.Cos(x)), x); - IExpr result = util.eval(function); - // print: Cos(x)^2-Sin(x)^2 - System.out.println("Out[2]: " + result.toString()); - - // Note "diff" is an alias for the "D" function - result = util.eval("diff(sin(x)*cos(x),x)"); - // print: Cos(x)^2-Sin(x)^2 - System.out.println("Out[3]: " + result.toString()); - - // evaluate the last result (% contains "last answer") - result = util.eval("%+cos(x)^2"); - // print: 2*Cos(x)^2-Sin(x)^2 - System.out.println("Out[4]: " + result.toString()); - - // evaluate an Integrate[] expression - result = util.eval("integrate(sin(x)^5,x)"); - // print: 2/3*Cos(x)^3-1/5*Cos(x)^5-Cos(x) - System.out.println("Out[5]: " + result.toString()); - - // set the value of a variable "a" to 10 - result = util.eval("a=10"); - // print: 10 - System.out.println("Out[6]: " + result.toString()); - - // do a calculation with variable "a" - result = util.eval("a*3+b"); - // print: 30+b - System.out.println("Out[7]: " + result.toString()); - - // Do a calculation in "numeric mode" with the N() function - // Note: single character identifiers are case sensistive - // (the "N()" function identifier must be written as upper case - // character) - result = util.eval("N(sinh(5))"); - // print: 74.20321057778875 - System.out.println("Out[8]: " + result.toString()); - - // define a function with a recursive factorial function definition. - // Note: fac(0) is the stop condition. - result = util.eval("fac(x_Integer):=x*fac(x-1);fac(0)=1"); - // now calculate factorial of 10: - result = util.eval("fac(10)"); - // print: 3628800 - System.out.println("Out[9]: " + result.toString()); - - function = F.Function(F.Divide(F.Gamma(F.Plus(F.C1, F.Slot1)), F.Gamma(F.Plus(F.C1, F.Slot2)))); - // eval function ( Gamma(1+#1)/Gamma(1+#2) ) & [23,20] - result = util.evalFunction(function, "23", "20"); - // print: 10626 - System.out.println("Out[10]: " + result.toString()); - } -}
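
Below is a minimal usage sketch of the ToJSONFPretty and ConcatArray bindings added in this changeset, composed the same way ConcatArrayTest composes them. The wrapper class ConcatArrayUsageSketch and its main() method are illustrative only; the constructor signatures are assumed to be exactly as they appear in the diff above.

import io.nosqlbench.virtdata.library.basics.shared.conversions.from_any.ToJSONFPretty;
import io.nosqlbench.virtdata.library.basics.shared.from_long.to_collection.ListSizedStepped;
import io.nosqlbench.virtdata.library.basics.shared.from_long.to_string.ConcatArray;
import io.nosqlbench.virtdata.library.basics.shared.from_long.to_string.NumberNameToString;

import java.util.function.LongFunction;

public class ConcatArrayUsageSketch {
    public static void main(String[] args) {
        // Each array element is a four-element stepped list of number names, rendered as pretty-printed JSON.
        ToJSONFPretty jsonList = new ToJSONFPretty(
            (LongFunction) new ListSizedStepped(4, new NumberNameToString()));
        // ConcatArray repeats that element twice per cycle, comma-delimited, inside the literal template.
        ConcatArray ca = new ConcatArray(",", 2, "{\n{}\n}", jsonList);
        // For cycle 3 this prints the same nested-array structure asserted in ConcatArrayTest.
        System.out.println(ca.apply(3L));
    }
}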