initial commit of pinecone op types

This commit is contained in:
Ubuntu 2023-05-05 21:11:28 +00:00 committed by Madhavan
parent 4de2aa67d6
commit 5a5a6372f1
11 changed files with 358 additions and 0 deletions

38
adapter-pinecone/.gitignore vendored Normal file
View File

@ -0,0 +1,38 @@
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr
### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/
### Mac OS ###
.DS_Store

72
adapter-pinecone/pom.xml Normal file
View File

@ -0,0 +1,72 @@
<!--
~ Copyright (c) 2022-2023 nosqlbench
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>adapter-pinecone</artifactId>
<packaging>jar</packaging>
<parent>
<artifactId>mvn-defaults</artifactId>
<groupId>io.nosqlbench</groupId>
<version>${revision}</version>
<relativePath>../mvn-defaults</relativePath>
</parent>
<name>${project.artifactId}</name>
<description>
A nosqlbench adapter driver module;
Provides op types for executing workloads against the Pinecone vector database.
</description>
<dependencies>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>nb-annotations</artifactId>
<version>${revision}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.nosqlbench</groupId>
<artifactId>adapters-api</artifactId>
<version>${revision}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.pinecone</groupId>
<artifactId>pinecone-client</artifactId>
<version>0.2.3</version>
</dependency>
<!-- NOTE(review): removed a duplicate adapters-api dependency that hard-coded
     version 5.17.3-SNAPSHOT; the ${revision}-versioned declaration above is
     authoritative and keeps this module aligned with the rest of the build. -->
</dependencies>
<build>
<plugins>
</plugins>
</build>
</project>

View File

@ -0,0 +1,54 @@
package io.nosqlbench.adapter.pinecone;
import io.pinecone.PineconeClient;
import io.pinecone.PineconeClientConfig;
import io.pinecone.PineconeConnection;
import io.pinecone.PineconeConnectionConfig;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.HashMap;
import java.util.Map;
/**
 * Holds per-space Pinecone client state for the adapter: the shared
 * {@link PineconeClient} plus a cache of index-specific connections.
 */
public class PineConeSpace {

    private static final Logger LOGGER = LogManager.getLogger(PineConeSpace.class);

    private final String apiKey;
    private final String environment;
    private final String projectName;
    private final String name;
    private final PineconeClient client;
    // Client config is fully built in the constructor and never mutated afterwards.
    private final PineconeClientConfig config;

    /**
     * Connections are index-specific so we need to allow for multiple connection
     * management across indices. However, note that a single connection object is
     * thread safe and can be used by multiple clients.
     */
    private final Map<String, PineconeConnection> connections = new HashMap<>();

    /**
     * @param apiKey      Pinecone API key used to authenticate
     * @param environment Pinecone environment (e.g. region) to target
     * @param projectName Pinecone project the indices belong to
     * @param name        logical name of this adapter space
     */
    public PineConeSpace(String apiKey, String environment, String projectName, String name) {
        this.apiKey = apiKey;
        this.environment = environment;
        this.projectName = projectName;
        this.name = name;
        this.config = new PineconeClientConfig()
            .withApiKey(apiKey)
            .withEnvironment(environment)
            .withProjectName(projectName);
        this.client = new PineconeClient(config);
    }

    /**
     * Returns the cached connection for the given index, creating and caching it
     * on first use. Synchronized because the backing {@link HashMap} is not
     * thread safe; the returned connection itself may be shared across threads.
     *
     * @param index name of the Pinecone index to connect to
     * @return a connection bound to {@code index}
     */
    public synchronized PineconeConnection getConnection(String index) {
        return connections.computeIfAbsent(
            index,
            idx -> client.connect(new PineconeConnectionConfig().withIndexName(idx)));
    }
}

View File

@ -0,0 +1,30 @@
package io.nosqlbench.adapter.pinecone.ops;
import io.pinecone.proto.DeleteRequest;
import io.pinecone.proto.DeleteResponse;
import io.pinecone.PineconeConnection;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * A {@link PineconeOp} that issues a blocking delete request against a
 * Pinecone index.
 */
public class PineconeDeleteOp extends PineconeOp {

    private static final Logger LOGGER = LogManager.getLogger(PineconeDeleteOp.class);

    // Fully prepared at construction time; never mutated afterwards.
    private final DeleteRequest request;

    /**
     * @param connection the index-specific {@link PineconeConnection} to run against
     * @param request    the prepared {@link DeleteRequest} to send
     */
    public PineconeDeleteOp(PineconeConnection connection, DeleteRequest request) {
        super(connection);
        this.request = request;
    }

    @Override
    public void run() {
        try {
            DeleteResponse response = connection.getBlockingStub().delete(request);
            // TODO: do something with the response once result handling is defined
        } catch (Exception e) {
            // Log4j uses {} placeholders (not %s); pass the throwable so the full
            // stack trace is logged rather than the array toString of getStackTrace().
            LOGGER.error("Exception caught trying to do delete: {}", e.getMessage(), e);
        }
    }
}

View File

@ -0,0 +1,31 @@
package io.nosqlbench.adapter.pinecone.ops;
import io.pinecone.proto.DescribeIndexStatsRequest;
import io.pinecone.proto.DescribeIndexStatsResponse;
import io.pinecone.PineconeConnection;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * A {@link PineconeOp} that issues a blocking describe-index-stats request
 * against a Pinecone index.
 */
public class PineconeDescribeIndexStatsOp extends PineconeOp {

    private static final Logger LOGGER = LogManager.getLogger(PineconeDescribeIndexStatsOp.class);

    // Fully prepared at construction time; never mutated afterwards.
    private final DescribeIndexStatsRequest request;

    /**
     * @param connection the index-specific {@link PineconeConnection} to run against
     * @param request    the prepared {@link DescribeIndexStatsRequest} to send
     */
    public PineconeDescribeIndexStatsOp(PineconeConnection connection, DescribeIndexStatsRequest request) {
        super(connection);
        this.request = request;
    }

    @Override
    public void run() {
        try {
            DescribeIndexStatsResponse response = connection.getBlockingStub().describeIndexStats(request);
            // TODO: do something with the response once result handling is defined
        } catch (Exception e) {
            // Log4j uses {} placeholders (not %s); pass the throwable so the full
            // stack trace is logged rather than the array toString of getStackTrace().
            LOGGER.error("Exception caught trying to do DescribeIndexStats: {}", e.getMessage(), e);
        }
    }
}

View File

@ -0,0 +1,30 @@
package io.nosqlbench.adapter.pinecone.ops;
import io.pinecone.proto.FetchRequest;
import io.pinecone.PineconeConnection;
import io.pinecone.proto.FetchResponse;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * A {@link PineconeOp} that issues a blocking fetch request against a
 * Pinecone index.
 */
public class PineconeFetchOp extends PineconeOp {

    private static final Logger LOGGER = LogManager.getLogger(PineconeFetchOp.class);

    // Fully prepared at construction time; never mutated afterwards.
    private final FetchRequest request;

    /**
     * @param connection the index-specific {@link PineconeConnection} to run against
     * @param request    the prepared {@link FetchRequest} to send
     */
    public PineconeFetchOp(PineconeConnection connection, FetchRequest request) {
        super(connection);
        this.request = request;
    }

    @Override
    public void run() {
        try {
            FetchResponse response = connection.getBlockingStub().fetch(request);
            // TODO: do something with the response once result handling is defined
        } catch (Exception e) {
            // Log4j uses {} placeholders (not %s); pass the throwable so the full
            // stack trace is logged rather than the array toString of getStackTrace().
            LOGGER.error("Exception caught trying to do Fetch: {}", e.getMessage(), e);
        }
    }
}

View File

@ -0,0 +1,13 @@
package io.nosqlbench.adapter.pinecone.ops;
import io.nosqlbench.engine.api.activityimpl.uniform.flowtypes.RunnableOp;
import io.pinecone.PineconeConnection;
/**
 * Base type for all Pinecone operations. Holds the index-specific connection
 * that concrete op implementations use to issue their blocking requests.
 */
public abstract class PineconeOp implements RunnableOp {

    /** Connection shared with subclasses; a single connection may serve many threads. */
    protected final PineconeConnection connection;

    /**
     * @param indexConnection the connection this op will execute against
     */
    public PineconeOp(PineconeConnection indexConnection) {
        this.connection = indexConnection;
    }
}

View File

@ -0,0 +1,30 @@
package io.nosqlbench.adapter.pinecone.ops;
import io.pinecone.proto.QueryRequest;
import io.pinecone.PineconeConnection;
import io.pinecone.proto.QueryResponse;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * A {@link PineconeOp} that issues a blocking query request against a
 * Pinecone index.
 */
public class PineconeQueryOp extends PineconeOp {

    private static final Logger LOGGER = LogManager.getLogger(PineconeQueryOp.class);

    // Fully prepared at construction time; never mutated afterwards.
    private final QueryRequest request;

    /**
     * @param connection the index-specific {@link PineconeConnection} to run against
     * @param request    the prepared {@link QueryRequest} to send
     */
    public PineconeQueryOp(PineconeConnection connection, QueryRequest request) {
        super(connection);
        this.request = request;
    }

    @Override
    public void run() {
        try {
            QueryResponse response = connection.getBlockingStub().query(request);
            // TODO: do something with the response once result handling is defined
        } catch (Exception e) {
            // Log4j uses {} placeholders (not %s); pass the throwable so the full
            // stack trace is logged rather than the array toString of getStackTrace().
            LOGGER.error("Exception caught trying to do Query: {}", e.getMessage(), e);
        }
    }
}

View File

@ -0,0 +1,30 @@
package io.nosqlbench.adapter.pinecone.ops;
import io.pinecone.proto.UpdateRequest;
import io.pinecone.PineconeConnection;
import io.pinecone.proto.UpdateResponse;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * A {@link PineconeOp} that issues a blocking update request against a
 * Pinecone index.
 */
public class PineconeUpdateOp extends PineconeOp {

    private static final Logger LOGGER = LogManager.getLogger(PineconeUpdateOp.class);

    // Fully prepared at construction time; never mutated afterwards.
    private final UpdateRequest request;

    /**
     * @param connection the index-specific {@link PineconeConnection} to run against
     * @param request    the prepared {@link UpdateRequest} to send
     */
    public PineconeUpdateOp(PineconeConnection connection, UpdateRequest request) {
        super(connection);
        this.request = request;
    }

    @Override
    public void run() {
        try {
            UpdateResponse response = connection.getBlockingStub().update(request);
            // TODO: do something with the response once result handling is defined
        } catch (Exception e) {
            // Log4j uses {} placeholders (not %s); pass the throwable so the full
            // stack trace is logged rather than the array toString of getStackTrace().
            LOGGER.error("Exception caught trying to do Update: {}", e.getMessage(), e);
        }
    }
}

View File

@ -0,0 +1,30 @@
package io.nosqlbench.adapter.pinecone.ops;
import io.pinecone.proto.UpsertRequest;
import io.pinecone.PineconeConnection;
import io.pinecone.proto.UpsertResponse;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * A {@link PineconeOp} that issues a blocking upsert request against a
 * Pinecone index.
 */
public class PineconeUpsertOp extends PineconeOp {

    private static final Logger LOGGER = LogManager.getLogger(PineconeUpsertOp.class);

    // Fully prepared at construction time; never mutated afterwards.
    private final UpsertRequest request;

    /**
     * @param connection the index-specific {@link PineconeConnection} to run against
     * @param request    the prepared {@link UpsertRequest} to send
     */
    public PineconeUpsertOp(PineconeConnection connection, UpsertRequest request) {
        super(connection);
        this.request = request;
    }

    @Override
    public void run() {
        try {
            UpsertResponse response = connection.getBlockingStub().upsert(request);
            // TODO: do something with the response once result handling is defined
        } catch (Exception e) {
            // Log4j uses {} placeholders (not %s); pass the throwable so the full
            // stack trace is logged rather than the array toString of getStackTrace().
            LOGGER.error("Exception caught trying to do upsert: {}", e.getMessage(), e);
        }
    }
}