diff --git a/adapter-cockroachdb/pom.xml b/adapter-cockroachdb/pom.xml
deleted file mode 100644
index a8fd6cda6..000000000
--- a/adapter-cockroachdb/pom.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-    <modelVersion>4.0.0</modelVersion>
-
-    <parent>
-        <groupId>io.nosqlbench</groupId>
-        <artifactId>mvn-defaults</artifactId>
-        <version>4.17.32-SNAPSHOT</version>
-        <relativePath>../mvn-defaults</relativePath>
-    </parent>
-
-    <artifactId>adapter-cockroachdb</artifactId>
-    <packaging>jar</packaging>
-    <name>${project.artifactId}</name>
-
-    <description>
-        A DriverAdapter driver for CockroachDB
-    </description>
-
-    <dependencies>
-
-        <dependency>
-            <groupId>io.nosqlbench</groupId>
-            <artifactId>adapters-api</artifactId>
-            <version>4.17.32-SNAPSHOT</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.postgresql</groupId>
-            <artifactId>postgresql</artifactId>
-            <version>42.5.1</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.zaxxer</groupId>
-            <artifactId>HikariCP</artifactId>
-            <version>5.0.1</version>
-        </dependency>
-
-    </dependencies>
-
-</project>
diff --git a/adapter-cockroachdb/src/main/java/io/nosqlbench/adapter/cockroachdb/CockroachDBCmdType.java b/adapter-cockroachdb/src/main/java/io/nosqlbench/adapter/cockroachdb/CockroachDBCmdType.java
deleted file mode 100644
index dc9c53cb8..000000000
--- a/adapter-cockroachdb/src/main/java/io/nosqlbench/adapter/cockroachdb/CockroachDBCmdType.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright (c) 2023 nosqlbench
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.nosqlbench.adapter.cockroachdb;
-
-/**
- * Op templates which are supported by the NoSQLBench CockroachDB driver are
- * enumerated below. These command names should mirror those in the official
- * CockroachDB API exactly.
- */
-public class CockroachDBCmdType {
-
-}
diff --git a/adapter-cockroachdb/src/main/java/io/nosqlbench/adapter/cockroachdb/CockroachDBSpace.java b/adapter-cockroachdb/src/main/java/io/nosqlbench/adapter/cockroachdb/CockroachDBSpace.java
deleted file mode 100644
index cd6ba8c63..000000000
--- a/adapter-cockroachdb/src/main/java/io/nosqlbench/adapter/cockroachdb/CockroachDBSpace.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright (c) 2023 nosqlbench
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.nosqlbench.adapter.cockroachdb;
-
-import io.nosqlbench.api.config.standard.NBConfiguration;
-import io.nosqlbench.api.errors.OpConfigError;
-import org.postgresql.ds.PGSimpleDataSource;
-
-import javax.sql.DataSource;
-import java.sql.Connection;
-import java.util.Optional;
-
-public class CockroachDBSpace {
- private final String name;
- private final DataSource ds = null;
-// private final HikariConfig hikariConfig = null;
-// private final HikariDataSource hikariDataSource = null;
- private Connection connection;
-
- public CockroachDBSpace(String name, NBConfiguration cfg) {
- this.name = name;
- PGSimpleDataSource client = createClient(cfg);
-// dynamoDB= new DynamoDB(client);
- }
- private PGSimpleDataSource createClient(NBConfiguration cfg) {
- PGSimpleDataSource ds = new PGSimpleDataSource();
-
- Optional<String> url = cfg.getOptional("url");
- if(url.isEmpty()) {
- throw new OpConfigError("url option is required.");
- } else {
- ds.setURL(url.get());
- }
-
- Optional<String> serverNames = cfg.getOptional("serverName");
- if(serverNames.isPresent()) {
- ds.setServerNames(new String[]{serverNames.get()});
- } else {
- throw new OpConfigError("Server name option is required.");
- }
-
- Optional<String> databaseName = cfg.getOptional("databaseName");
- if(databaseName.isPresent()) {
- ds.setDatabaseName(databaseName.get());
- } else {
- throw new OpConfigError("Database name option is required.");
- }
-
- Optional<Integer> portNumber = cfg.getOptional(Integer.class, "portNumber");
- ds.setPortNumbers(new int[] { portNumber.orElse(26257) });
-
- Optional<String> user = cfg.getOptional("user");
- if(user.isPresent()) {
- ds.setUser(user.get());
- }
-
- Optional<String> password = cfg.getOptional("password");
- if(password.isPresent()) {
- if(user.isEmpty()) {
- throw new OpConfigError("Both user and password options are required. Only password is supplied in this case.");
- }
- ds.setPassword(password.get());
- } else {
- if(user.isPresent()) {
- throw new OpConfigError("Both user and password options are required. Only user is supplied in this case.");
- }
- }
-
- Optional<String> sslMode = cfg.getOptional("sslMode");
- if(sslMode.isPresent()) {
- ds.setSslMode(sslMode.get());
- } else {
- ds.setSslMode("verify-full");
- }
-
- Optional<String> applicationName = cfg.getOptional("applicationName");
- if(applicationName.isPresent()) {
- ds.setApplicationName(applicationName.get());
- } else {
- ds.setApplicationName("NoSQLBench");
- }
- Optional<Boolean> rewriteBatchedInserts = cfg.getOptional(Boolean.class, "rewriteBatchedInserts");
- ds.setReWriteBatchedInserts(rewriteBatchedInserts.orElse(false));
-
- return ds;
- }
-
- public static NBConfigModel getConfigModel() {
- return ConfigModel.of(CockroachDBSpace.class)
- .add(Param.optional("url"))
- .add(Param.optional("serverName"))
- .add(Param.optional("databaseName"))
- //TODO remove these below
- .add(Param.optional("client_socket_timeout"))
- .add(Param.optional("client_execution_timeout"))
- .add(Param.optional("client_max_connections"))
- .add(Param.optional("client_max_error_retry"))
- .add(Param.optional("client_user_agent_prefix"))
- .add(Param.optional("client_consecutive_retries_before_throttling"))
- .add(Param.optional("client_gzip"))
- .add(Param.optional("client_tcp_keepalive"))
- .add(Param.optional("client_disable_socket_proxy"))
- .add(Param.optional("client_so_send_size_hint"))
- .add(Param.optional("client_so_recv_size_hint"))
- .asReadOnly();
- }
-}
diff --git a/adapter-cockroachdb/src/main/resources/cockroachdb.md b/adapter-cockroachdb/src/main/resources/cockroachdb.md
deleted file mode 100644
index 7e84eaab5..000000000
--- a/adapter-cockroachdb/src/main/resources/cockroachdb.md
+++ /dev/null
@@ -1 +0,0 @@
-# cockroachdb driver
diff --git a/adapter-jdbc/pom.xml b/adapter-jdbc/pom.xml
new file mode 100644
index 000000000..96a522cc8
--- /dev/null
+++ b/adapter-jdbc/pom.xml
@@ -0,0 +1,115 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>io.nosqlbench</groupId>
+        <artifactId>mvn-defaults</artifactId>
+        <version>5.17.1-SNAPSHOT</version>
+        <relativePath>../mvn-defaults</relativePath>
+    </parent>
+
+    <artifactId>adapter-jdbc</artifactId>
+    <packaging>jar</packaging>
+    <name>${project.artifactId}</name>
+
+    <description>
+        A DriverAdapter driver for JDBC via PostgreSQL with HikariCP.
+    </description>
+
+    <dependencies>
+
+        <dependency>
+            <groupId>io.nosqlbench</groupId>
+            <artifactId>adapters-api</artifactId>
+            <version>5.17.1-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>io.nosqlbench</groupId>
+            <artifactId>engine-api</artifactId>
+            <version>5.17.1-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>io.nosqlbench</groupId>
+            <artifactId>nb-annotations</artifactId>
+            <version>5.17.1-SNAPSHOT</version>
+            <scope>compile</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.postgresql</groupId>
+            <artifactId>postgresql</artifactId>
+            <version>42.5.1</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.zaxxer</groupId>
+            <artifactId>HikariCP</artifactId>
+            <version>5.0.1</version>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.jacoco</groupId>
+                <artifactId>jacoco-maven-plugin</artifactId>
+                <version>0.8.8</version>
+                <executions>
+                    <execution>
+                        <id>prepare-agent</id>
+                        <goals>
+                            <goal>prepare-agent</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>report</id>
+                        <phase>test</phase>
+                        <goals>
+                            <goal>report</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>jacoco-check</id>
+                        <phase>verify</phase>
+                        <goals>
+                            <goal>check</goal>
+                        </goals>
+                        <configuration>
+                            <rules>
+                                <rule>
+                                    <element>BUNDLE</element>
+                                    <limits>
+                                        <limit>
+                                            <counter>INSTRUCTION</counter>
+                                            <value>COVEREDRATIO</value>
+                                            <minimum>0.00</minimum>
+                                            <maximum>1.00</maximum>
+                                        </limit>
+                                    </limits>
+                                </rule>
+                            </rules>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCDriverAdapter.java b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCDriverAdapter.java
new file mode 100644
index 000000000..2a1095cbd
--- /dev/null
+++ b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCDriverAdapter.java
@@ -0,0 +1,36 @@
+package io.nosqlbench.adapter.jdbc;
+
+import io.nosqlbench.adapter.jdbc.optypes.JDBCOp;
+import io.nosqlbench.api.config.standard.NBConfigModel;
+import io.nosqlbench.api.config.standard.NBConfiguration;
+import io.nosqlbench.engine.api.activityimpl.OpMapper;
+import io.nosqlbench.engine.api.activityimpl.uniform.BaseDriverAdapter;
+import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
+import io.nosqlbench.engine.api.activityimpl.uniform.DriverSpaceCache;
+import io.nosqlbench.nb.annotations.Service;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.function.Function;
+
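+/**
+ * DriverAdapter entry point for the {@code jdbc} driver. It supplies the
+ * {@link JDBCOpMapper} with the adapter configuration and space cache, and creates a
+ * {@link JDBCSpace} (a HikariCP-backed connection holder) for each named space on demand.
+ */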
+@Service(value = DriverAdapter.class, selector = "jdbc")
+ public class JDBCDriverAdapter extends BaseDriverAdapter<JDBCOp, JDBCSpace> {
+ private final static Logger logger = LogManager.getLogger(JDBCDriverAdapter.class);
+
+ @Override
+ public OpMapper<JDBCOp> getOpMapper() {
+ DriverSpaceCache<? extends JDBCSpace> spaceCache = getSpaceCache();
+ NBConfiguration adapterConfig = getConfiguration();
+ return new JDBCOpMapper(this, adapterConfig, spaceCache);
+ }
+
+ @Override
+ public Function<String, ? extends JDBCSpace> getSpaceInitializer(NBConfiguration cfg) {
+ return (s) -> new JDBCSpace(s, cfg);
+ }
+
+ @Override
+ public NBConfigModel getConfigModel() {
+ return super.getConfigModel().add(JDBCSpace.getConfigModel());
+ }
+}
diff --git a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCOpMapper.java b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCOpMapper.java
new file mode 100644
index 000000000..79ff2f113
--- /dev/null
+++ b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCOpMapper.java
@@ -0,0 +1,70 @@
+package io.nosqlbench.adapter.jdbc;
+
+import io.nosqlbench.adapter.jdbc.opdispensers.JDBCDDLOpDispenser;
+import io.nosqlbench.adapter.jdbc.optypes.JDBCOp;
+import io.nosqlbench.api.config.standard.NBConfiguration;
+import io.nosqlbench.engine.api.activityimpl.OpDispenser;
+import io.nosqlbench.engine.api.activityimpl.OpMapper;
+import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
+import io.nosqlbench.engine.api.activityimpl.uniform.DriverSpaceCache;
+import io.nosqlbench.engine.api.templating.ParsedOp;
+import io.nosqlbench.engine.api.templating.TypeAndTarget;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.sql.Connection;
+import java.util.function.LongFunction;
+
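+/**
+ * Maps a parsed op template to an op dispenser. The op type is resolved via
+ * {@code getTypeAndTarget(JDBCOpType.class, String.class, "type", "stmt")}, so an op can
+ * either name its type explicitly or use a field named after a {@link JDBCOpType} value
+ * whose value is the statement text. Only the DDL-style types (create, drop, ddl) are
+ * dispensed at this point; select and update are not implemented yet.
+ */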
+public class JDBCOpMapper implements OpMapper<JDBCOp> {
+ private final static Logger logger = LogManager.getLogger(JDBCOpMapper.class);
+
+ private final DriverAdapter adapter;
+ private final NBConfiguration cfg;
+ private final DriverSpaceCache<? extends JDBCSpace> spaceCache;
+
+ public JDBCOpMapper(DriverAdapter adapter, NBConfiguration cfg, DriverSpaceCache<? extends JDBCSpace> spaceCache) {
+ this.adapter = adapter;
+ this.cfg = cfg;
+ this.spaceCache = spaceCache;
+ }
+
+ @Override
+ public OpDispenser<? extends JDBCOp> apply(ParsedOp op) {
+ LongFunction<String> spaceNameF = op.getAsFunctionOr("space", "default");
+ LongFunction<JDBCSpace> spaceFunc = l -> spaceCache.get(spaceNameF.apply(l));
+ // Since the only thing needed from the JDBCSpace is the connection, we can short-circuit
+ // to it here instead of stepping down from the cycle to the space to the connection.
+ LongFunction<Connection> connectionLongFunc = l -> spaceCache.get(spaceNameF.apply(l)).getConnection();
+
+ // CREATE|DROP TABLE|VIEW uses execute (as opposed to executeQuery which returns a ResultSet)
+ // https://jdbc.postgresql.org/documentation/query/#example54dropping-a-table-in-jdbc
+ //return new JDBCQueryOpDispenser(adapter, spaceFunc, op);working
+
+ /*
+ * If the user provides a body element, then they want to provide the JSON or
+ * a data structure that can be converted into JSON, bypassing any further
+ * specialized type-checking or op-type specific features
+ */
+
+ if (op.isDefined("body")) {
+ throw new RuntimeException("This mode is reserved for later. Do not use the 'body' op field.");
+ }
+ else {
+ TypeAndTarget<JDBCOpType, String> opType = op.getTypeAndTarget(JDBCOpType.class, String.class, "type", "stmt");
+
+ logger.info(() -> "Using " + opType.enumId + " statement form for '" + op.getName() + "'");
+
+ //return new JDBCQueryOpDispenser(adapter, spaceFunc, op/*, opType.targetFunction*/);
+
+
+ return switch (opType.enumId) {
+ // CREATE|DROP TABLE|VIEW uses execute (as opposed to executeQuery which returns a ResultSet)
+ // https://jdbc.postgresql.org/documentation/query/#example54dropping-a-table-in-jdbc
+
+ case select -> null;
+ case update -> null;
+ case create, drop, ddl -> new JDBCDDLOpDispenser(adapter, connectionLongFunc, op, opType.targetFunction)/*.apply(op)*/;
+ };
+ }
+ }
+}
diff --git a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCOpType.java b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCOpType.java
new file mode 100644
index 000000000..6c82243aa
--- /dev/null
+++ b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCOpType.java
@@ -0,0 +1,17 @@
+package io.nosqlbench.adapter.jdbc;
+
+/**
+ * Op types supported by the NoSQLBench JDBC driver are enumerated below. Each maps to the
+ * corresponding JDBC statement execution style (execute, executeQuery, or executeUpdate).
+ * @see <a href="https://jdbc.postgresql.org/documentation/query/">PostgreSQL JDBC query documentation</a>
+ */
+public enum JDBCOpType {
+ //See https://jdbc.postgresql.org/documentation/query/
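+ // An illustrative (not normative) op template form, assuming the type/target mapping used
+ // in JDBCOpMapper where the op field name selects the op type and its value is the
+ // statement text:
+ //
+ //   ops:
+ //     create_table:
+ //       ddl: "CREATE TABLE IF NOT EXISTS mytable (key STRING PRIMARY KEY, value STRING)"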
+ select, // used for SELECT operations; maps to executeQuery
+ update, // used for INSERT/UPDATE/DELETE operations; maps to executeUpdate
+ ddl, // used for creating/modifying database objects; maps to execute
+ //JdbcQuery, // generic placeholder TODO - implement this differently
+ create, // used for CREATE operations; maps to execute
+ drop, // used for DROP operations; maps to execute
+}
diff --git a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCSpace.java b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCSpace.java
new file mode 100644
index 000000000..5db378f5c
--- /dev/null
+++ b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/JDBCSpace.java
@@ -0,0 +1,225 @@
+/*
+ * Copyright (c) 2023 nosqlbench
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.nosqlbench.adapter.jdbc;
+
+import com.zaxxer.hikari.HikariConfig;
+import com.zaxxer.hikari.HikariDataSource;
+import io.nosqlbench.api.config.standard.ConfigModel;
+import io.nosqlbench.api.config.standard.NBConfigModel;
+import io.nosqlbench.api.config.standard.NBConfiguration;
+import io.nosqlbench.api.config.standard.Param;
+import io.nosqlbench.api.errors.OpConfigError;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.postgresql.ds.PGSimpleDataSource;
+
+import javax.sql.DataSource;
+import java.sql.Connection;
+import java.util.Optional;
+
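+/**
+ * Per-space JDBC state: a HikariCP connection pool configured from the activity parameters,
+ * plus the single {@link java.sql.Connection} handed out to op dispensers. As an
+ * illustrative sketch (parameter names from the config model below, values hypothetical),
+ * a space might be configured with activity params such as
+ * {@code url=jdbc:postgresql:/ serverName=localhost databaseName=mydb portNumber=26257}.
+ */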
+public class JDBCSpace implements AutoCloseable {
+ private final static Logger logger = LogManager.getLogger(JDBCSpace.class);
+ private final String spaceName;
+ private DataSource ds;
+ private HikariConfig hikariConfig;
+ private HikariDataSource hikariDataSource;
+ private Connection connection;
+
+ public JDBCSpace(String spaceName, NBConfiguration cfg) {
+ this.spaceName = spaceName;
+ this.hikariDataSource = createClient(cfg);
+ }
+
+ public Connection getConnection() {
+ return this.connection;
+ }
+
+ public HikariDataSource getHikariDataSource() {
+ return this.hikariDataSource;
+ }
+
+ private HikariDataSource createClient(NBConfiguration cfg) {
+ PGSimpleDataSource ds = new PGSimpleDataSource();
+ hikariConfig = new HikariConfig();
+
+ Optional<String> url = cfg.getOptional("url");
+ if(url.isEmpty()) {
+ throw new OpConfigError("url option is required.");
+ } else {
+ ds.setURL(url.get());
+ hikariConfig.setJdbcUrl(url.get());
+ }
+
+ Optional<String> serverNames = cfg.getOptional("serverName");
+ if(serverNames.isPresent()) {
+ ds.setServerNames(new String[]{serverNames.get()});
+ //hds.setServerNames(new String[] {serverNames.get()});
+ hikariConfig.addDataSourceProperty("serverName", serverNames.get());
+ } else {
+ throw new OpConfigError("Server name option is required.");
+ }
+
+ Optional<String> databaseName = cfg.getOptional("databaseName");
+ if(databaseName.isPresent()) {
+ ds.setDatabaseName(databaseName.get());
+ hikariConfig.addDataSourceProperty("databaseName", databaseName.get());
+ } else {
+ throw new OpConfigError("Database name option is required.");
+ }
+
+ Optional<Integer> portNumber = cfg.getOptional(Integer.class, "portNumber");
+ ds.setPortNumbers(new int[] { portNumber.orElse(26257) });
+ hikariConfig.addDataSourceProperty("portNumber", portNumber.orElse(26257));
+
+ Optional<String> user = cfg.getOptional("user");
+ if(user.isPresent()) {
+ ds.setUser(user.get());
+ hikariConfig.setUsername(user.get());
+ }
+
+ Optional<String> password = cfg.getOptional("password");
+ if(password.isPresent()) {
+ if(user.isEmpty()) {
+ throw new OpConfigError("Both user and password options are required. Only password is supplied in this case.");
+ }
+ ds.setPassword(password.get());
+ hikariConfig.setPassword(password.get());
+ } else {
+ if(user.isPresent()) {
+ throw new OpConfigError("Both user and password options are required. Only user is supplied in this case.");
+ }
+ }
+
+ Optional<Boolean> ssl = cfg.getOptional(Boolean.class, "ssl");
+ if(ssl.isPresent()) {
+ ds.setSsl(ssl.get());
+ hikariConfig.addDataSourceProperty("ssl", ssl.get());
+ } else {
+ ds.setSsl(false);
+ hikariConfig.addDataSourceProperty("ssl", false);
+ }
+
+ Optional<String> sslMode = cfg.getOptional("sslmode");
+ if(sslMode.isPresent()) {
+ ds.setSslMode(sslMode.get());
+ hikariConfig.addDataSourceProperty("sslmode", sslMode.get());
+ } else {
+ ds.setSslMode("verify-full");
+ hikariConfig.addDataSourceProperty("sslmode", "verify-full");
+ }
+
+ Optional<String> sslCert = cfg.getOptional("sslcert");
+ if(sslCert.isPresent()) {
+ ds.setSslcert(sslCert.get());
+ hikariConfig.addDataSourceProperty("sslcert", sslCert.get());
+ } /*else if(sslMode.isPresent() && (!"disable".equalsIgnoreCase(sslMode.get()) || !"allow".equalsIgnoreCase(sslMode.get())) || !"prefer".equalsIgnoreCase(sslMode.get())) {
+ throw new OpConfigError("When sslmode is true, sslcert should be provided.");
+ }*/
+
+ Optional<String> sslRootCert = cfg.getOptional("sslrootcert");
+ if(sslRootCert.isPresent()) {
+ ds.setSslRootCert(sslRootCert.get());
+ hikariConfig.addDataSourceProperty("sslrootcert", sslRootCert.get());
+ }
+
+ Optional<String> applicationName = cfg.getOptional("applicationName");
+ if(applicationName.isPresent()) {
+ ds.setApplicationName(applicationName.get());
+ hikariConfig.addDataSourceProperty("applicationName", applicationName.orElse("NoSQLBench CRDB"));
+ } else {
+ ds.setApplicationName("NoSQLBench CRDB");
+ hikariConfig.addDataSourceProperty("applicationName", "NoSQLBench CRDB");
+ }
+ Optional<Boolean> rewriteBatchedInserts = cfg.getOptional(Boolean.class, "rewriteBatchedInserts");
+ ds.setReWriteBatchedInserts(rewriteBatchedInserts.orElse(true));
+ hikariConfig.addDataSourceProperty("rewriteBatchedInserts", rewriteBatchedInserts.orElse(true));
+
+ Optional<Boolean> autoCommit = cfg.getOptional(Boolean.class, "autoCommit");
+ hikariConfig.setAutoCommit(autoCommit.orElse(false));
+
+ Optional<Integer> maximumPoolSize = cfg.getOptional(Integer.class, "maximumPoolSize");
+ hikariConfig.setMaximumPoolSize(maximumPoolSize.orElse(40));
+
+ Optional<Integer> keepaliveTime = cfg.getOptional(Integer.class, "keepaliveTime");
+ hikariConfig.setKeepaliveTime(keepaliveTime.orElse(150000));
+
+ HikariDataSource hds = new HikariDataSource(hikariConfig);
+ try {
+ this.connection = hds.getConnection();
+ } catch (Exception ex) {
+ String exp = "Exception occurred while attempting to create a connection using the Hikari Data Source";
+ logger.error(exp, ex);
+ throw new RuntimeException(exp, ex);
+ }
+
+ return hds;
+ }
+
+ public static NBConfigModel getConfigModel() {
+ return ConfigModel.of(JDBCSpace.class)
+ .add(Param.defaultTo("url", "jdbc:postgresql:/").setDescription("The connection URL used to connect to the DBMS. Defaults to 'jdbc:postgresql:/'"))
+ .add(Param.defaultTo("serverName", "localhost").setDescription("The host name of the server. Defaults to 'localhost'"))
+ .add(Param.optional("databaseName").setDescription("The database name. The default is to connect to a database with the same name as the user name used to connect to the server."))
+ // See https://github.com/brettwooldridge/HikariCP/tree/dev#gear-configuration-knobs-baby & https://jdbc.postgresql.org/documentation/use/
+ .add(Param.defaultTo("portNumber", 5432).setDescription("The port number the server is listening on. Defaults to the PostgreSQLĀ® standard port number (5432)"))
+ .add(Param.optional("user"))
+ .add(Param.optional("password"))
+ .add(Param.optional("ssl"))
+ .add(Param.optional("sslmode"))
+ .add(Param.optional("sslcert"))
+ .add(Param.optional("sslrootcert"))
+ .add(Param.optional("applicationName"))
+ .add(Param.optional("rewriteBatchedInserts"))
+ .add(Param.optional("autoCommit"))
+ .add(Param.optional("connectionTimeout"))
+ .add(Param.optional("idleTimeout"))
+ .add(Param.optional("keepaliveTime"))
+ .add(Param.optional("maxLifetime"))
+ .add(Param.optional("connectionTestQuery"))
+ .add(Param.optional("minimumIdle"))
+ .add(Param.optional("maximumPoolSize"))
+ .add(Param.optional("metricRegistry"))
+ .add(Param.optional("healthCheckRegistry"))
+ .add(Param.optional("poolName"))
+ .add(Param.optional("initializationFailTimeout"))
+ .add(Param.optional("isolateInternalQueries"))
+ .add(Param.optional("allowPoolSuspension"))
+ .add(Param.optional("readOnly"))
+ .add(Param.optional("registerMbeans"))
+ .add(Param.optional("catalog"))
+ .add(Param.optional("connectionInitSql"))
+ .add(Param.optional("driverClassName"))
+ .add(Param.optional("transactionIsolation"))
+ .add(Param.optional("validationTimeout"))
+ .add(Param.optional("leakDetectionThreshold"))
+ .add(Param.optional("dataSource"))
+ .add(Param.optional("schema"))
+ .add(Param.optional("threadFactory"))
+ .add(Param.optional("scheduledExecutor"))
+ .asReadOnly();
+ }
+
+ @Override
+ public void close() {
+ try {
+ this.getConnection().close();
+ } catch (Exception e) {
+ logger.warn("auto-closeable jdbc connection threw exception in jdbc space(" + this.spaceName + "): " + e);
+ throw new RuntimeException(e);
+ }
+ }
+}
diff --git a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDDLOpDispenser.java b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDDLOpDispenser.java
new file mode 100644
index 000000000..6e4697033
--- /dev/null
+++ b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCDDLOpDispenser.java
@@ -0,0 +1,51 @@
+package io.nosqlbench.adapter.jdbc.opdispensers;
+
+import io.nosqlbench.adapter.jdbc.JDBCSpace;
+import io.nosqlbench.adapter.jdbc.optypes.JDBCOp;
+import io.nosqlbench.engine.api.activityimpl.BaseOpDispenser;
+import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
+import io.nosqlbench.engine.api.templating.ParsedOp;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.function.LongFunction;
+
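+/**
+ * Dispenses {@link JDBCOp} instances for the DDL-style op types (create, drop, ddl).
+ * Each cycle resolves a connection, creates a plain JDBC {@link Statement} from it, and
+ * pairs it with the rendered statement text from the op template.
+ */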
+public class JDBCDDLOpDispenser extends BaseOpDispenser<JDBCOp, JDBCSpace> {
+ private static final Logger logger = LogManager.getLogger(JDBCDDLOpDispenser.class);
+ private final LongFunction<String> targetFunction;
+ private final LongFunction<Connection> connectionLongFunction;
+ private final LongFunction<Statement> statementLongFunction;
+
+ public JDBCDDLOpDispenser(DriverAdapter adapter, LongFunction<Connection> connectionLongFunc, ParsedOp op, LongFunction<String> targetFunction) {
+ super(adapter, op);
+
+ this.connectionLongFunction = connectionLongFunc;
+ this.targetFunction = targetFunction;
+ this.statementLongFunction = createStmtFunc(op);
+ }
+
+ protected LongFunction<Statement> createStmtFunc(ParsedOp cmd) {
+ try {
+ LongFunction<Statement> basefunc = l -> {
+ try {
+ return this.connectionLongFunction.apply(l).createStatement();
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ };
+ return basefunc;
+ } catch(Exception ex) {
+ String err_msg = "Error while attempting to create the jdbc statement from the connection";
+ logger.error(err_msg, ex);
+ throw new RuntimeException(err_msg, ex);
+ }
+ }
+
+ @Override
+ public JDBCOp apply(long cycle) {
+ return new JDBCOp(this.connectionLongFunction.apply(cycle), this.statementLongFunction.apply(cycle), targetFunction.apply(cycle));
+ }
+}
diff --git a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCQueryOpDispenser.java b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCQueryOpDispenser.java
new file mode 100644
index 000000000..98ea715e9
--- /dev/null
+++ b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/opdispensers/JDBCQueryOpDispenser.java
@@ -0,0 +1,130 @@
+package io.nosqlbench.adapter.jdbc.opdispensers;
+
+import io.nosqlbench.adapter.jdbc.JDBCSpace;
+import io.nosqlbench.adapter.jdbc.optypes.JDBCOp;
+import io.nosqlbench.engine.api.activityimpl.BaseOpDispenser;
+import io.nosqlbench.engine.api.activityimpl.uniform.DriverAdapter;
+import io.nosqlbench.engine.api.templating.ParsedOp;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import javax.sql.DataSource;
+import java.sql.Statement;
+import java.util.function.LongFunction;
+
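+/**
+ * Placeholder dispenser for query-style operations (select/update). The op construction
+ * here is not implemented yet; the commented-out block below is reference material carried
+ * over from the http adapter and is not used.
+ */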
+public class JDBCQueryOpDispenser extends BaseOpDispenser<JDBCOp, JDBCSpace> {
+ private final static Logger logger = LogManager.getLogger(JDBCQueryOpDispenser.class);
+ private final DataSource dataSource;
+ private final LongFunction<JDBCOp> jdbcOpLongFunction;
+// private final LongFunction<String> tableNameFunc;
+ //private final LongFunction<String> targetFunction;
+
+ public JDBCQueryOpDispenser(DriverAdapter adapter, LongFunction<JDBCSpace> jdbcSpaceLongFunction, ParsedOp op/*, LongFunction<String> targetFunction*/) {
+ super(adapter, op);
+ this.jdbcOpLongFunction = getOpFunc(jdbcSpaceLongFunction, op);
+ //this.targetFunction = targetFunction;
+ //TODO -- implement this
+ dataSource = null;
+ }
+
+ public JDBCQueryOpDispenser(DriverAdapter adapter, ParsedOp op) {
+ super(adapter, op);
+ //TODO -- implement this
+ this.jdbcOpLongFunction = null;
+ this.dataSource = null;
+ //this.targetFunction = null;
+ }
+
+ protected LongFunction<Statement> createStmtFunc(ParsedOp cmd) {
+ LongFunction<Statement> basefunc = l -> null; //targetFunction.apply(l));
+ return null;
+ }
+
+ private LongFunction<JDBCOp> getOpFunc(LongFunction<JDBCSpace> jdbcSpaceLongFunction, ParsedOp op) {
+/*
+ LongFunction builderF = l -> HttpRequest.newBuilder();
+ LongFunction bodyF = op.getAsFunctionOr("body", null);
+ LongFunction bodyPublisherF =
+ l -> Optional.ofNullable(bodyF.apply(l)).map(HttpRequest.BodyPublishers::ofString).orElse(
+ HttpRequest.BodyPublishers.noBody()
+ );
+
+ LongFunction methodF = op.getAsFunctionOr("method", "GET");
+ LongFunction initBuilderF =
+ l -> builderF.apply(l).method(methodF.apply(l), bodyPublisherF.apply(l));
+
+ initBuilderF = op.enhanceFuncOptionally(
+ initBuilderF, "version", String.class,
+ (b, v) -> b.version(HttpClient.Version.valueOf(
+ v.replaceAll("/1.1", "_1_1")
+ .replaceAll("/2.0", "_2")
+ )
+ )
+ );
+
+ Optional> optionalUriFunc = op.getAsOptionalFunction("uri", String.class);
+ LongFunction urifunc;
+ // Add support for URLENCODE on the uri field if either it statically or dynamically contains the E or URLENCODE pattern,
+ // OR the enable_urlencode op field is set to true.
+ if (optionalUriFunc.isPresent()) {
+ String testUriValue = optionalUriFunc.get().apply(0L);
+ if (HttpFormatParser.URLENCODER_PATTERN.matcher(testUriValue).find()
+ || op.getStaticConfigOr("enable_urlencode", true)) {
+ initBuilderF =
+ op.enhanceFuncOptionally(
+ initBuilderF,
+ "uri",
+ String.class,
+ (b, v) -> b.uri(URI.create(HttpFormatParser.rewriteExplicitSections(v)))
+ );
+ }
+ } else {
+ initBuilderF = op.enhanceFuncOptionally(initBuilderF, "uri", String.class, (b, v) -> b.uri(URI.create(v)));
+ }
+
+ op.getOptionalStaticValue("follow_redirects", boolean.class);
+
+
+ List headerNames = op.getDefinedNames().stream()
+ .filter(n -> n.charAt(0) >= 'A')
+ .filter(n -> n.charAt(0) <= 'Z')
+ .toList();
+ if (headerNames.size() > 0) {
+ for (String headerName : headerNames) {
+ initBuilderF = op.enhanceFunc(initBuilderF, headerName, String.class, (b, h) -> b.header(headerName, h));
+ }
+ }
+
+ initBuilderF = op.enhanceFuncOptionally(initBuilderF, "timeout", long.class, (b, v) -> b.timeout(Duration.ofMillis(v)));
+
+ LongFunction finalInitBuilderF = initBuilderF;
+ LongFunction reqF = l -> finalInitBuilderF.apply(l).build();
+
+
+ Pattern ok_status = op.getOptionalStaticValue("ok-status", String.class)
+ .map(Pattern::compile)
+ .orElse(Pattern.compile(DEFAULT_OK_STATUS));
+
+ Pattern ok_body = op.getOptionalStaticValue("ok-body", String.class)
+ .map(Pattern::compile)
+ .orElse(null);
+
+ LongFunction opFunc = cycle -> new HttpOp(
+ jdbcSpaceLongFunction.apply(cycle).getClient(),
+ reqF.apply(cycle),
+ ok_status,
+ ok_body,
+ jdbcSpaceLongFunction.apply(cycle), cycle
+ );
+ */
+ //return null;
+ LongFunction<JDBCOp> jdbcOpLongFunction = cycle -> new JDBCOp(jdbcSpaceLongFunction.apply(cycle), "DUMMY_STRINGcycle");
+ return jdbcOpLongFunction;
+ }
+
+ @Override
+ public JDBCOp apply(long value) {
+ JDBCOp op = this.jdbcOpLongFunction.apply(value);
+ return op;
+ }
+}
diff --git a/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/optypes/JDBCOp.java b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/optypes/JDBCOp.java
new file mode 100644
index 000000000..7a034bd73
--- /dev/null
+++ b/adapter-jdbc/src/main/java/io/nosqlbench/adapter/jdbc/optypes/JDBCOp.java
@@ -0,0 +1,601 @@
+package io.nosqlbench.adapter.jdbc.optypes;
+
+import io.nosqlbench.adapter.jdbc.JDBCSpace;
+import io.nosqlbench.engine.api.activityimpl.uniform.flowtypes.RunnableOp;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import javax.sql.DataSource;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+/**
+ * Op type for the jdbc adapter. Connections are obtained from the HikariCP pool managed by
+ * {@link JDBCSpace}; see the HikariCP documentation for connection pooling details.
+ */
+public /*abstract*/ class JDBCOp implements RunnableOp/*CycleOp