restoring accidentally deleted configurations

Mark Wolters 2024-03-08 11:03:27 -04:00
parent b49ecc6087
commit d8962d510a
4 changed files with 35 additions and 5 deletions

@@ -0,0 +1,14 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="cql_vector2__drop__E5-BASE-V2" type="JarApplication" folderName="E5-BASE-V2">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5.jar" />
<option name="PROGRAM_PARAMETERS" value="cql_vector2_fvec astra_vectors.drop userfile=auth/userfile passfile=auth/passfile scb=auth/scb.zip --show-stacktraces dimensions=768 testsize=10000 trainsize=100000 datafile=intfloat_e5-base-v2 filetype=fvec table=e5_base_v2 similarity_function=cosine --add-labels &quot;dimensions:768,dataset=e5_base_v2&quot;" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$/local/jvector" />
<option name="ALTERNATIVE_JRE_PATH" value="jdk21" />
<method v="2" />
</configuration>
</component>
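
For reference, this configuration amounts to launching the nb5 jar from local/jvector with the parameters above. Below is a minimal Java sketch of that launch, assuming nb5/target/nb5.jar has been built and that java resolves to the configured JDK 21; the class name and the use of ProcessBuilder are illustrative, not how the IDE actually starts the process.

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

// Sketch of what this run configuration effectively executes: run the bundled
// nb5.jar with the workload parameters above, from the configured working directory.
public class RunE5BaseV2Drop {
    public static void main(String[] args) throws IOException, InterruptedException {
        List<String> cmd = new ArrayList<>(List.of("java", "-jar", "nb5/target/nb5.jar"));
        // PROGRAM_PARAMETERS from the configuration, one argument per entry;
        // the &quot; entities in the XML are plain double quotes on the command line.
        cmd.addAll(List.of(
            "cql_vector2_fvec", "astra_vectors.drop",
            "userfile=auth/userfile", "passfile=auth/passfile", "scb=auth/scb.zip",
            "--show-stacktraces",
            "dimensions=768", "testsize=10000", "trainsize=100000",
            "datafile=intfloat_e5-base-v2", "filetype=fvec", "table=e5_base_v2",
            "similarity_function=cosine",
            "--add-labels", "dimensions:768,dataset=e5_base_v2"));
        Process p = new ProcessBuilder(cmd)
            .directory(new File("local/jvector")) // WORKING_DIRECTORY, relative to the project root
            .inheritIO()
            .start();
        System.exit(p.waitFor());
    }
}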

@@ -0,0 +1,15 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="vectorsearch-consistency-levels" type="JarApplication">
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
<option name="credential" />
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/nb5/target/nb5.jar" />
<option name="PROGRAM_PARAMETERS" value="vector-search.yaml reads errors=stop driverconfig=driver-config.json dimensions=128 read_ratio=1 main-cycles=1 keyspace=baselines128 --report-csv-to metrics read_cl=LOCAL_ONE -v --show-stacktraces" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/local/vectors-consistency" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="true" />
<option name="ALTERNATIVE_JRE_PATH" value="17" />
<method v="2" />
</configuration>
</component>
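
Both restored files follow the same ProjectRunConfigurationManager shape, so recovering one from another is mostly a matter of reading off the option name/value pairs. Here is a hedged sketch that dumps those pairs from a configuration file; the default path is only a guess at where these files live (shared run configurations are typically kept under .run/ or .idea/runConfigurations/), so pass the real path as an argument.

import java.io.File;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

// Reads a JetBrains run-configuration XML like the ones above and prints each
// <option name="..." value="..."/> pair, e.g. JAR_PATH and PROGRAM_PARAMETERS.
public class DumpRunConfig {
    public static void main(String[] args) throws Exception {
        // Hypothetical default path; override with a real one.
        String path = args.length > 0 ? args[0] : ".run/vectorsearch-consistency-levels.run.xml";
        Document doc = DocumentBuilderFactory.newInstance()
            .newDocumentBuilder()
            .parse(new File(path));
        NodeList options = doc.getElementsByTagName("option");
        for (int i = 0; i < options.getLength(); i++) {
            Element opt = (Element) options.item(i);
            // Options without a value attribute (credential, region) print as empty.
            System.out.printf("%s = %s%n", opt.getAttribute("name"), opt.getAttribute("value"));
        }
    }
}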

@@ -501,7 +501,7 @@ public class CGWorkloadExporter implements BundledApp {
/**
* If keycount is 0, all key fields including partition and clustering fields
- * are qualfied with predicates.md.
+ * are qualfied with predicates.
* If keycount is positive, then only that many will be included.
* If keycount is negative, then that many keyfields will be removed from the
* predicate starting with the rightmost (innermost) fields first.
@@ -538,7 +538,7 @@ public class CGWorkloadExporter implements BundledApp {
logger.debug("minimum keycount for " + table.getFullName() + " adjusted from " + lastcount + " to " + keycount);
}
- // TODO; constraints on predicates.md based on valid constructions
+ // TODO; constraints on predicates based on valid constructions
pkeys.stream().map(this::genPredicatePart)
.forEach(p -> {
sb.append(p).append("\n AND ");
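
The keycount contract documented in the javadoc above is subtle enough to warrant an illustration. The following self-contained sketch shows the selection rule as the javadoc describes it; effectiveKeyFields is a hypothetical helper for illustration, not part of CGWorkloadExporter's actual API.

import java.util.List;

// Illustrates the keycount rule from the javadoc above:
//   0   -> all key fields (partition + clustering) are qualified with predicates,
//   n>0 -> only the first n fields are included,
//   n<0 -> |n| fields are dropped from the rightmost (innermost) end.
public class KeycountRule {
    static List<String> effectiveKeyFields(List<String> keyFields, int keycount) {
        if (keycount == 0) return keyFields;
        if (keycount > 0) return keyFields.subList(0, Math.min(keycount, keyFields.size()));
        int keep = Math.max(keyFields.size() + keycount, 0); // keycount is negative here
        return keyFields.subList(0, keep);
    }

    public static void main(String[] args) {
        List<String> pk = List.of("part1", "part2", "clust1", "clust2");
        System.out.println(effectiveKeyFields(pk, 0));  // [part1, part2, clust1, clust2]
        System.out.println(effectiveKeyFields(pk, 2));  // [part1, part2]
        System.out.println(effectiveKeyFields(pk, -1)); // [part1, part2, clust1]
    }
}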

@@ -49,11 +49,12 @@ public class HdfPredicatesToCql implements LongFunction<String>, PredicateAdapte
/**
* Create a new binding function that accepts a long input value for the cycle and returns a string
* @param filename The HDF5 file to read the predicate dataset from
- * @param datasetname The name of the dataset internal to the HDF5 file
+ * @param datasetName The name of the dataset internal to the HDF5 file
* @param serDesType The type of serialization/deserialization to use for the predicate
*/
- public HdfPredicatesToCql(String filename, String datasetname, String serDesType) {
+ public HdfPredicatesToCql(String filename, String datasetName, String serDesType) {
hdfFile = new HdfFile(NBIO.all().search(filename).one().asPath());
- dataset = hdfFile.getDatasetByPath(datasetname);
+ dataset = hdfFile.getDatasetByPath(datasetName);
recordCount = dataset.getDimensions()[0];
serDes = ServiceSelector.of(serDesType, ServiceLoader.load(PredicateSerDes.class)).getOne();
}
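
Since the class implements LongFunction<String>, the renamed constructor is typically exercised per cycle. A hedged usage sketch follows; the file name, dataset path, and serdes name are placeholders rather than values from this repository, and the HDF5 file must exist where NBIO can find it.

import java.util.function.LongFunction;

// Hedged usage sketch for the constructor shown above. "example" is assumed to
// name a loadable PredicateSerDes implementation; substitute whatever your
// workload actually provides.
public class HdfPredicatesExample {
    public static void main(String[] args) {
        LongFunction<String> predicates =
            new HdfPredicatesToCql("predicates.hdf5", "/predicates", "example");
        // Each cycle value selects one serialized predicate from the dataset
        // and renders it as a CQL predicate fragment.
        for (long cycle = 0; cycle < 3; cycle++) {
            System.out.println(predicates.apply(cycle));
        }
    }
}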