mirror of
https://github.com/nosqlbench/nosqlbench.git
synced 2025-02-25 18:55:28 -06:00
incremental commit to allow changes to be downloaded to another machine
@@ -0,0 +1,66 @@
/*
 * Copyright (c) 2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.nosqlbench.virtdata.library.hdf5.from_long.to_array;

import io.nosqlbench.virtdata.api.annotations.Categories;
import io.nosqlbench.virtdata.api.annotations.Category;
import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper;
import io.nosqlbench.virtdata.library.hdf5.from_long.AbstractHdfFileToVectorType;

import java.util.HashMap;
import java.util.Map;
import java.util.function.LongFunction;

@ThreadSafeMapper
@Categories(Category.experimental)
public class HdfDatasetToPayload extends AbstractHdfFileToVectorType implements LongFunction<Object[]> {
    // Column name -> Java type, as declared by the paired column-type dataset.
    private final Map<String, Class<?>> typeMap = new HashMap<>();

    public HdfDatasetToPayload(String filename, String datasetName, String columnDS, String columnTypeDS) {
        super(filename, datasetName);
        populateTypeMap(columnDS, columnTypeDS);
    }

    private void populateTypeMap(String columnDS, String columnTypeDS) {
        String[] columnNames = (String[]) hdfFile.getDatasetByPath(columnDS).getData();
        String[] columnTypes = (String[]) hdfFile.getDatasetByPath(columnTypeDS).getData();
        if (columnNames.length != columnTypes.length) {
            throw new RuntimeException("The column name dataset and the column type dataset must be the same length");
        }
        for (int i = 0; i < columnNames.length; i++) {
            String columnName = columnNames[i];
            String columnType = columnTypes[i];
            Class<?> type = switch (columnType) {
                case "int" -> int.class;
                case "long" -> long.class;
                case "float" -> float.class;
                case "double" -> double.class;
                case "String" -> String.class;
                default -> throw new RuntimeException("Unsupported type: " + columnType);
            };
            typeMap.put(columnName, type);
        }
    }

    @Override
    public Object[] apply(long value) {
        // Stub: payload extraction is not yet implemented in this incremental commit.
        return new Object[0];
    }
}
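For context, a minimal sketch of how this mapper could be exercised directly as a LongFunction. The file name and dataset paths here are hypothetical placeholders for illustration, not values from this commit; in a workload the function would normally be referenced by name from a binding recipe rather than constructed by hand.

// Usage sketch; "testdata.hdf5", "/payload", "/columns", and "/column_types"
// are assumed placeholder inputs, not values shipped with this commit.
LongFunction<Object[]> payloadFn =
    new HdfDatasetToPayload("testdata.hdf5", "/payload", "/columns", "/column_types");
Object[] row = payloadFn.apply(0L); // currently always an empty array (see the stub above)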
@@ -0,0 +1,54 @@
/*
 * Copyright (c) 2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.nosqlbench.virtdata.library.hdf5.from_long.to_string;

import io.jhdf.HdfFile;
import io.jhdf.api.Dataset;
import io.nosqlbench.api.content.NBIO;
import io.nosqlbench.virtdata.api.annotations.Categories;
import io.nosqlbench.virtdata.api.annotations.Category;
import io.nosqlbench.virtdata.api.annotations.ThreadSafeMapper;

import java.util.function.LongFunction;

@ThreadSafeMapper
@Categories(Category.experimental)
public class HdfDatasetToString implements LongFunction<String> {
    private final HdfFile hdfFile;
    private final Dataset dataset;
    private final String separator;

    public HdfDatasetToString(String filename, String dataset, String separator) {
        hdfFile = new HdfFile(NBIO.all().search(filename).one().asPath());
        this.dataset = hdfFile.getDatasetByPath(dataset);
        this.separator = separator;
    }

    @Override
    public String apply(long value) {
        // Join every string in the dataset with the separator. The result does
        // not depend on the input cycle value.
        String[] columnDataset = (String[]) dataset.getData();
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < columnDataset.length; i++) {
            sb.append(columnDataset[i]);
            if (i < columnDataset.length - 1) {
                sb.append(separator);
            }
        }
        return sb.toString();
    }
}
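As above, a hedged usage sketch; the file name and dataset path are assumed placeholders:

// Joins every string in the named dataset into one separator-delimited
// value, independent of the cycle number passed in.
LongFunction<String> joined =
    new HdfDatasetToString("testdata.hdf5", "/columns", ",");
String header = joined.apply(0L); // e.g. "col_a,col_b,col_c" for a 3-entry dataset

Since the loop only interposes the separator between entries, the body of apply is equivalent to String.join(separator, columnDataset).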