mirror of https://github.com/nosqlbench/nosqlbench.git, synced 2025-02-25 18:55:28 -06:00

commit 70408621dd (parent 756e1e5ff0)

    nosqlbench-1071 NBIO should favor finding exactly specified file path as a short circuit
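The change below is mechanical but wide: across the call sites in this diff, prefix(...) becomes searchPrefixes(...), name(...) becomes pathname(...), and extension(...) becomes extensionSet(...), while the short circuit for exactly specified paths is handled inside NBIO itself. A hedged sketch of what a typical call site looks like before and after (the workload name is illustrative, and the nb-api module is assumed to be on the classpath):

// Before this commit (illustrative lookup):
Optional<Content<?>> before = NBIO.all()
    .prefix("activities")
    .name("cql-keyvalue")
    .extension(RawOpsLoader.YAML_EXTENSIONS)
    .first();

// After this commit, the same lookup reads:
Optional<Content<?>> after = NBIO.all()
    .searchPrefixes("activities")
    .pathname("cql-keyvalue")
    .extensionSet(RawOpsLoader.YAML_EXTENSIONS)
    .first();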
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -99,7 +99,7 @@ public class Cqld4Space implements AutoCloseable {
 int port = cfg.getOrDefault("port", 9042);

 Optional<String> scb = cfg.getOptional(String.class, "secureconnectbundle", "scb");
-scb.flatMap(s -> NBIO.all().name(s).first().map(Content::getInputStream))
+scb.flatMap(s -> NBIO.all().pathname(s).first().map(Content::getInputStream))
 .map(builder::withCloudSecureConnectBundle);

 Optional<List<InetSocketAddress>> contactPointsOption = cfg
@@ -229,14 +229,14 @@ public class Cqld4Space implements AutoCloseable {

 for (String loaderspec : loaderspecs) {
 // path
-Optional<Content<?>> fsconfig = NBIO.fs().name(driverconfig).first();
+Optional<Content<?>> fsconfig = NBIO.fs().pathname(driverconfig).first();
 if (fsconfig.isPresent()) {
 loaders.add(DriverConfigLoader.fromPath(fsconfig.get().asPath()));
 continue;
 }

 // classpath
-Optional<Content<?>> cpconfig = NBIO.classpath().name(driverconfig).first();
+Optional<Content<?>> cpconfig = NBIO.classpath().pathname(driverconfig).first();
 if (cpconfig.isPresent()) {
 loaders.add(DriverConfigLoader.fromClasspath(driverconfig));
 continue;
@@ -244,7 +244,7 @@ public class Cqld4Space implements AutoCloseable {

 // URLs
 try {
-Optional<Content<?>> removeconf = NBIO.remote().name(driverconfig).first();
+Optional<Content<?>> removeconf = NBIO.remote().pathname(driverconfig).first();
 if (removeconf.isPresent()) {
 loaders.add(DriverConfigLoader.fromUrl(removeconf.get().getURI().toURL()));
 continue;
@@ -37,9 +37,9 @@ public class CGDefaultCqlBindings implements BindingsLibrary {

 public CGDefaultCqlBindings() {
 Content<?> content = NBIO.all()
-.name(DEFAULT_BINDINGS_FILE)
+.pathname(DEFAULT_BINDINGS_FILE)
 .first()
-.or(() -> NBIO.all().prefix(DEFAULT_CFG_DIR).name(DEFAULT_BINDINGS_FILE).first())
+.or(() -> NBIO.all().searchPrefixes(DEFAULT_CFG_DIR).pathname(DEFAULT_BINDINGS_FILE).first())
 .orElseThrow(
 () -> new RuntimeException("Unable to load " + DEFAULT_BINDINGS_FILE +
 ", from local dir or internally as cqlgen" + DEFAULT_BINDINGS_FILE)
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -125,7 +125,7 @@ public class CGWorkloadExporter implements BundledApp {
 Yaml yaml = new Yaml();
 CGWorkloadExporter exporter;

-Content<?> cqlgencfg = NBIO.local().prefix("cqlgen").name("cqlgen").extension("conf").first().orElseThrow();
+Content<?> cqlgencfg = NBIO.local().searchPrefixes("cqlgen").pathname("cqlgen").extensionSet("conf").first().orElseThrow();
 if (cqlgencfg == null) {
 throw new RuntimeException("Unable to load cqlgen.conf");
 }
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -30,7 +30,7 @@ public class HttpStatusCodes {
 private static final IetfStatusCode[] codes = loadMap();

 private static IetfStatusCode[] loadMap() {
-Content<?> csv = NBIO.local().name("ietf-http-status-codes").extension("csv").one();
+Content<?> csv = NBIO.local().pathname("ietf-http-status-codes").extensionSet("csv").one();
 InputStreamReader isr = new InputStreamReader(csv.getInputStream());
 IetfStatusCode[] codes = new IetfStatusCode[600];

@@ -54,7 +54,7 @@ public class OpsLoader {
 public static OpsDocList loadPath(String path, Map<String, ?> params, String... searchPaths) {
 String[] extensions = path.indexOf('.')>-1 ? new String[]{} : YAML_EXTENSIONS;

-Content<?> foundPath = NBIO.all().prefix(searchPaths).name(path).extension(extensions).first()
+Content<?> foundPath = NBIO.all().searchPrefixes(searchPaths).pathname(path).extensionSet(extensions).first()
 .orElseThrow(() -> new RuntimeException("Unable to load path '" + path + "'"));
 OpTemplateFormat fmt = OpTemplateFormat.valueOfURI(foundPath.getURI());
 return loadString(foundPath.asString(), fmt, params, foundPath.getURI());
@@ -81,9 +81,7 @@ public class OpsLoader {

 transformer.checkpointAccesses().forEach((k, v) -> {
 layered.addTemplateVariable(k, v);
-if (params.containsKey(k)) {
-params.remove(k);
-}
+params.remove(k);
 });

 return layered;
@@ -69,7 +69,7 @@ public class RawOpsLoader {

 String data = null;
 try {
-Optional<Content<?>> oyaml = NBIO.all().prefix(searchPaths).name(path).extension(YAML_EXTENSIONS).first();
+Optional<Content<?>> oyaml = NBIO.all().searchPrefixes(searchPaths).pathname(path).extensionSet(YAML_EXTENSIONS).first();
 data = oyaml.map(Content::asString).orElseThrow(() -> new BasicError("Unable to load " + path));
 return loadString(data);
 } catch (Exception e) {
@@ -65,7 +65,7 @@ public class RawYamlLoader {

 String data = null;
 try {
-Optional<Content<?>> oyaml = NBIO.all().prefix(searchPaths).name(path).extension(YAML_EXTENSIONS).first();
+Optional<Content<?>> oyaml = NBIO.all().searchPrefixes(searchPaths).pathname(path).extensionSet(YAML_EXTENSIONS).first();
 data = oyaml.map(Content::asString).orElseThrow(() -> new BasicError("Unable to load " + path));
 return loadString(logger, data);
 } catch (Exception e) {
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -28,15 +28,11 @@ import io.nosqlbench.api.config.standard.NBConfiguration;
 import io.nosqlbench.api.content.Content;
 import io.nosqlbench.api.content.NBIO;

-import java.io.File;
-import java.nio.file.Files;
-import java.nio.file.Path;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.function.Function;
 import java.util.function.LongFunction;
-import java.util.stream.Stream;

 /**
 * <P>The DriverAdapter interface is expected to be the replacement
@@ -185,10 +181,10 @@ public interface DriverAdapter<OPTYPE extends Op, SPACETYPE> {

 String dev_docspath = "adapter-" + this.getAdapterName() + "/src/main/resources/docs/" + this.getAdapterName();
 String cp_docspath = "docs/" + this.getAdapterName();
-Optional<Content<?>> bundled_docs = NBIO.local().name(dev_docspath, cp_docspath).first();
+Optional<Content<?>> bundled_docs = NBIO.local().pathname(dev_docspath, cp_docspath).first();
 bundled_docs.map(Content::asPath).ifPresent(docs::addContentsOf);

-Optional<Content<?>> maindoc = NBIO.local().name("/src/main/resources/" + this.getAdapterName() + ".md", this.getAdapterName() + ".md").first();
+Optional<Content<?>> maindoc = NBIO.local().pathname("/src/main/resources/" + this.getAdapterName() + ".md", this.getAdapterName() + ".md").first();

 maindoc.map(Content::asPath).ifPresent(docs::addPath);

@@ -30,7 +30,7 @@ public class SpecDocsManifest implements BundledMarkdownManifest {
 @Override
 public DocsBinder getDocs() {
 Docs docs = new Docs().namespace("workload_definition");
-List<Content<?>> specfiles = NBIO.classpath().prefix("workload_definition/").extension(".md").list();
+List<Content<?>> specfiles = NBIO.classpath().searchPrefixes("workload_definition/").extensionSet(".md").list();
 for (Content<?> specfile : specfiles) {
 docs.addPath(specfile.asPath());
 }
@@ -50,10 +50,10 @@ public class NBCLIScenarioParser {

 public static boolean isFoundWorkload(String workload, String... includes) {
 Optional<Content<?>> found = NBIO.all()
-.prefix("activities")
-.prefix(includes)
-.name(workload)
-.extension(RawOpsLoader.YAML_EXTENSIONS)
+.searchPrefixes("activities")
+.searchPrefixes(includes)
+.pathname(workload)
+.extensionSet(RawOpsLoader.YAML_EXTENSIONS)
 .first();
 return found.isPresent();
 }
@@ -64,10 +64,10 @@ public class NBCLIScenarioParser {

 String workloadName = arglist.removeFirst();
 Optional<Content<?>> found = NBIO.all()
-.prefix("activities")
-.prefix(includes)
-.name(workloadName)
-.extension(RawOpsLoader.YAML_EXTENSIONS)
+.searchPrefixes("activities")
+.searchPrefixes(includes)
+.pathname(workloadName)
+.extensionSet(RawOpsLoader.YAML_EXTENSIONS)
 .first();
 //
 Content<?> workloadContent = found.orElseThrow();
@@ -108,10 +108,10 @@ public class NBCLIScenarioParser {

 // Load in named scenario
 Content<?> yamlWithNamedScenarios = NBIO.all()
-.prefix(SEARCH_IN)
-.prefix(includes)
-.name(workloadName)
-.extension(RawOpsLoader.YAML_EXTENSIONS)
+.searchPrefixes(SEARCH_IN)
+.searchPrefixes(includes)
+.pathname(workloadName)
+.extensionSet(RawOpsLoader.YAML_EXTENSIONS)
 .first().orElseThrow();
 // TODO: The yaml needs to be parsed with arguments from each command independently to support template vars
 OpsDocList scenariosYaml = OpsLoader.loadContent(yamlWithNamedScenarios, new LinkedHashMap<>(userProvidedParams));
@@ -320,8 +320,8 @@ public class NBCLIScenarioParser {
 }
 }

-Content<?> content = NBIO.all().prefix(SEARCH_IN)
-.name(referenced).extension(RawOpsLoader.YAML_EXTENSIONS)
+Content<?> content = NBIO.all().searchPrefixes(SEARCH_IN)
+.pathname(referenced).extensionSet(RawOpsLoader.YAML_EXTENSIONS)
 .one();

 OpsDocList stmts = null;
@@ -379,14 +379,14 @@ public class NBCLIScenarioParser {

 public static List<WorkloadDesc> getWorkloadsWithScenarioScripts(boolean defaultIncludes, String... includes) {

-NBPathsAPI.GetPrefix searchin = NBIO.all();
+NBPathsAPI.GetPrefixes searchin = NBIO.all();
 if (defaultIncludes) {
-searchin = searchin.prefix(SEARCH_IN);
+searchin = searchin.searchPrefixes(SEARCH_IN);
 }

 List<Content<?>> activities = searchin
-.prefix(includes)
-.extension(RawOpsLoader.YAML_EXTENSIONS)
+.searchPrefixes(includes)
+.extensionSet(RawOpsLoader.YAML_EXTENSIONS)
 .list();

 return filterForScenarios(activities);
@@ -395,15 +395,15 @@ public class NBCLIScenarioParser {

 public static List<String> getScripts(boolean defaultIncludes, String... includes) {

-NBPathsAPI.GetPrefix searchin = NBIO.all();
+NBPathsAPI.GetPrefixes searchin = NBIO.all();
 if (defaultIncludes) {
-searchin = searchin.prefix(SEARCH_IN);
+searchin = searchin.searchPrefixes(SEARCH_IN);
 }

 List<Path> scriptPaths = searchin
-.prefix("scripts/auto")
-.prefix(includes)
-.extension("js")
+.searchPrefixes("scripts/auto")
+.searchPrefixes(includes)
+.extensionSet("js")
 .list().stream().map(Content::asPath).collect(Collectors.toList());

 List<String> scriptNames = new ArrayList();
@@ -145,7 +145,7 @@ public class BasicScriptBuffer implements ScriptBuffer {

 logger.debug(() -> "Looking for " + script_path);

-Content<?> one = NBIO.all().prefix("scripts").name(script_path).extension("js").one();
+Content<?> one = NBIO.all().searchPrefixes("scripts").pathname(script_path).extensionSet("js").one();
 scriptData = one.asString();

 StrInterpolator interpolator = new StrInterpolator(cmd.getParams());
@@ -306,16 +306,16 @@ public class NBCLI implements Function<String[], Integer> {
 logger.debug(() -> "user requests to copy out " + resourceToCopy);

 Optional<Content<?>> tocopy = NBIO.classpath()
-.prefix("activities")
-.prefix(options.wantsIncludes())
-.name(resourceToCopy).extension(RawOpsLoader.YAML_EXTENSIONS).first();
+.searchPrefixes("activities")
+.searchPrefixes(options.wantsIncludes())
+.pathname(resourceToCopy).extensionSet(RawOpsLoader.YAML_EXTENSIONS).first();

 if (tocopy.isEmpty()) {

 tocopy = NBIO.classpath()
-.prefix().prefix(options.wantsIncludes())
-.prefix(options.wantsIncludes())
-.name(resourceToCopy).first();
+.searchPrefixes().searchPrefixes(options.wantsIncludes())
+.searchPrefixes(options.wantsIncludes())
+.pathname(resourceToCopy).first();
 }

 Content<?> data = tocopy.orElseThrow(
@@ -71,9 +71,9 @@ public class NBCLICommandParser {
 break;
 default:
 Optional<Content<?>> scriptfile = NBIO.local()
-.prefix("scripts/auto")
-.name(word)
-.extension("js")
+.searchPrefixes("scripts/auto")
+.pathname(word)
+.extensionSet("js")
 .first();

 //Script
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -35,9 +35,9 @@ public class PathCanonicalizer {

 public String canonicalizePath(String path) {

-Optional<Content<?>> found = NBIO.local().prefix("activities")
-.prefix(includes)
-.name(path)
+Optional<Content<?>> found = NBIO.local().searchPrefixes("activities")
+.searchPrefixes(includes)
+.pathname(path)
 .first();

 if (found.isPresent()) {
@@ -46,7 +46,7 @@ public class PathCanonicalizer {
 rewriteTo=(rewriteTo.startsWith(separator) ? rewriteTo.substring(1) : rewriteTo);

 if (!rewriteTo.equals(path)) {
-if (NBIO.local().prefix("activities").prefix(includes).name(rewriteTo).first().isPresent()) {
+if (NBIO.local().searchPrefixes("activities").searchPrefixes(includes).pathname(rewriteTo).first().isPresent()) {
 logger.info("rewrote path for " + path + " as " + rewriteTo);
 return rewriteTo;
 } else {
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -33,7 +33,7 @@ public class PMatrixElemTest {
 @Disabled
 public void testMatrixElem() {
 Gson gson = new GsonBuilder().create();
-String json = NBIO.classpath().name("test.json").one().asString();
+String json = NBIO.classpath().pathname("test.json").one().asString();
 Type type = new TypeToken<PromQueryResult<PMatrixData>>() {
 }.getType();
 Object result = gson.fromJson(json, type);
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -114,8 +114,8 @@ public class ActivityTypeLoader {

 private Set<URL> addLibDir(Set<URL> urlsToAdd, Path libpath) {
 Set<URL> urls = NBIO.local()
-.prefix(libpath.toString())
-.extension(".jar")
+.searchPrefixes(libpath.toString())
+.extensionSet(".jar")
 .list().stream().map(Content::getURL)
 .collect(Collectors.toSet());
 urlsToAdd.addAll(urls);
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -53,10 +53,10 @@ public class MarkdownFinder {

 public String forResourceMarkdown(String s, String... additionalSearchPaths) {
 Optional<Content<?>> docs = NBIO.local()
-.prefix("docs")
-.prefix(additionalSearchPaths)
-.name(s)
-.extension(".md")
+.searchPrefixes("docs")
+.searchPrefixes(additionalSearchPaths)
+.pathname(s)
+.extensionSet(".md")
 .first();

 return docs.map(Content::asString).orElse(null);
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -82,19 +82,19 @@ public class DockerMetricsManager {
 String GRAFANA_IMG = "grafana/grafana";
 tag = (tag == null || tag.isEmpty()) ? "latest" : tag;
 String name = "grafana";
-List<Integer> port = Arrays.asList(3000);
+List<Integer> port = List.of(3000);

 boolean grafanaFilesExist = grafanaFilesExist();
 if (!grafanaFilesExist) {
 setupGrafanaFiles(ip);
 }

-List<String> volumeDescList = Arrays.asList(
+List<String> volumeDescList = List.of(
 userHome + "/.nosqlbench/grafana:/var/lib/grafana:rw"
 //cwd+"/docker-metrics/grafana:/grafana",
 //cwd+"/docker-metrics/grafana/datasources:/etc/grafana/provisioning/datasources",
 //cwd+"/docker-metrics/grafana/dashboardconf:/etc/grafana/provisioning/dashboards"
 //,cwd+"/docker-metrics/grafana/dashboards:/var/lib/grafana/dashboards:ro"
 );
 List<String> envList = Arrays.asList(
 "GF_SECURITY_ADMIN_PASSWORD=admin",
@@ -125,7 +125,7 @@ public class DockerMetricsManager {
 logger.info("preparing to start docker metrics");
 String PROMETHEUS_IMG = "prom/prometheus";
 String name = "prom";
-List<Integer> port = Arrays.asList(9090);
+List<Integer> port = List.of(9090);

 if (!promFilesExist()) {
 setupPromFiles(ip);
@@ -175,7 +175,7 @@ public class DockerMetricsManager {

 setupGraphiteFiles(volumeDescList);

-List<String> envList = Arrays.asList();
+List<String> envList = List.of();

 String reload = null;
 List<String> linkNames = new ArrayList();
@@ -342,7 +342,7 @@ public class DockerMetricsManager {


 private void configureGrafana() {
-List<Content<?>> dashboardContent = NBIO.all().prefix("docker/dashboards").extension(".json").list();
+List<Content<?>> dashboardContent = NBIO.all().searchPrefixes("docker/dashboards").extensionSet(".json").list();

 for (Content<?> content : dashboardContent) {
 String dashboardData = content.asString();
@@ -355,7 +355,7 @@ public class DockerMetricsManager {

 }

-List<Content<?>> datasources = NBIO.all().prefix("docker/datasources").extension(".yaml").list();
+List<Content<?>> datasources = NBIO.all().searchPrefixes("docker/datasources").extensionSet(".yaml").list();

 for (Content<?> datasource : datasources) {
 String datasourceContent = datasource.asString();
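The Arrays.asList to List.of changes in this class trade a fixed-size view that still allows element replacement for a fully immutable list. A standalone sketch of the difference, with illustrative values:

import java.util.Arrays;
import java.util.List;

// Both lists reject add(), but only the Arrays.asList view allows set().
public class ListOfSketch {
    public static void main(String[] args) {
        List<Integer> viaArrays = Arrays.asList(3000);
        List<Integer> viaListOf = List.of(3000);

        viaArrays.set(0, 3001);    // allowed: fixed-size, but mutable by index
        // viaListOf.set(0, 3001); // would throw UnsupportedOperationException
        System.out.println(viaArrays + " " + viaListOf);
    }
}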
@@ -38,10 +38,10 @@ public interface BundledApp extends ToIntFunction<String[]> {

 String dev_docspath = "app-" + this.getBundledAppName() + "/src/main/resources/docs/" + this.getBundledAppName();
 String cp_docspath = "docs/" + this.getBundledAppName();
-Optional<Content<?>> bundled_docs = NBIO.local().name(dev_docspath, cp_docspath).first();
+Optional<Content<?>> bundled_docs = NBIO.local().pathname(dev_docspath, cp_docspath).first();
 bundled_docs.map(Content::asPath).ifPresent(docs::addContentsOf);

-Optional<Content<?>> maindoc = NBIO.local().name("/src/main/resources/" + this.getBundledAppName() + ".md", this.getBundledAppName() + ".md").first();
+Optional<Content<?>> maindoc = NBIO.local().pathname("/src/main/resources/" + this.getBundledAppName() + ".md", this.getBundledAppName() + ".md").first();

 maindoc.map(Content::asPath).ifPresent(docs::addPath);

@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -45,7 +45,7 @@ public class DataSources {
 String filename = data.substring("IMPORT{".length(), data.length() - 1);
 Path filepath = Path.of(filename);

-src = NBIO.all().name(filename).first()
+src = NBIO.all().pathname(filename).first()
 .map(c -> {
 logger.debug(() -> "found 'data' at " + c.getURI());
 return c.asString();
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -103,7 +103,7 @@ public class ConfigLoader {
 String filename = data.substring("IMPORT{".length(), data.length() - 1);
 Path filepath = Path.of(filename);

-data = NBIO.all().name(filename).first()
+data = NBIO.all().pathname(filename).first()
 .map(c -> {
 logger.debug(() -> "found 'data' at " + c.getURI());
 return c.asString();
@@ -111,9 +111,8 @@ public class ConfigLoader {
 }

 if (data.startsWith("{") || data.startsWith("[")) {
-JsonParser parser = new JsonParser();

-JsonElement jsonElement = parser.parse(data);
+JsonElement jsonElement = JsonParser.parseString(data);
 if (jsonElement.isJsonArray()) {
 JsonArray asJsonArray = jsonElement.getAsJsonArray();
 for (JsonElement element : asJsonArray) {
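The Gson change above replaces the deprecated instance-based JsonParser with its static entry point. A minimal standalone sketch of the new call (the JSON literal is illustrative):

import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

public class ParseSketch {
    public static void main(String[] args) {
        // Static parse replaces `new JsonParser().parse(...)`.
        JsonElement element = JsonParser.parseString("[{\"name\":\"example\"}]");
        System.out.println(element.isJsonArray()); // true
    }
}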
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -31,15 +31,13 @@ import java.util.regex.Pattern;
 import java.util.stream.Collectors;

 /**
-* NBIO is a helper utility packaged as a search builder and fluent API.
-* It uses value semantics internally, so it is safe to re-use as a
-* stateful configuration builder for finding files in various ways.
+* NBIO is a helper utility packaged as a search builder and fluent API. It uses value semantics internally, so it is
+* safe to re-use as a stateful configuration builder for finding files in various ways.
 * <p>
-* Since this is meant to ease development around a usually over-complicated
-* surface area in the JVM (Files, Paths, URIs, accessing data, knowing where it comes
-* from, searching for it, etc), more emphasis was put on ease of use and
-* clarity than efficiency. This set of classes is not expected to be used
-* much in NoSqlBench after workload initialization, so is not performance oriented
+* Since this is meant to ease development around a usually over-complicated surface area in the JVM (Files, Paths,
+* URIs, accessing data, knowing where it comes from, searching for it, etc), more emphasis was put on ease of use and
+* clarity than efficiency. This set of classes is not expected to be used much in NoSqlBench after workload
+* initialization, so is not performance oriented.
 */
 public class NBIO implements NBPathsAPI.Facets {

@@ -52,7 +50,8 @@ public class NBIO implements NBPathsAPI.Facets {
 private URIResolver resolver;

 private List<String> names = new ArrayList<>();
-private List<String> extensions = new ArrayList<>();
+// private List<String> extensions = new ArrayList<>();
+private List<Set<String>> extensionSets = new ArrayList<>();
 private Set<String> prefixes = new HashSet<>(Arrays.asList(globalIncludes));

 private NBIO() {
@@ -61,15 +60,15 @@ public class NBIO implements NBPathsAPI.Facets {
 private NBIO(URIResolver resolver,
 Set<String> prefixes,
 List<String> names,
-List<String> extensions) {
+List<Set<String>> extensionSets) {
 this.resolver = resolver;
 this.prefixes = prefixes;
 this.names = names;
-this.extensions = extensions;
+this.extensionSets = extensionSets;
 }

 public static List<String> readLines(String filename) {
-Content<?> data = NBIO.all().prefix("data").name(filename).first().orElseThrow(
+Content<?> data = NBIO.all().searchPrefixes("data").pathname(filename).first().orElseThrow(
 () -> new BasicError("Unable to read lines from " + filename)
 );
 String[] split = data.getCharBuffer().toString().split("\n");
@@ -94,26 +93,26 @@ public class NBIO implements NBPathsAPI.Facets {


 private static InputStream readInputStream(String filename, String... searchPaths) {
-return NBIO.all().prefix(searchPaths).name(filename).one().getInputStream();
+return NBIO.all().searchPrefixes(searchPaths).pathname(filename).one().getInputStream();
 }

 private static Reader readReader(String filename, String... searchPaths) {
-return NBIO.all().prefix(searchPaths).name(filename).one().getReader();
+return NBIO.all().searchPrefixes(searchPaths).pathname(filename).one().getReader();
 }

 public static CharBuffer readCharBuffer(String fileName, String... searchPaths) {
-return NBIO.all().prefix(searchPaths).name(fileName).one().getCharBuffer();
+return NBIO.all().searchPrefixes(searchPaths).pathname(fileName).one().getCharBuffer();
 }

 public static Path getFirstLocalPath(String... potentials) {
-Optional<Content<?>> first = NBIO.local().name(potentials).first();
+Optional<Content<?>> first = NBIO.local().pathname(potentials).first();
 return first.orElseThrow(
 () -> new BasicError("Unable to find loadable content at " + String.join(",", potentials))
 ).asPath();
 }

 public static Optional<Path> findFirstLocalPath(String... potentials) {
-Optional<Content<?>> first = NBIO.local().name(potentials).first();
+Optional<Content<?>> first = NBIO.local().pathname(potentials).first();
 Optional<Path> path = first.map(Content::asPath);
 return path;
 }
@@ -123,46 +122,67 @@ public class NBIO implements NBPathsAPI.Facets {
 }


+/**
+* {@inheritDoc}
+*/
 @Override
-public NBPathsAPI.GetPrefix localContent() {
+public NBPathsAPI.GetPrefixes localContent() {
 this.resolver = URIResolvers.inFS().inCP();
 return this;
 }

+/**
+* {@inheritDoc}
+*/
 @Override
-public NBPathsAPI.GetPrefix remoteContent() {
+public NBPathsAPI.GetPrefixes remoteContent() {
 this.resolver = URIResolvers.inURLs();
 return this;
 }

+/**
+* {@inheritDoc}
+*/
 @Override
-public NBPathsAPI.GetPrefix internalContent() {
+public NBPathsAPI.GetPrefixes internalContent() {
 this.resolver = URIResolvers.inClasspath();
 return this;
 }

+/**
+* {@inheritDoc}
+*/
 @Override
-public NBPathsAPI.GetPrefix fileContent() {
+public NBPathsAPI.GetPrefixes fileContent() {
 this.resolver = URIResolvers.inFS();
 return this;
 }

+/**
+* {@inheritDoc}
+*/
 @Override
-public NBPathsAPI.GetPrefix allContent() {
+public NBPathsAPI.GetPrefixes allContent() {
 this.resolver = URIResolvers.inFS().inCP().inURLs();
 return this;
 }

+/**
+* {@inheritDoc}
+*/
 @Override
-public NBPathsAPI.GetPrefix prefix(String... searchPaths) {
+public NBPathsAPI.GetPrefixes searchPrefixes(String... searchPaths) {
 HashSet<String> addingPaths = new HashSet<>(this.prefixes);
 addingPaths.addAll(Arrays.asList(searchPaths));
-return new NBIO(resolver, addingPaths, names, extensions);
+return new NBIO(resolver, addingPaths, names, extensionSets);
 }

 private final static Pattern extensionPattern = Pattern.compile("\\.[a-zA-Z]+");
 private final static Pattern wildcardsPattern = Pattern.compile(".*?[^?+*][\\?\\+\\*].*");

+/**
+* {@inheritDoc}
+*/
 @Override
 public NBPathsAPI.DoSearch search(String... searches) {
 List<String> prefixesToAdd = new ArrayList<>();
@@ -185,7 +205,7 @@ public class NBIO implements NBPathsAPI.Facets {

 int literalsTill = 0;
 while (true) {
-if (literalsTill>= parts.length-1) {
+if (literalsTill >= parts.length - 1) {
 break;
 }
 if (wildcardsPattern.matcher(parts[literalsTill]).matches()) {
@@ -201,31 +221,81 @@ public class NBIO implements NBPathsAPI.Facets {
 prefixesToAdd.add(prefix);

 String[] nameary = new String[parts.length - literalsTill];
-System.arraycopy(parts,literalsTill,nameary,0,nameary.length);
+System.arraycopy(parts, literalsTill, nameary, 0, nameary.length);
 String name = String.join(File.separator, nameary);
 namesToAdd.add(name);

 }

-return prefix(prefixesToAdd.toArray(new String[]{}))
-.name(namesToAdd.toArray(new String[]{}))
-.extension(extensionsToAdd.toArray(new String[]{}));
+return searchPrefixes(prefixesToAdd.toArray(new String[]{}))
+.pathname(namesToAdd.toArray(new String[]{}))
+.extensionSet(extensionsToAdd.toArray(new String[]{}));
 }

+/**
+* {@inheritDoc}
+*/
 @Override
-public NBPathsAPI.GetExtension name(String... searchNames) {
+public NBPathsAPI.GetExtensions pathname(String... searchNames) {
 ArrayList<String> addingNames = new ArrayList<>(this.names);
 addingNames.addAll(Arrays.asList(searchNames));
-return new NBIO(resolver, prefixes, addingNames, extensions);
+return new NBIO(resolver, prefixes, addingNames, extensionSets);
 }

+/**
+* {@inheritDoc}
+*/
 @Override
-public NBPathsAPI.DoSearch extension(String... extensions) {
-ArrayList<String> addingExtensions = new ArrayList<>(this.extensions);
-for (String addingExtension : extensions) {
-addingExtensions.add(dotExtension(addingExtension));
+public NBPathsAPI.GetExtensions extensionSet(String... extensions) {
+if (extensions.length==0) {
+return this;
 }
-return new NBIO(resolver, prefixes, names, addingExtensions);
+return this.extensionSets(
+new HashSet<>(
+Arrays.asList(extensions).stream()
+.map(String::toLowerCase)
+.map(s -> s.startsWith(".") ? s : "." + s)
+.collect(Collectors.toList())
+)
+);
+}
+
+/**
+* {@inheritDoc}
+*/
+@Override
+public NBPathsAPI.GetExtensions extensionSets(Set<String>... cosets) {
+for (Set<String> coset : cosets) {
+if (coset.size()==0) {
+throw new RuntimeException("Extension co-set can not be empty.");
+}
+for (String entry : coset) {
+String adjusted = (entry.startsWith(".") ? entry : "." + entry).toLowerCase();
+if (coset.add(adjusted)) {
+coset.remove(entry);
+}
+}
+
+Set<String> addTo = null;
+for (Set<String> extensionSet : this.extensionSets) {
+Set<String> union = new LinkedHashSet<>(coset);
+for (String entry : coset) {
+if (extensionSet.contains(entry)) {
+addTo = extensionSet;
+break;
+}
+}
+if (addTo != null) {
+break;
+}
+}
+if (addTo == null) {
+addTo = new LinkedHashSet<>();
+extensionSets.add(addTo);
+}
+addTo.addAll(coset);
+}
+return this;
 }

 /**
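The new extensionSet(...) treats its arguments as a co-set of synonymous suffixes: each entry is lowercased and given a leading dot, and extensionSets(...) then merges the group into any already-registered set it overlaps. A standalone sketch of just the normalization step (the class and values here are illustrative, not part of the commit):

import java.util.LinkedHashSet;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ExtensionCosetSketch {
    public static void main(String[] args) {
        // "yaml", "YML", and ".yml" all normalize into one synonym set: [.yaml, .yml]
        Set<String> coset = Stream.of("yaml", "YML", ".yml")
            .map(String::toLowerCase)
            .map(s -> s.startsWith(".") ? s : "." + s)
            .collect(Collectors.toCollection(LinkedHashSet::new));
        System.out.println(coset);
    }
}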
@@ -233,7 +303,7 @@ public class NBIO implements NBPathsAPI.Facets {
 *
 * @return a builder
 */
-public static NBPathsAPI.GetPrefix all() {
+public static NBPathsAPI.GetPrefixes all() {
 return new NBIO().allContent();
 }

@@ -242,7 +312,7 @@ public class NBIO implements NBPathsAPI.Facets {
 *
 * @return a builder
 */
-public static NBPathsAPI.GetPrefix classpath() {
+public static NBPathsAPI.GetPrefixes classpath() {
 return new NBIO().internalContent();
 }

@@ -251,7 +321,7 @@ public class NBIO implements NBPathsAPI.Facets {
 *
 * @return a builder
 */
-public static NBPathsAPI.GetPrefix fs() {
+public static NBPathsAPI.GetPrefixes fs() {
 return new NBIO().fileContent();
 }

@@ -260,7 +330,7 @@ public class NBIO implements NBPathsAPI.Facets {
 *
 * @return a builder
 */
-public static NBPathsAPI.GetPrefix local() {
+public static NBPathsAPI.GetPrefixes local() {
 return new NBIO().localContent();
 }

@@ -269,11 +339,14 @@ public class NBIO implements NBPathsAPI.Facets {
 *
 * @return a builder
 */
-public static NBPathsAPI.GetPrefix remote() {
+public static NBPathsAPI.GetPrefixes remote() {
 return new NBIO().remoteContent();
 }


+/**
+* {@inheritDoc}
+*/
 @Override
 public Optional<Content<?>> first() {

@@ -296,9 +369,38 @@ public class NBIO implements NBPathsAPI.Facets {
 throw new RuntimeException("Invalid code, go fix it, this should never happen.");
 }

+/**
+* {@inheritDoc}
+*/
 @Override
 public Content<?> one() {

+if (extensionSets.size()==0) {
+for (String name : names) {
+Content<?> found = resolver.resolveOne(name);
+if (found != null) {
+return found;
+}
+}
+} else {
+for (String name : names) {
+Content<?> found = resolver.resolveOne(name);
+if (found != null) {
+return found;
+}
+
+for (Set<String> extensionSet : extensionSets) {
+for (String es : extensionSet) {
+for (String extension : extensionSet) {
+found = resolver.resolveOne(name + extension);
+if (found != null) {
+return found;
+}
+}
+}
+}
+}
+}
+
 List<Content<?>> list = list();
 if (list.size() == 0) {
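This rewritten one() is the short circuit named in the commit title: each exactly specified name is handed to the resolver verbatim before any prefix or extension-synonym expansion, and the broader list() search only runs when no direct hit is found. A hedged usage sketch (paths are illustrative; the nb-api module is assumed to be on the classpath):

// An exactly specified path is tried as-is and returns immediately if it resolves.
Content<?> exact = NBIO.fs()
    .pathname("activities/baselines/cql-keyvalue.yaml")
    .one();

// A bare logical name still falls back to prefix and extension-synonym expansion.
Content<?> expanded = NBIO.all()
    .searchPrefixes("activities")
    .pathname("cql-keyvalue")
    .extensionSet(RawOpsLoader.YAML_EXTENSIONS)
    .one();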
@@ -313,11 +415,14 @@ public class NBIO implements NBPathsAPI.Facets {

 }

+/**
+* {@inheritDoc}
+*/
 @Override
 public List<List<Content<?>>> resolveEach() {
 List<List<Content<?>>> resolved = new ArrayList<>();
 for (String name : names) {
-LinkedHashSet<String> slotSearchPaths = expandNamesAndSuffixes(List.of(name), new HashSet<>(extensions));
+LinkedHashSet<String> slotSearchPaths = expandNamesAndSuffixes(this.prefixes, List.of(name), extensionSets);
 Content<?> content = null;
 for (String slotSearchPath : slotSearchPaths) {
 List<Content<?>> contents = resolver.resolve(slotSearchPath);
@@ -330,7 +435,7 @@ public class NBIO implements NBPathsAPI.Facets {

 // for testing
 public LinkedHashSet<String> expandNamesAndSuffixes() {
-return expandNamesAndSuffixes(names, new HashSet(extensions));
+return expandNamesAndSuffixes(prefixes, names, extensionSets);
 }


@@ -339,46 +444,92 @@ public class NBIO implements NBPathsAPI.Facets {
 * If no name is given, then <pre>{@code .*}</pre> is used.
 * If suffixes are given, then all returned results must include at least
 * one of the suffixes. If the name includes one of the suffixes given,
-* then additional names are expanded to match the additional suffixes.
+* then additional suffixes are added to the list of searchable path names.
 *
-* @param _names base filenames or path fragment, possibly fully-qualified
-* @param _suffixes zero or more suffixes, which, if given, imply that one of them must match
+* @param _names
+* base filenames or path fragment, possibly fully-qualified
+* @param _suffixCosetLists
+* zero or more suffixes, which, if given, imply that one of them must match
 * @return Expanded names of valid filename fragments according to the above rules
 */
-public LinkedHashSet<String> expandNamesAndSuffixes(
+public static LinkedHashSet<String> expandNamesAndSuffixes(
+Set<String> _prefixes,
 List<String> _names,
-Set<String> _suffixes) {
+List<Set<String>> _suffixCosetLists) {

 LinkedHashSet<String> searches = new LinkedHashSet<>();

-if (_names.size() == 0 && prefixes.size() == 0) {
-searches.add(".*");
-} else if (_names.size() > 0 && _suffixes.size() == 0) {
-searches.addAll(_names);
-} else if (_names.size() == 0 && _suffixes.size() > 0) {
-_suffixes.stream().map(s -> ".*" + s).forEach(searches::add);
+if (_names.size() > 0) {
+if (_suffixCosetLists.size() > 0) { // names & suffixes
+searches.addAll(expandSynonymPaths(_names, _suffixCosetLists));
+} else { // names only
+searches.addAll(_names);
+}
 } else {
-for (String name : _names) {
-// if (!name.equals(".*")) {
-// searches.add(name);
-// }
-String basename = name;
-boolean suffixed = false;
-for (String suffix : _suffixes) {
-if (name.endsWith(suffix)) {
-suffixed = true;
-basename = name.substring(0, name.length() - suffix.length());
-break;
+if (_suffixCosetLists.size() > 0) { // suffixes only
+for (Set<String> suffixCoset : _suffixCosetLists) {
+for (String suffix : suffixCoset) {
+searches.add(".*" + suffix);
 }
 }
-for (String suffix : _suffixes) {
-searches.add(basename + suffix);
-}
+} else { // neither
+searches.add(".*");
 }
 }
+if (_prefixes.size() > 0) {
+LinkedHashSet<String> prefixed = new LinkedHashSet<>(searches);
+for (String prefix : _prefixes) {
+for (String search : searches) {
+prefixed.add(prefix + File.separator + search);
+}
+}
+searches = prefixed;
+}
+
 return searches;
 }

+public static Set<String> expandSynonymPaths(List<String> names, List<Set<String>> suffixSets) {
+if (suffixSets.size()==0) {
+return new LinkedHashSet<>(names);
+}
+Set<String> newnames = new LinkedHashSet<>();
+for (String name : names) {
+Set<String> matchingSet = null;
+String basename = null;
+for (Set<String> suffixSet : suffixSets) {
+for (String suffix : suffixSet) {
+if (name.endsWith(suffix)) {
+matchingSet = suffixSet;
+basename = name.substring(0, name.length() - suffix.length());
+// newnames.add(name); // Leave this here, it initializes precedence
+break;
+}
+}
+if (basename != null) {
+break;
+}
+}
+if (basename == null) {
+if (name.contains(".") && !name.equals(".*")) {
+// newnames.add(name);
+suffixSets.stream().flatMap(s -> s.stream()).map(s -> name + s).forEach(newnames::add);
+} else {
+suffixSets.stream().flatMap(s -> s.stream()).map(s -> name + s).forEach(newnames::add);
+}
+} else {
+for (String extension : matchingSet) {
+newnames.add(basename + extension);
+}
+}
+}
+
+return newnames;
+}
+
+/**
+* {@inheritDoc}
+*/
 @Override
 public List<Content<?>> list() {
 LinkedHashSet<String> searches = expandNamesAndSuffixes();
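Taken together, expandSynonymPaths and the now-static expandNamesAndSuffixes turn a logical name plus an extension co-set into the concrete patterns that list() scans. A hedged worked example with illustrative inputs, assuming the LinkedHashSet iteration order shown above and '/' as the file separator:

Set<String> prefixes = Set.of("activities");
List<String> names = List.of("myworkload.yml");
List<Set<String>> cosets = List.of(new LinkedHashSet<>(List.of(".yaml", ".yml")));

// expandSynonymPaths swaps the matched suffix for every synonym in its co-set:
//   [myworkload.yaml, myworkload.yml]
// expandNamesAndSuffixes then adds prefixed variants alongside the bare ones:
//   [myworkload.yaml, myworkload.yml, activities/myworkload.yaml, activities/myworkload.yml]
LinkedHashSet<String> searches = NBIO.expandNamesAndSuffixes(prefixes, names, cosets);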
@ -413,6 +564,9 @@ public class NBIO implements NBPathsAPI.Facets {
|
|||||||
return new ArrayList<>(foundFiles);
|
return new ArrayList<>(foundFiles);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* {@inheritDoc}
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public List<Path> relativeTo(String... base) {
|
public List<Path> relativeTo(String... base) {
|
||||||
String base1 = base[0];
|
String base1 = base[0];
|
||||||
@ -462,13 +616,16 @@ public class NBIO implements NBPathsAPI.Facets {
|
|||||||
// }
|
// }
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* {@inheritDoc}
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public String toString() {
|
public String toString() {
|
||||||
return "NBIO{" +
|
return "NBIO{" +
|
||||||
"resolver=" + resolver +
|
"resolver=" + resolver +
|
||||||
", prefixes=" + prefixes +
|
", prefixes=" + prefixes +
|
||||||
", names=" + names +
|
", names=" + names +
|
||||||
", extensions=" + extensions +
|
", extensionSets=" + extensionSets +
|
||||||
'}';
|
'}';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
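For orientation, a minimal sketch of how the new expandSynonymPaths helper behaves; the file name and the .yaml/.yml coset below are illustrative values, not taken from this commit:

// Sketch only; assumes java.util.List/Set and the NBIO class above are in scope.
// A name that already ends with a member of a suffix coset is expanded to every
// synonym in that coset, rather than having other suffixes appended.
Set<String> expanded = NBIO.expandSynonymPaths(
    List.of("workload.yaml"),            // illustrative input name
    List.of(Set.of(".yaml", ".yml"))     // one coset of synonymous suffixes
);
// expanded now contains "workload.yaml" and "workload.yml"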
nb-api/src/main/java/io/nosqlbench/api/content/NBIOSets.java (new file, 55 lines)
@@ -0,0 +1,55 @@
+/*
+* Copyright (c) 2023 nosqlbench
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package io.nosqlbench.api.content;
+
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+
+public class NBIOSets {
+
+/**
+* Combine overlapping sets or create new ones with no overlap
+* @param setsData Existing sets
+* @param newSets Additional sets
+* @return combined sets
+* @param <T>
+*/
+public static <T extends Comparable<T>> List<Set<T>> combine(List<Set<T>> setsData, Set<T>... newSets) {
+for (Set<T> coset : newSets) {
+Set<T> addTo = null;
+for (Set<T> extensionSet : setsData) {
+Set<T> union = new LinkedHashSet<>(coset);
+for (T entry : coset) {
+if (extensionSet.contains(entry)) {
+addTo = extensionSet;
+break;
+}
+}
+if (addTo != null) {
+break;
+}
+}
+if (addTo==null) {
+addTo=new LinkedHashSet<>();
+setsData.add(addTo);
+}
+addTo.addAll(coset);
+}
+return setsData;
+}
+}
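As a quick usage sketch, NBIOSets.combine merges any new set that overlaps an existing one and appends disjoint sets unchanged; this mirrors the NBIOSetsTest added later in this commit:

// Sketch only; assumes java.util.ArrayList/List/Set are imported.
List<Set<String>> cosets = new ArrayList<>();
cosets = NBIOSets.combine(cosets, Set.of("a", "b"));   // [{a,b}]
cosets = NBIOSets.combine(cosets, Set.of("a", "c"));   // [{a,b,c}]  ("a" overlaps the first set)
cosets = NBIOSets.combine(cosets, Set.of("d"));        // [{a,b,c}, {d}]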
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -19,11 +19,21 @@ package io.nosqlbench.api.content;
 import java.nio.file.Path;
 import java.util.List;
 import java.util.Optional;
+import java.util.Set;

+/**
+* This API provides a single service to locate and load content resource from multiple places,
+* at build time and runtime, using a standard fluent-like pattern. The most important details for new uesrs are:
+* <UL>
+* <LI>Search paths are optional, and are tried <strong>after</strong> non-prefixed paths.</LI>
+* <LI>If extensions are provided, then any result returned <strong>must</strong> match one of the extensions.</LI>
+* <LI>The most efficient way to access a single path will be to use the {@link Facets#one()} method as {@link NBIO#one()}</LI>
+* </UL>
+*/
 public interface NBPathsAPI {

 interface Facets extends
-GetSource, GetPrefix, GetName, GetExtension, DoSearch {
+GetSource, GetPrefixes, GetPathname, GetExtensions, DoSearch {
 }

 interface GetSource {
@@ -32,7 +42,7 @@ public interface NBPathsAPI {
 *
 * @return this builder
 */
-GetPrefix localContent();
+GetPrefixes localContent();

 /**
 * Only return content from remote URLs. If the user is providing non-URL content
@@ -40,7 +50,7 @@ public interface NBPathsAPI {
 *
 * @return this builder
 */
-GetPrefix remoteContent();
+GetPrefixes remoteContent();

 /**
 * Only return content from the runtime classpath, internal resources that are bundled,
@@ -48,14 +58,14 @@ public interface NBPathsAPI {
 *
 * @return this builder
 */
-GetPrefix internalContent();
+GetPrefixes internalContent();

 /**
 * Only return content from the filesystem, but not remote URLs nor internal bundled resources.
 *
 * @return this builder
 */
-GetPrefix fileContent();
+GetPrefixes fileContent();

 /**
 * Return content from everywhere, from remote URls, or from the file system and then the internal
@@ -63,10 +73,10 @@ public interface NBPathsAPI {
 *
 * @return this builder
 */
-GetPrefix allContent();
+GetPrefixes allContent();
 }

-interface GetPrefix extends GetName {
+interface GetPrefixes extends GetPathname {
 /**
 * Each of the prefix paths will be searched if the resource is not found with the exact
 * path given. To be specific, if you want to search within a location based on wildcards,
@@ -75,10 +85,10 @@ public interface NBPathsAPI {
 * @param prefixPaths A list of paths to include in the search
 * @return this builder
 */
-GetPrefix prefix(String... prefixPaths);
+GetPrefixes searchPrefixes(String... prefixPaths);
 }

-interface GetName extends GetExtension {
+interface GetPathname extends GetExtensions {
 /**
 * Provide the names of the resources to be resolved. More than one resource may be provided.
 * If no name is provided, then a wildcard search is assumed.
@@ -86,7 +96,7 @@ public interface NBPathsAPI {
 * @param name The name of the resource to load
 * @return this builder
 */
-GetExtension name(String... name);
+GetExtensions pathname(String... name);

 /**
 * Provide a combined prefix, name and suffix in a combined form. For each search template provided,
@@ -123,7 +133,7 @@ public interface NBPathsAPI {

 }

-interface GetExtension extends DoSearch {
+interface GetExtensions extends DoSearch {
 /**
 * provide a list of optional file extensions which should be considered. If the content is
 * not found under the provided name, then each of the extension is tried in order.
@@ -134,8 +144,8 @@ public interface NBPathsAPI {
 * @param extensions The extension names to try
 * @return this builder
 */
-DoSearch extension(String... extensions);
+GetExtensions extensionSet(String... extensions);
+GetExtensions extensionSets(Set<String>... cosets);
 }

 interface DoSearch {
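The renamed fluent chain reads source, then searchPrefixes, then pathname, then extensionSet, then a search terminal such as first(), one(), or list(). A hypothetical lookup under those assumptions (the "activities" prefix and "baseline" name are illustrative only):

import io.nosqlbench.api.content.Content;
import io.nosqlbench.api.content.NBIO;

import java.util.Optional;

public class NBIOLookupSketch {
    public static void main(String[] args) {
        // Sketch of the renamed facets; values are illustrative, not repo fixtures.
        Optional<Content<?>> workload = NBIO.all()
            .searchPrefixes("activities")      // optional prefixes, tried after the plain path
            .pathname("baseline")              // the resource name to resolve
            .extensionSet("yaml", "yml")       // any result must match one of these suffixes
            .first();
        workload.map(Content::asPath).ifPresent(System.out::println);
    }
}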
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -22,6 +22,7 @@ import java.net.URI;
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Optional;

 /**
 * This is a stateful search object for resources like Paths or URLs.
@@ -48,6 +49,7 @@ public class URIResolver implements ContentResolver {
 * Include resources from all known places, including remote URLs,
 * the local default filesystem, and the classpath, which includes
 * the jars that hold the current runtime application.
+*
 * @return this URISearch
 */
 public URIResolver all() {
@@ -57,6 +59,7 @@ public class URIResolver implements ContentResolver {

 /**
 * Includ resources in the default filesystem
+*
 * @return this URISearch
 */
 public URIResolver inFS() {
@@ -66,6 +69,7 @@ public class URIResolver implements ContentResolver {

 /**
 * Include resources in remote URLs
+*
 * @return this URISearch
 */
 public URIResolver inURLs() {
@@ -75,6 +79,7 @@ public class URIResolver implements ContentResolver {

 /**
 * Include resources within the classpath.
+*
 * @return this URISearch
 */
 public URIResolver inCP() {
@@ -117,26 +122,38 @@ public class URIResolver implements ContentResolver {
 }

 public URIResolver extension(String extension) {
-this.extensions = this.extensions==null ? new ArrayList<>() : this.extensions;
+this.extensions = this.extensions == null ? new ArrayList<>() : this.extensions;
 this.extensions.add(extension);
 return this;
 }

 public URIResolver extraPaths(String extraPath) {
-this.extraPaths = this.extraPaths==null ? new ArrayList<>() : this.extraPaths;
+this.extraPaths = this.extraPaths == null ? new ArrayList<>() : this.extraPaths;
 this.extraPaths.add(Path.of(extraPath));
 return this;
 }

+public Optional<Content<?>> resolveOneOptionally(String candidatePath) {
+List<Content<?>> contents = resolveAll(candidatePath);
+if (contents.size() == 1) {
+return Optional.of(contents.get(0));
+}
+if (contents.size() == 0) {
+return Optional.empty();
+}
+throw new BasicError("Error while loading content '" + candidatePath + "', only one is allowed, but " + contents.size() + " were found");
+
+}
+
 public Content<?> resolveOne(String candidatePath) {
 List<Content<?>> contents = resolveAll(candidatePath);
-if (contents.size()==1) {
+if (contents.size() == 1) {
 return contents.get(0);
 }
-if (contents.size()==0) {
+if (contents.size() == 0) {
 return null;
 }
-throw new BasicError("Error while loading content '" + candidatePath +"', only one is allowed, but " + contents.size() + " were found");
+throw new BasicError("Error while loading content '" + candidatePath + "', only one is allowed, but " + contents.size() + " were found");
 }

 public String toString() {
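A short sketch of the new resolveOneOptionally method, assuming 'resolver' is an already-configured URIResolver (construction and search-scope setup such as all()/inFS()/inCP() are not shown, and the path is illustrative):

// Sketch only; 'resolver' and the candidate path are hypothetical.
Optional<Content<?>> only = resolver.resolveOneOptionally("some/config.yaml");
only.ifPresent(content -> System.out.println("resolved " + content.getURI()));
// As with resolveOne(..), more than one match still raises a BasicError.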
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -29,7 +29,7 @@ public abstract class DocsRootDirectory implements RawMarkdownSource {

 @Override
 public List<Content<?>> getMarkdownInfo() {
-List<Content<?>> list = NBIO.local().name(getRootPathName()).list();
+List<Content<?>> list = NBIO.local().pathname(getRootPathName()).list();
 NBIOWalker.CollectVisitor capture = new NBIOWalker.CollectVisitor(true, false);
 NBIOWalker.RegexFilter filter = new NBIOWalker.RegexFilter("\\.md",true);
 for (Content<?> content : list) {
@@ -0,0 +1,40 @@
+/*
+* Copyright (c) 2023 nosqlbench
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package io.nosqlbench.api.content;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class NBIOSetsTest {
+
+@Test
+public void testSetsAddition() {
+List<Set<String>> data = new ArrayList<>();
+data=NBIOSets.combine(data, Set.of("a","b"));
+assertThat(data).isEqualTo(List.of(Set.of("a","b")));
+data=NBIOSets.combine(data,Set.of("a","c"));
+assertThat(data).isEqualTo(List.of(Set.of("a","b","c")));
+data=NBIOSets.combine(data, Set.of("d"));
+assertThat(data).isEqualTo(List.of(Set.of("a","b","c"),Set.of("d")));
+}
+
+}
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2022 nosqlbench
+* Copyright (c) 2022-2023 nosqlbench
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -26,13 +26,13 @@ import org.junit.jupiter.api.Test;

 import java.io.File;
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Optional;
+import java.util.Set;
 import java.util.stream.Collectors;

 import static org.assertj.core.api.Assertions.assertThat;
@@ -42,72 +42,80 @@ public class NBIOTest {

 @Test
 public void testFullyQualifiedNameSearches() {
-NBIO extensions = (NBIO) NBIO.all().name("foo.bar");
+NBIO extensions = (NBIO) NBIO.all().pathname("foo.bar");
 LinkedHashSet<String> searches = extensions.expandNamesAndSuffixes();
 assertThat(searches).containsExactly("foo.bar");
 }

 @Test
 public void testExpandWildcardAndExtensionsOnly() {
-NBIO extensions = (NBIO) NBIO.all().name(".*").extension("foo","bar");
+NBIO extensions = (NBIO) NBIO.all().pathname(".*").extensionSet("foo","bar");
 LinkedHashSet<String> searches = extensions.expandNamesAndSuffixes();
 assertThat(searches).contains(".*.foo",".*.bar");
 }

 @Test
 public void testExpandNameAndAllSuffixesOnly() {
-NBIO extensions = (NBIO) NBIO.all().name("foo.bar").extension("test1","test2");
+NBIO extensions = (NBIO) NBIO.all().pathname("foo.bar").extensionSet("test1","test2");
 LinkedHashSet<String> searches = extensions.expandNamesAndSuffixes();
 assertThat(searches).containsExactly("foo.bar.test1","foo.bar.test2");
 }

 @Test
 public void testExpandNamesAndExtensionsIfNotExtended() {
-NBIO extensions = (NBIO) NBIO.all().name("foo").extension("baz","beez");
+NBIO extensions = (NBIO) NBIO.all().pathname("foo").extensionSet("baz","beez");
 LinkedHashSet<String> searches = extensions.expandNamesAndSuffixes();
 assertThat(searches).contains("foo.baz","foo.beez");
 }

 @Test
 public void testExpandNamesAndExtensionsAvoidsExtendedAlreadyExtended() {
-NBIO extensions = (NBIO) NBIO.all().name("foo.baz").extension("baz","beez");
+NBIO extensions = (NBIO) NBIO.all().pathname("foo.baz").extensionSet("baz","beez");
 LinkedHashSet<String> searches = extensions.expandNamesAndSuffixes();
 assertThat(searches).contains("foo.baz","foo.beez");
 }

 @Test
 public void testExpandPrefixesAndFullName() {
-NBIO extensions = (NBIO) NBIO.all().prefix("act1","act2").name("foo.bar");
+NBIO extensions = (NBIO) NBIO.all().searchPrefixes("act1","act2").pathname("foo.bar");
 LinkedHashSet<String> searches = extensions.expandNamesAndSuffixes();
-assertThat(searches).containsExactly("foo.bar");
+assertThat(searches).containsExactly("foo.bar","act1/foo.bar","act2/foo.bar");
 }

 @Test
 public void testExpandAddExtensionNotNeeded() {
-NBIO extensions = (NBIO) NBIO.all().name("foo.bar").extension("bar");
+NBIO extensions = (NBIO) NBIO.all().pathname("foo.bar").extensionSet("bar");
 LinkedHashSet<String> searches = extensions.expandNamesAndSuffixes();
 assertThat(searches).containsExactly("foo.bar");
-NBIO extensionsDot = (NBIO) NBIO.all().name("foo.bar").extension(".bar");
+NBIO extensionsDot = (NBIO) NBIO.all().pathname("foo.bar").extensionSet(".bar");
 LinkedHashSet<String> searchesDot = extensionsDot.expandNamesAndSuffixes();
 assertThat(searchesDot).containsExactly("foo.bar");
 }

+@Test
+public void testExpandExtensionCosets() {
+Set<String> paths = NBIO.expandSynonymPaths(List.of("abc123.tubas"), List.of(Set.of(".foo")));
+assertThat(paths).isEqualTo(Set.of("abc123.tubas.foo"));
+paths = NBIO.expandSynonymPaths(List.of("abc123.tubas","def.123"), List.of(Set.of(".456",".789",".123")));
+assertThat(paths).isEqualTo(Set.of("abc123.tubas.123","abc123.tubas.456","abc123.tubas.789","def.123","def.456","def.789"));
+
+}
+
 @Test
 public void testExpandAddExtensionNeeded() {
-NBIO extensions = (NBIO) NBIO.all().name("foo").extension("bar");
+NBIO extensions = (NBIO) NBIO.all().pathname("foo").extensionSet("bar");
 LinkedHashSet<String> searches = extensions.expandNamesAndSuffixes();
 assertThat(searches).containsExactly("foo.bar");
-NBIO extensionsDot = (NBIO) NBIO.all().name("foo").extension(".bar");
+NBIO extensionsDot = (NBIO) NBIO.all().pathname("foo").extensionSet(".bar");
 LinkedHashSet<String> searchesDot = extensionsDot.expandNamesAndSuffixes();
 assertThat(searchesDot).containsExactly("foo.bar");
 }

 @Test
 public void testLoadCsv1Classpath() {
-NBPathsAPI.GetPrefix forSourceType = NBIO.classpath();
-NBPathsAPI.GetName nesteddir1 = forSourceType.prefix("nesteddir1");
-NBPathsAPI.GetExtension getExtension = nesteddir1.name("nesteddir2/testcsv1");
-NBPathsAPI.DoSearch forCsvExtension = getExtension.extension(".csv");
+NBPathsAPI.GetPrefixes forSourceType = NBIO.classpath();
+NBPathsAPI.GetPathname nesteddir1 = forSourceType.searchPrefixes("nesteddir1");
+NBPathsAPI.GetExtensions getExtensions = nesteddir1.pathname("nesteddir2/testcsv1");
+NBPathsAPI.DoSearch forCsvExtension = getExtensions.extensionSet(".csv");
 Optional<Content<?>> testcsv1 = forCsvExtension.first();

 assertThat(testcsv1).isNotPresent();
@@ -115,10 +123,10 @@ public class NBIOTest {

 @Test
 public void testLoadCsv1Filesystem() {
-NBPathsAPI.GetPrefix forSourceType = NBIO.fs();
-NBPathsAPI.GetName nesteddir1 = forSourceType.prefix("target/test-classes/nesteddir1");
-NBPathsAPI.GetExtension getExtension = nesteddir1.name("nesteddir2/testcsv1");
-NBPathsAPI.DoSearch forCsvExtension = getExtension.extension(".csv");
+NBPathsAPI.GetPrefixes forSourceType = NBIO.fs();
+NBPathsAPI.GetPathname nesteddir1 = forSourceType.searchPrefixes("target/test-classes/nesteddir1");
+NBPathsAPI.GetExtensions getExtensions = nesteddir1.pathname("nesteddir2/testcsv1");
+NBPathsAPI.DoSearch forCsvExtension = getExtensions.extensionSet(".csv");
 Optional<Content<?>> testcsv1 = forCsvExtension.first();

 assertThat(testcsv1).isNotPresent();
@@ -130,7 +138,7 @@ public class NBIOTest {
 @Test
 public void testClasspathTestResource() {
 List<List<Content<?>>> optionals =
-NBIO.classpath().name("nesteddir1/nesteddir2/testcsv12.csv").resolveEach();
+NBIO.classpath().pathname("nesteddir1/nesteddir2/testcsv12.csv").resolveEach();
 assertThat(optionals).hasSize(1);
 Content<?> content = optionals.get(0).get(0);
 assertThat(content).isNotNull();
@@ -139,9 +147,9 @@ public class NBIOTest {
 @Test
 public void testPathSearchForExtension() {
 List<Content<?>> list = NBIO.classpath()
-.prefix("nesteddir1")
-.name(".*.csv")
-.extension("csv")
+.searchPrefixes("nesteddir1")
+.pathname(".*.csv")
+.extensionSet("csv")
 .list();
 assertThat(list).hasSize(2);
 }
@@ -149,9 +157,9 @@ public class NBIOTest {
 @Test
 public void testPathSearchForExtensionMissing() {
 List<Content<?>> list = NBIO.classpath()
-.prefix("nesteddir1")
-.name(".*")
-.extension("csv")
+.searchPrefixes("nesteddir1")
+.pathname(".*")
+.extensionSet("csv")
 .list();
 assertThat(list).hasSize(2);
 }
@@ -159,9 +167,9 @@ public class NBIOTest {
 @Test
 public void testPathSearchForMultipleExtensions() {
 List<Content<?>> list = NBIO.classpath()
-.prefix("nesteddir1")
-.name(".*")
-.extension("csv","txt")
+.searchPrefixes("nesteddir1")
+.pathname(".*")
+.extensionSet("csv","txt")
 .list();
 assertThat(list).hasSize(3);
 }
@@ -169,9 +177,9 @@ public class NBIOTest {
 @Test
 public void testPathSearchForSuffix() {
 List<Content<?>> list = NBIO.classpath()
-.prefix("nesteddir1")
-.name("nesteddir2/testdata12")
-.extension("txt")
+.searchPrefixes("nesteddir1")
+.pathname("nesteddir2/testdata12")
+.extensionSet("txt")
 .list();
 assertThat(list).hasSize(1);
 }
@@ -179,8 +187,8 @@ public class NBIOTest {
 @Test
 public void testPathSearchInDifferentVantagePoints() {
 List<Path> list = NBIO.fs()
-.prefix("target/test-classes/nesteddir1")
-.extension("csv")
+.searchPrefixes("target/test-classes/nesteddir1")
+.extensionSet("csv")
 .list().stream().map(Content::asPath)
 .collect(Collectors.toList());

@@ -193,19 +201,19 @@ public class NBIOTest {
 @Test
 public void testLoadNamedFileAsYmlExtension() {
 List<Content<?>> list = NBIO.classpath()
-.name("nesteddir1/nesteddir2/testworkload1")
-.extension("yml")
+.pathname("nesteddir1/nesteddir2/testworkload1")
+.extensionSet("yml")
 .list();
 assertThat(list).hasSize(1);

 list = NBIO.classpath()
-.name("nesteddir1/nesteddir2/testworkload1.yml")
+.pathname("nesteddir1/nesteddir2/testworkload1.yml")
 .list();
 assertThat(list).hasSize(1);

 list = NBIO.classpath()
-.name("nesteddir1/nesteddir2/testworkload1")
-.extension("abc","yml")
+.pathname("nesteddir1/nesteddir2/testworkload1")
+.extensionSet("abc","yml")
 .list();
 assertThat(list).hasSize(1);
 }
@@ -214,15 +222,15 @@ public class NBIOTest {
 public void testLoadAllFilesUnderPath() {
 List<Content<?>> list = null;

-list = NBIO.classpath().prefix("./").list();
+list = NBIO.classpath().searchPrefixes("./").list();
 logger.debug("found " + list.size() + " entries for path '.'");
 assertThat(list).hasSizeGreaterThan(0);

-list = NBIO.fs().prefix("./").list();
+list = NBIO.fs().searchPrefixes("./").list();
 logger.debug("found " + list.size() + " entries for path '.'");
 assertThat(list).hasSizeGreaterThan(0);

-list = NBIO.remote().prefix("./").list();
+list = NBIO.remote().searchPrefixes("./").list();
 logger.debug("found " + list.size() + " entries for path '.'");
 assertThat(list).hasSize(0);
 }
@@ -230,17 +238,17 @@ public class NBIOTest {
 @Test
 public void test() {
 List<Content<?>> list = NBIO.fs()
-.prefix(Paths.get("target/test-classes/").toString())
-.name("gamma.yaml").list();
+.searchPrefixes(Paths.get("target/test-classes/").toString())
+.pathname("gamma.yaml").list();
 assertThat(list).hasSize(1);
 }

 @Test
 public void testWildcardFilenameMatch() {
 NBPathsAPI.DoSearch gammasSearch = NBIO.all()
-.prefix(Paths.get("target/test-classes/").toString())
-.name(".*gamma")
-.extension("yaml");
+.searchPrefixes(Paths.get("target/test-classes/").toString())
+.pathname(".*gamma")
+.extensionSet("yaml");
 List<Content<?>> gammas = gammasSearch.list();
 assertThat(gammas).hasSize(3);
 }
@@ -249,33 +257,57 @@ public class NBIOTest {
 @Test
 public void testSpecificFilenameMatch() {
 NBPathsAPI.DoSearch gammasSearch = NBIO.all()
-.prefix(Paths.get("target/test-classes/").toString())
-.name("gamma")
-.extension("yaml");
+.searchPrefixes(Paths.get("target/test-classes/").toString())
+.pathname("gamma")
+.extensionSet("yaml");
 List<Content<?>> gammas = gammasSearch.list();
 assertThat(gammas).hasSize(1);
 }

 @Test
-public void onlyMatchExtensionFilesWhenExtensionsProvided() {
+public void matchOneWithoutTryingPrefixesFirst() {
+Content<?> result = NBIO.all()
+.searchPrefixes(
+Paths.get("target/test-classes/nesteddir1/nesteddir2").toString()
+)
+.pathname("nesteddir1/alpha-gamma.yaml")
+.one();
+assertThat(result).isNotNull();
+assertThat(result.getURI().toString()).matches(".*?[^1]/nesteddir1/alpha-gamma.yaml");
+}
+
+@Test
+public void matchOneFallsThroughToPrefixesSecond() {
+Content<?> result = NBIO.all()
+.searchPrefixes(
+Paths.get("target/test-classes/nesteddir1/nesteddir2").toString()
+)
+.pathname("alpha-gamma.yaml")
+.one();
+assertThat(result).isNotNull();
+assertThat(result.getURI().toString()).matches(".*?nesteddir1/nesteddir2/alpha-gamma.yaml");
+}
+
+@Test
+public void onlyMatchExtensionFilesWhenExtensionInCoset() {

 // This search is invalid because by providing extensions, all results
 // are required to match one of the extensions, thus the only valid
 // match here would be alpha-gamma.yaml.js
 NBPathsAPI.DoSearch invalidSearch = NBIO.all()
-.prefix(Paths.get("target/test-classes/").toString())
-.name("alpha-gamma.yaml")
-.extension("js");
+.searchPrefixes(Paths.get("target/test-classes/").toString())
+.pathname("alpha-gamma.yaml")
+.extensionSet("js");

 NBPathsAPI.DoSearch validSearch1 = NBIO.all()
-.prefix(Paths.get("target/test-classes/").toString())
-.name("alpha-gamma")
-.extension("js");
+.searchPrefixes(Paths.get("target/test-classes/").toString())
+.pathname("alpha-gamma")
+.extensionSet("js");

 NBPathsAPI.DoSearch validSearch2 = NBIO.all()
-.prefix(Paths.get("target/test-classes/").toString())
-.name("alpha-gamma.js")
-.extension();
+.searchPrefixes(Paths.get("target/test-classes/").toString())
+.pathname("alpha-gamma.js")
+.extensionSet();


 assertThat(invalidSearch.list()).hasSize(0);
@@ -292,12 +324,13 @@ public class NBIOTest {
 File tempFile = File.createTempFile(tmpdir.toString(), "testfile.csv");
 tempFile.deleteOnExit();
 String fullpath = tempFile.getAbsolutePath();
-Files.write(Path.of(fullpath), "COL1,COL2\n\"val1\",\"val2\"\n".getBytes(StandardCharsets.UTF_8));
-List<Content<?>> results = NBIO.all().name(fullpath).list();
+Files.writeString(Path.of(fullpath), "COL1,COL2\n\"val1\",\"val2\"\n");
+List<Content<?>> results = NBIO.all().pathname(fullpath).list();
 assertThat(results.size()).isEqualTo(1);
 } catch (IOException e) {
 throw new RuntimeException(e);
 }
 }


 }
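The two new matchOne* tests above capture the intent of this change: a pathname that resolves exactly as given is returned directly, and the searchPrefixes are only consulted when the direct path does not resolve. A condensed sketch of that behavior, using the same test fixture paths:

// Direct path resolves as given, so the search prefix is not needed.
Content<?> direct = NBIO.all()
    .searchPrefixes("target/test-classes/nesteddir1/nesteddir2")
    .pathname("nesteddir1/alpha-gamma.yaml")
    .one();

// No direct match here, so the search prefix is applied and the nested copy is found.
Content<?> viaPrefix = NBIO.all()
    .searchPrefixes("target/test-classes/nesteddir1/nesteddir2")
    .pathname("alpha-gamma.yaml")
    .one();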
@@ -28,7 +28,7 @@ public class BundledVersionManifest implements BundledMarkdownManifest {
 public DocsBinder getDocs() {
 return new Docs()
 .namespace("versions")
-.addPath(NBIO.classpath().name("nb_version_info.md").one().asPath())
+.addPath(NBIO.classpath().pathname("nb_version_info.md").one().asPath())
 .asDocsBinder();
 }
 }