mirror of https://github.com/nosqlbench/nosqlbench.git (synced 2024-11-23 09:16:37 -06:00)

commit 37a0954fa8
parent 3e8d59d1b2

provide complete metadata on bundled markdown files
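The change routes every bundled markdown file through a chain of BundledMarkdownProcessor filters before it is written into the exported zip, so frontmatter such as title and weight can be completed on the way out. A minimal sketch of the resulting flow, using only classes introduced in the diff below (the output path is illustrative):

import io.nosqlbench.docexporter.BundledFrontmatterInjector;
import io.nosqlbench.docexporter.BundledMarkdownZipExporter;

import java.nio.file.Path;

public class ExportFlowSketch {
    public static void main(String[] args) {
        // Each .md entry is parsed, passed through the processor chain,
        // and re-serialized with its (possibly injected) frontmatter.
        new BundledMarkdownZipExporter(new BundledFrontmatterInjector())
            .exportDocs(Path.of("exported_docs.zip"));
    }
}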
@@ -1,6 +1,6 @@
 package io.nosqlbench.engine.cli;
 
-import io.nosqlbench.docapi.BundledMarkdownExporter;
+import io.nosqlbench.docexporter.BundledMarkdownExporter;
 import io.nosqlbench.docsys.core.NBWebServerApp;
 import io.nosqlbench.engine.api.activityapi.cyclelog.outputs.cyclelog.CycleLogDumperUtility;
 import io.nosqlbench.engine.api.activityapi.cyclelog.outputs.cyclelog.CycleLogImporterUtility;

@@ -44,6 +44,11 @@
             <version>0.62.2</version>
         </dependency>
 
+        <dependency>
+            <groupId>org.yaml</groupId>
+            <artifactId>snakeyaml</artifactId>
+        </dependency>
+
 
         <dependency>
             <groupId>net.sf.jopt-simple</groupId>

@@ -0,0 +1,33 @@
+package io.nosqlbench.docexporter;
+
+import io.nosqlbench.nb.api.markdown.aggregator.MutableMarkdown;
+
+import java.util.Locale;
+
+public class BundledFrontmatterInjector implements BundledMarkdownProcessor {
+
+    @Override
+    public MutableMarkdown apply(MutableMarkdown parsedMarkdown) {
+        if (parsedMarkdown.getFrontmatter().getWeight()<0) {
+            String title = parsedMarkdown.getFrontmatter().getTitle();
+            parsedMarkdown.getFrontmatter().setWeight(alphaWeightOf(title));
+        }
+        return parsedMarkdown;
+    }
+
+    private int alphaWeightOf(String name) {
+        name=name.toLowerCase(Locale.ROOT);
+        int sum=0;
+        int pow=26;
+        for (int i = 0; i < 6; i++) {
+            if (name.length()>i) {
+                int ord = name.charAt(i) - 'a';
+                double addend = Math.pow(pow, i) * ord;
+                sum += addend;
+            } else {
+                break;
+            }
+        }
+        return sum;
+    }
+}

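The injected weight is derived from the title alone: up to the first six characters are mapped to ordinals relative to 'a' and scaled by increasing powers of 26. A standalone restatement of that computation for illustration (this demo class is not part of the commit):

import java.util.Locale;

public class AlphaWeightSketch {

    // Mirrors alphaWeightOf above: character i contributes (ch - 'a') * 26^i.
    static int alphaWeightOf(String name) {
        name = name.toLowerCase(Locale.ROOT);
        int sum = 0;
        for (int i = 0; i < 6 && i < name.length(); i++) {
            sum += (int) (Math.pow(26, i) * (name.charAt(i) - 'a'));
        }
        return sum;
    }

    public static void main(String[] args) {
        // 'a' -> 0, 'b' -> 1*26, 'c' -> 2*676, so "abc" weighs 0 + 26 + 1352 = 1378.
        System.out.println(alphaWeightOf("abc"));
    }
}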
@@ -0,0 +1,35 @@
+package io.nosqlbench.docexporter;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.List;
+
+public class BundledMarkdownExporter {
+
+    public static void main(String[] args) {
+
+        final OptionParser parser = new OptionParser();
+
+        OptionSpec<String> zipfileSpec = parser.accepts("zipfile", "zip file to write to")
+            .withOptionalArg().ofType(String.class).defaultsTo("exported_docs.zip");
+
+        OptionSpec<?> helpSpec = parser.acceptsAll(List.of("help", "h", "?"), "Display help").forHelp();
+        OptionSet options = parser.parse(args);
+        if (options.has(helpSpec)) {
+            try {
+                parser.printHelpOn(System.out);
+            } catch (IOException e) {
+                throw new RuntimeException("Unable to show help:" + e);
+            }
+        }
+
+        String zipfile = options.valueOf(zipfileSpec);
+
+        new BundledMarkdownZipExporter(new BundledFrontmatterInjector()).exportDocs(Path.of(zipfile));
+    }
+
+}

@@ -0,0 +1,10 @@
+package io.nosqlbench.docexporter;
+
+import io.nosqlbench.nb.api.markdown.aggregator.MutableMarkdown;
+
+import java.util.function.Function;
+
+public interface BundledMarkdownProcessor extends Function<MutableMarkdown, MutableMarkdown> {
+    @Override
+    MutableMarkdown apply(MutableMarkdown parsedMarkdown);
+}

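BundledMarkdownProcessor is simply a Function<MutableMarkdown, MutableMarkdown>, so additional filters can be chained into the zip exporter alongside the frontmatter injector. A hypothetical extra processor (not part of this commit) could look like:

package io.nosqlbench.docexporter;

import io.nosqlbench.nb.api.markdown.aggregator.MutableMarkdown;

// Hypothetical filter: trims stray whitespace from titles before export.
public class TitleTrimmingProcessor implements BundledMarkdownProcessor {
    @Override
    public MutableMarkdown apply(MutableMarkdown parsedMarkdown) {
        String title = parsedMarkdown.getFrontmatter().getTitle();
        if (title != null) {
            parsedMarkdown.getFrontmatter().setTitle(title.trim());
        }
        return parsedMarkdown;
    }
}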
@@ -1,44 +1,33 @@
-package io.nosqlbench.docapi;
+package io.nosqlbench.docexporter;
 
-import joptsimple.OptionParser;
-import joptsimple.OptionSet;
-import joptsimple.OptionSpec;
+import io.nosqlbench.docapi.BundledMarkdownLoader;
+import io.nosqlbench.docapi.DocsBinder;
+import io.nosqlbench.docapi.DocsNameSpace;
+import io.nosqlbench.nb.api.markdown.aggregator.MutableMarkdown;
 
 import java.io.File;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardOpenOption;
-import java.util.List;
+import java.util.Locale;
+import java.util.function.Function;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
 
-public class BundledMarkdownExporter {
-    public static void main(String[] args) {
+public class BundledMarkdownZipExporter {
 
-        final OptionParser parser = new OptionParser();
+    private final BundledMarkdownProcessor[] filters;
+    private final Function<Path, MutableMarkdown> parser = MutableMarkdown::new;
 
-        OptionSpec<String> zipfileSpec = parser.accepts("zipfile", "zip file to write to")
-            .withOptionalArg().ofType(String.class).defaultsTo("exported_docs.zip");
-
-        OptionSpec<?> helpSpec = parser.acceptsAll(List.of("help", "h", "?"), "Display help").forHelp();
-        OptionSet options = parser.parse(args);
-        if (options.has(helpSpec)) {
-            try {
-                parser.printHelpOn(System.out);
-            } catch (IOException e) {
-                throw new RuntimeException("Unable to show help:" + e);
-            }
-        }
-
-        String zipfile = options.valueOf(zipfileSpec);
-
-        new BundledMarkdownExporter().exportDocs(Path.of(zipfile));
+    public BundledMarkdownZipExporter(BundledMarkdownProcessor... filters) {
+        this.filters = filters;
     }
 
-    private void exportDocs(Path out) {
+    public void exportDocs(Path out) {
         ZipOutputStream zipstream;
         try {
             OutputStream stream = Files.newOutputStream(out, StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);

@@ -66,21 +55,29 @@ public class BundledMarkdownExporter {
 
         ZipEntry entry = new ZipEntry(name);
 
+
         if (Files.isDirectory(p)) {
             zos.putNextEntry(entry);
             DirectoryStream<Path> stream = Files.newDirectoryStream(p);
             for (Path path : stream) {
                 addEntry(path,r,zos);
             }
-            zos.closeEntry();
         } else {
             entry.setTime(Files.getLastModifiedTime(p).toMillis());
             zos.putNextEntry(entry);
-            byte[] bytes = Files.readAllBytes(p);
-            zos.write(bytes);
-            zos.closeEntry();
+            if (p.toString().toLowerCase(Locale.ROOT).endsWith(".md")) {
+                MutableMarkdown parsed = parser.apply(p);
+                for (BundledMarkdownProcessor filter : this.filters) {
+                    parsed = filter.apply(parsed);
+                }
+                zos.write(parsed.getComposedMarkdown().getBytes(StandardCharsets.UTF_8));
+            } else {
+                byte[] bytes = Files.readAllBytes(p);
+                zos.write(bytes);
+            }
         }
+        zos.closeEntry();
 
     }
 
 }

@@ -0,0 +1,49 @@
+package io.nosqlbench.nb.api.markdown.aggregator;
+
+import org.yaml.snakeyaml.DumperOptions;
+import org.yaml.snakeyaml.Yaml;
+
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+public class MutableFrontMatter extends LinkedHashMap<String,List<String>> {
+    String WEIGHT = "weight";
+    String TITLE = "title";
+
+    MutableFrontMatter(Map<String,List<String>> data) {
+        this.putAll(data);
+    }
+
+    public String getTitle() {
+        assertMaxSingleValued(TITLE);
+        return Optional.ofNullable(get(TITLE)).map(l -> l.get(0)).orElse(null);
+    }
+
+    public int getWeight() {
+        assertMaxSingleValued(WEIGHT);
+        return Optional.ofNullable(get(WEIGHT)).map(l -> l.get(0)).map(Integer::parseInt).orElse(0);
+    }
+
+    public void setTitle(String title) {
+        put(TITLE,List.of(title));
+    }
+
+    public void setWeight(int weight) {
+        put(WEIGHT,List.of(String.valueOf(weight)));
+    }
+
+    private void assertMaxSingleValued(String fieldname) {
+        if (containsKey(fieldname) && get(fieldname).size()>1) {
+            throw new RuntimeException("Field '" + fieldname + "' can only have zero or one value. It is single-valued.");
+        }
+    }
+
+    public String asYaml() {
+        DumperOptions options = new DumperOptions();
+        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
+        Yaml yaml = new Yaml(options);
+        return yaml.dump(Map.of(TITLE,getTitle(),WEIGHT,getWeight()));
+    }
+}

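MutableFrontMatter keeps the parsed frontmatter as a map of value lists and re-serializes only the title and weight through SnakeYAML's block style. A small usage sketch; since the constructor is package-private, a real caller would live in the same package, and the values here are illustrative:

package io.nosqlbench.nb.api.markdown.aggregator;

import java.util.List;
import java.util.Map;

public class FrontMatterSketch {
    public static void main(String[] args) {
        MutableFrontMatter fm = new MutableFrontMatter(Map.of("title", List.of("Basic Markdown File")));
        fm.setWeight(42);
        // Prints block-style YAML containing only title and weight, e.g.
        //   title: Basic Markdown File
        //   weight: 42
        // (key order is not guaranteed, since asYaml() dumps a Map.of()).
        System.out.println(fm.asYaml());
    }
}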
@@ -0,0 +1,117 @@
+package io.nosqlbench.nb.api.markdown.aggregator;
+
+import com.vladsch.flexmark.ast.Heading;
+import com.vladsch.flexmark.ast.WhiteSpace;
+import com.vladsch.flexmark.ext.yaml.front.matter.AbstractYamlFrontMatterVisitor;
+import com.vladsch.flexmark.ext.yaml.front.matter.YamlFrontMatterBlock;
+import com.vladsch.flexmark.util.ast.BlankLine;
+import com.vladsch.flexmark.util.ast.Document;
+import com.vladsch.flexmark.util.ast.Node;
+import io.nosqlbench.nb.api.markdown.FlexParser;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+public class MutableMarkdown {
+    private final static Logger logger = LogManager.getLogger(MarkdownDocs.class);
+
+    private MutableFrontMatter frontMatter;
+    private final String rawMarkdown;
+    private final Path path;
+    private Heading firstHeading;
+
+    public MutableMarkdown(Path path) {
+        try {
+            this.path = path;
+            this.rawMarkdown = Files.readString(path);
+            parseStructure(rawMarkdown);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private void parseStructure(String rawMarkdown) {
+        AbstractYamlFrontMatterVisitor v = new AbstractYamlFrontMatterVisitor();
+        Document parsed = FlexParser.parser.parse(rawMarkdown);
+        v.visit(parsed);
+        Map<String, List<String>> data = v.getData();
+        this.frontMatter = new MutableFrontMatter(data);
+
+        if (frontMatter.getTitle()==null || frontMatter.getTitle().isEmpty()) {
+            Node node = parsed.getFirstChild();
+            while (node!=null) {
+                if (node instanceof Heading) {
+                    this.frontMatter.setTitle(((Heading) node).getText().toString());
+                    break;
+                } else if (node instanceof BlankLine) {
+                } else if (node instanceof WhiteSpace) {
+                } else if (node instanceof YamlFrontMatterBlock) {
+                } else {
+                    throw new RuntimeException("The markdown file at '" + this.path.toString() + "' must have an initial heading as a title, before any other element, but found:" + node.getClass().getSimpleName());
+                }
+                node=node.getNext();
+            }
+        }
+        if (frontMatter.getTitle()==null || frontMatter.getTitle().isEmpty()) {
+            throw new RuntimeException("The markdown file at '" + this.path.toString() + "' has no heading to use as a title.");
+        }
+    }
+
+    public Path getPath() {
+        return path;
+    }
+
+    public String getBody() {
+        for (String boundary : List.of("---\n", "+++\n")) {
+            if (rawMarkdown.startsWith(boundary)) {
+                int end = rawMarkdown.indexOf(boundary, 3);
+                if (end>=0) {
+                    return rawMarkdown.substring(end+4);
+                } else {
+                    throw new RuntimeException("Unable to find matching boundaries in " + path.toString() + ": " + boundary);
+                }
+            }
+        }
+        return rawMarkdown;
+    }
+
+    public MutableFrontMatter getFrontmatter() {
+        return frontMatter;
+    }
+
+    @Override
+    public String toString() {
+        return getClass().getSimpleName() + "/" +
+            frontMatter.toString();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        MutableMarkdown that = (MutableMarkdown) o;
+        return Objects.equals(frontMatter, that.frontMatter) &&
+            Objects.equals(rawMarkdown, that.rawMarkdown);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(frontMatter, rawMarkdown);
+    }
+
+    public String getComposedMarkdown() {
+        StringBuilder sb = new StringBuilder();
+        sb.append("---\n");
+        sb.append(frontMatter.asYaml());
+        sb.append("---\n");
+
+        sb.append(getBody());
+        return sb.toString();
+    }
+}

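getComposedMarkdown() re-emits the document with its current frontmatter between --- markers, followed by the original body, which is how the zip exporter writes processed .md entries. A usage sketch, assuming a file such as docs/example.md exists and starts with frontmatter or a heading (the path is hypothetical):

import io.nosqlbench.nb.api.markdown.aggregator.MutableMarkdown;

import java.nio.file.Path;

public class ComposeSketch {
    public static void main(String[] args) {
        MutableMarkdown md = new MutableMarkdown(Path.of("docs/example.md"));
        md.getFrontmatter().setWeight(7);
        // Emits "---", the updated frontmatter as YAML, "---", then the original body.
        System.out.println(md.getComposedMarkdown());
    }
}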
@@ -1,5 +1,6 @@
 package io.nosqlbench.nb.api.markdown.aggregator;
 
+import io.nosqlbench.nb.api.markdown.types.BasicFrontMatterInfo;
 import io.nosqlbench.nb.api.markdown.types.DocScope;
 import io.nosqlbench.nb.api.markdown.types.FrontMatterInfo;
 import org.apache.logging.log4j.LogManager;

@@ -14,34 +15,40 @@ public class ParsedFrontMatter implements FrontMatterInfo {
 
     private final static Logger logger = LogManager.getLogger(ParsedFrontMatter.class);
 
-    private final Map<String, List<String>> data;
+    private final Map<String, List<String>> data = new LinkedHashMap<>();
 
     public ParsedFrontMatter(Map<String, List<String>> data) {
-        this.data = data;
+        this.data.putAll(data);
     }
 
     @Override
     public String getTitle() {
-        List<String> titles = data.get(FrontMatterInfo.TITLE);
+        List<String> titles = data.get(BasicFrontMatterInfo.TITLE);
         if (titles==null) {
             return "";
         }
-        if (titles.size()!=1) {
-            throw new InvalidParameterException(FrontMatterInfo.TITLE + " can only contain a single value.");
+        if (titles.size()>1) {
+            throw new InvalidParameterException(BasicFrontMatterInfo.TITLE + " can only contain a single value.");
         }
-        return titles.get(0);
+        if (titles.size()==1) {
+            return titles.get(0);
+        }
+        return "";
     }
 
     @Override
     public int getWeight() {
-        List<String> weights = data.get(FrontMatterInfo.WEIGHT);
+        List<String> weights = data.get(BasicFrontMatterInfo.WEIGHT);
         if (weights==null) {
             return 0;
         }
-        if (weights.size()!=1) {
-            throw new InvalidParameterException(FrontMatterInfo.WEIGHT + " can only contain a single value.");
+        if (weights.size()>1) {
+            throw new InvalidParameterException(BasicFrontMatterInfo.WEIGHT + " can only contain a single value.");
         }
-        return Integer.parseInt(weights.get(0));
+        if (weights.size()==1) {
+            return Integer.parseInt(weights.get(0));
+        }
+        return 0;
     }
 
     @Override

@@ -93,22 +100,21 @@ public class ParsedFrontMatter implements FrontMatterInfo {
         return scopeNames.stream().map(DocScope::valueOf).collect(Collectors.toSet());
     }
 
-    public List<String> getDiagnostics() {
-        List<String> warnings = new ArrayList<>();
+    @Override
+    public List<String> getDiagnostics(List<String> buffer) {
         for (String propname : data.keySet()) {
             if (!FrontMatterInfo.FrontMatterKeyWords.contains(propname)) {
-                warnings.add("unrecognized frontm atter property " + propname);
+                buffer.add("unrecognized frontmatter property " + propname);
             }
         }
-        return warnings;
+        return buffer;
     }
 
-    public void setTopics(Set<String> newTopics) {
-        // TODO: allow functional version of this
-        // this.data.put(FrontMatterInfo.TOPICS,newTopics);
+    public List<String> getDiagnostics() {
+        return getDiagnostics(new ArrayList<>());
     }
 
 
     public ParsedFrontMatter withTopics(List<String> assigning) {
         HashMap<String, List<String>> newmap = new HashMap<>();
         newmap.putAll(this.data);

@@ -142,4 +148,13 @@ public class ParsedFrontMatter implements FrontMatterInfo {
     public int hashCode() {
         return Objects.hash(data);
     }
+
+    public void setTitle(String title) {
+        this.data.put(FrontMatterInfo.TITLE,List.of(title));
+    }
+
+    public void setWeight(int weight) {
+        data.put(FrontMatterInfo.WEIGHT,List.of(String.valueOf(weight)));
+    }
+
 }

@@ -0,0 +1,17 @@
+package io.nosqlbench.nb.api.markdown.types;
+
+public interface BasicFrontMatterInfo {
+
+    String WEIGHT = "weight";
+    String TITLE = "title";
+
+    /**
+     * @return A title for the given markdown source file.
+     */
+    String getTitle();
+
+    /**
+     * @return A weight for the given markdown source file.
+     */
+    int getWeight();
+}

@@ -9,13 +9,11 @@ import java.util.regex.Pattern;
  * If the markdown source file does not contain the metadata requested, then reasonable non-null
  * defaults must be provided.
  */
-public interface FrontMatterInfo {
+public interface FrontMatterInfo extends BasicFrontMatterInfo, HasDiagnostics {
 
     String SCOPES = "scopes";
     String AGGREGATE = "aggregate";
     String TOPICS = "topics";
-    String WEIGHT = "weight";
-    String TITLE = "title";
     String INCLUDED = "included";
 
     Set<String> FrontMatterKeyWords =

@@ -1,7 +1,5 @@
 package io.nosqlbench.nb.api.markdown.types;
 
-import io.nosqlbench.nb.api.markdown.aggregator.CompositeMarkdownInfo;
-import io.nosqlbench.nb.api.markdown.types.FrontMatterInfo;
 import org.jetbrains.annotations.NotNull;
 
 import java.nio.file.Path;

@@ -1,5 +1,6 @@
 package io.nosqlbench.docapi;
 
+import io.nosqlbench.docexporter.BundledMarkdownExporter;
 import org.junit.jupiter.api.Test;
 
 public class BundledMarkdownExporterTest {

@@ -1,3 +1,9 @@
+---
+RandomFrontMatter1: value
+---
+
+# Heading
+
 ## Basic Markdown File
 
 - item 1.