[SolutionArray] Rework SolutionArray and Storage

This is a squashed commit implementing the following:
- Make data shareable and sliceable
- Order SolutionArray extra entries
- Switch extra to AnyValue
- Use AnyValue in addExtra
- Set/get extra entries as AnyMap
- Set/get components as AnyValue
- Refine extra component handling
- Add shape information in C++
- Add additional HDF storage modes
- Access entries as 'loc' rather than 'index' (This change of
  nomenclature is inspired by pandas, where the index refers to the
  indexing column, while location refers to the row.)
- Update HDF subfolder logic
- Address edge cases in HDF Storage wrapper
- Fix edge cases in SolutionArray
This commit is contained in:
Ingmar Schoegl 2023-01-20 21:53:45 -06:00 committed by Ray Speth
parent a177366c29
commit 6d1835c722
8 changed files with 1545 additions and 517 deletions

View File

@ -18,8 +18,8 @@ class ThermoPhase;
/*!
* A container class providing a convenient interface for representing many
* thermodynamic states using the same Solution object. C++ SolutionArray objects are
* one-dimensional by design; extensions to multi-dimensional arrays need to be
* implemented in high-level API's.
* one-dimensional by design; while shape information for multi-dimensional arrays is
* stored, reshaping operations need to be implemented in high-level API's.
*
* @since New in Cantera 3.0.
* @warning This class is an experimental part of the %Cantera API and may be
@ -32,6 +32,9 @@ private:
size_t size,
const AnyMap& meta);
SolutionArray(const shared_ptr<SolutionArray>& arr,
const vector<int>& indices);
public:
virtual ~SolutionArray() {}
@ -49,7 +52,21 @@ public:
return shared_ptr<SolutionArray>(new SolutionArray(sol, size, meta));
}
//! Reset SolutionArray to current Solution state
/*!
* Share locations from an existing SolutionArray and return new reference.
*
* Both SolutionArray objects share common data. The method is used for slicing
* of SolutionArrays from high-level API's. Note that metadata is not inherited.
* @param other SolutionArray object containing shared data
* @param selected List of locations for shared entries
*/
static shared_ptr<SolutionArray> share(const shared_ptr<SolutionArray>& other,
const vector<int>& selected)
{
return shared_ptr<SolutionArray>(new SolutionArray(other, selected));
}
//! Reset all entries of the SolutionArray to the current Solution state
void reset();
//! Size of SolutionArray (number of entries)
@ -57,9 +74,23 @@ public:
return m_size;
}
//! Resize SolutionArray
//! Resize SolutionArray objects with a single dimension (default).
void resize(size_t size);
//! SolutionArray shape information used by high-level API's.
vector<long int> apiShape() const {
return m_apiShape;
}
//! Set SolutionArray shape information used by high-level API's.
//! The size of the SolutionArray is adjusted automatically.
void setApiShape(const vector<long int>& shape);
//! Number of SolutionArray dimensions used by high-level API's.
size_t apiNdim() const {
return m_apiShape.size();
}
//! SolutionArray meta data.
AnyMap& meta() {
return m_meta;
@ -70,130 +101,168 @@ public:
m_meta = meta;
}
//! Retrieve associated Solution object
shared_ptr<Solution> solution() {
return m_sol;
}
//! Retrieve associated ThermoPhase object
shared_ptr<ThermoPhase> thermo();
//! Retrieve list of component names
std::vector<std::string> components() const;
vector<string> componentNames() const;
//! Add auxiliary component to SolutionArray and initialize to default value
void addComponent(const std::string& name, double value=0.);
//! Check whether SolutionArray contains a component (property defining state or
//! auxiliary variable)
bool hasComponent(const string& name) const;
/*!
* Check whether SolutionArray contains a component (property defining state or
* auxiliary variable)
* Retrieve a component of the SolutionArray by name.
* Returns an AnyValue containing an array with length size() with a type
* specific to the component; in most cases, the type is double, but may differ
* for auxiliary data.
*/
bool hasComponent(const std::string& name) const;
//! Retrieve a component of the SolutionArray by name
vector_fp getComponent(const std::string& name) const;
AnyValue getComponent(const string& name) const;
/*!
* Set a component of the SolutionArray by name.
* The passed AnyValue should contain an array with length size() with a type
* specific to the component; in most cases, the type is double, but may differ
* for auxiliary data.
*
* @param name Name of component (property defining state or auxiliary variable)
* @param name Name of component (property defining auxiliary variable)
* @param data Component data
* @param force If true, add new component to SolutionArray
*/
void setComponent(const std::string& name, const vector_fp& data, bool force=false);
void setComponent(const string& name, const AnyValue& data);
/*!
* Update the buffered index used to access entries.
* Update the buffered location used to access SolutionArray entries.
*/
void setIndex(size_t index, bool restore=true);
void setLoc(size_t loc, bool restore=true);
//! Retrieve the state vector for a single entry.
vector_fp getState(size_t index);
/*!
* Update state at given location to state of associated Solution object.
*/
void updateState(size_t loc);
//! Set the state vector for a single entry
void setState(size_t index, const vector_fp& data);
//! Retrieve the state vector for a given location.
vector<double> getState(size_t loc);
//! Retrieve auxiliary data for a single entry.
std::map<std::string, double> getExtra(size_t index);
//! Set the state vector for a given location.
void setState(size_t loc, const vector<double>& state);
//! Set auxiliary data for a single entry.
void setAuxiliary(size_t index, std::map<std::string, double> data);
/*!
* Add auxiliary component to SolutionArray. Initialization requires a subsequent
* call of setComponent.
*
* @param name Name of component (property defining auxiliary variable)
* @param back If true (default), add name after components representing the
* state, otherwise add to front of list. Front and back components are
* populated left to right.
*/
void addExtra(const string& name, bool back=true);
//! Check whether SolutionArray contains an extra component
bool hasExtra(const string& name) const {
return m_extra->count(name);
}
//! Retrieve list of extra component names
vector<string> listExtra(bool all=true) const;
//! Retrieve auxiliary data for a given location.
AnyMap getAuxiliary(size_t loc);
//! Set auxiliary data for a given location.
void setAuxiliary(size_t loc, const AnyMap& data);
//! Append location entry at end of SolutionArray.
void append(const vector<double>& state, const AnyMap& extra);
/*!
* Write header data to container file.
*
* @param fname Name of HDF container file
* @param id Identifier of SolutionArray within the container file
* @param id Identifier of root location within the container file
* @param desc Description
*/
static void writeHeader(const std::string& fname, const std::string& id,
const std::string& desc);
static void writeHeader(const string& fname, const string& id, const string& desc);
/*!
* Write header data to AnyMap.
*
* @param root Root node of AnyMap structure
* @param id Identifier of SolutionArray node within AnyMap structure
* @param id Identifier of root location within the container file
* @param desc Description
*/
static void writeHeader(AnyMap& root, const std::string& id,
const std::string& desc);
static void writeHeader(AnyMap& root, const string& id, const string& desc);
/*!
* Write SolutionArray data to container file.
*
* @param fname Name of HDF container file
* @param id Identifier of SolutionArray within the container file
* @param id Identifier of root location within the container file
* @param sub Name identifier for the subgroup holding actual data
* @param compression Compression level; optional (default=0; HDF only)
*/
void writeEntry(const std::string& fname, const std::string& id,
int compression=0);
void writeEntry(const string& fname, const string& id,
const string& sub, int compression=0);
/*!
* Write SolutionArray data to AnyMap.
*
* @param root Root node of AnyMap structure
* @param id Identifier of SolutionArray node within AnyMap structure
* @param id Identifier of root location within the container file
* @param sub Name identifier for the subgroup holding actual data
*/
void writeEntry(AnyMap& root, const std::string& id);
void writeEntry(AnyMap& root, const string& id, const string& sub);
/*!
* Save current SolutionArray and header to a container file.
*
* @param fname Name of output container file (YAML or HDF)
* @param id Identifier of SolutionArray within the container file
* @param desc Description
* @param id Identifier of root location within the container file
* @param sub Name identifier for the subgroup holding actual data
* @param desc Custom comment describing the dataset to be stored
* @param compression Compression level; optional (default=0; HDF only)
*/
void save(const std::string& fname, const std::string& id,
const std::string& desc, int compression=0);
string save(const string& fname, const string& id,
const string& sub,
const string& desc, int compression=0);
/*!
* Read header data from container file.
*
* @param fname Name of HDF container file
* @param id Identifier of SolutionArray within the file structure
* @param id Identifier of root location within the container file
*/
static AnyMap readHeader(const std::string& fname, const std::string& id);
static AnyMap readHeader(const string& fname, const string& id);
/*!
* Read header data from AnyMap.
*
* @param root Root node of AnyMap structure
* @param id Identifier of SolutionArray node within AnyMap structure
* @param id Identifier of root location within the container file
*/
static AnyMap readHeader(const AnyMap& root, const std::string& id);
static AnyMap readHeader(const AnyMap& root, const string& id);
/*!
* Restore SolutionArray entry from a container file.
*
* @param fname Name of HDF container file
* @param id Identifier of SolutionArray within the file structure
* @param id Identifier of root location within the container file
* @param sub Name of the subgroup holding actual data
*/
void readEntry(const std::string& fname, const std::string& id);
void readEntry(const string& fname, const string& id, const string& sub);
/*!
* Restore SolutionArray entry from AnyMap.
*
* @param root Root node of AnyMap structure
* @param id Identifier of SolutionArray node within AnyMap structure
* @param id Identifier of root location within the container file
* @param sub Name of the subgroup holding actual data
*/
void readEntry(const AnyMap& root, const std::string& id);
void readEntry(const AnyMap& root, const string& id, const string& sub);
/*!
* Restore SolutionArray entry and header from a container file.
@ -201,27 +270,67 @@ public:
* @param fname Name of container file (YAML or HDF)
* @param id Identifier of SolutionArray within the container file
*/
AnyMap restore(const std::string& fname, const std::string& id);
AnyMap restore(const string& fname, const string& id, const string& sub);
protected:
//! Service function used to resize SolutionArray
void _resize(size_t size);
/*!
* Initialize extra SolutionArray component
*
* @param name Name of component (property defining auxiliary variable)
* @param value Default value; used to determine type of component
*/
void _initExtra(const string& name, const AnyValue& value);
/*!
* Resize extra SolutionArray component
*
* @param name Name of component (property defining auxiliary variable)
* @param value Default value
*/
void _resizeExtra(const string& name, const AnyValue& value=AnyValue());
/*!
* Set extra SolutionArray component
*
* @param name Name of component (property defining auxiliary variable)
* @param data Value to be set
*/
void _setExtra(const string& name, const AnyValue& data=AnyValue());
/*!
* Identify storage mode of state data (combination of properties defining state);
* valid modes include Phase::nativeState ("native") or other property combinations
* defined by Phase::fullStates (three-letter acronyms, for example "TDY", "TPX").
*/
std::string detectMode(const std::set<std::string>& names, bool native=true);
string _detectMode(const set<string>& names, bool native=true);
//! Retrieve set containing list of properties defining state
std::set<std::string> stateProperties(const std::string& mode, bool alias=false);
set<string> _stateProperties(const string& mode, bool alias=false);
shared_ptr<Solution> m_sol; //!< Solution object associated with state data
size_t m_size; //!< Number of entries in SolutionArray
size_t m_dataSize; //!< Total size of unsliced data
size_t m_stride; //!< Stride between SolutionArray entries
AnyMap m_meta; //!< Metadata
size_t m_index = npos; //!< Buffered index
size_t m_loc = npos; //!< Buffered location within data vector
vector<long int> m_apiShape; //!< Shape information used by high-level API's
vector_fp m_data; //!< Work vector holding states
std::map<std::string, vector_fp> m_extra; //!< Auxiliary data
shared_ptr<vector<double>> m_data; //!< Work vector holding states
//! Auxiliary (extra) components; size of first dimension has to match m_dataSize
shared_ptr<map<string, AnyValue>> m_extra;
//! Mapping of auxiliary component names, where the index is used as the
//! mapping key. Names with index >= zero are listed before state components, while
//! names with index < zero are added at end. The name with the most negative index
//! corresponds to the last entry (different from Python index convention).
shared_ptr<map<int, string>> m_order;
bool m_shared = false; //!< True if data are shared from another object
vector<int> m_active; //!< Vector of locations referencing active entries
};
}

View File

@ -28,7 +28,8 @@ namespace Cantera
{
/*!
* A wrapper class handling storage to HDF; acts as a thin wrapper for HighFive
* A wrapper class handling storage to HDF; acts as a thin wrapper for HighFive.
* The class implements methods that are intended to be called from SolutionArray.
*
* @since New in Cantera 3.0.
* @warning This class is an experimental part of the %Cantera API and may be
@ -37,84 +38,81 @@ namespace Cantera
class Storage
{
public:
Storage(std::string fname, bool write);
Storage(string fname, bool write);
~Storage();
//! Set compression level (0..9)
/*!
* Compression is only applied to species data; note that compression may increase
* file size for small data sets (compression requires setting of chunk sizes,
* which involves considerable overhead for metadata).
*/
//!
//! Compression is only applied to matrix-type data; note that compression may
//! increase file size for small data sets (compression requires setting of chunk
//! sizes, which involves considerable overhead for metadata).
void setCompressionLevel(int level);
//! Check whether path location exists
//! If the file has write access, create location if necessary
//! Check whether location `id` represents a group
bool hasGroup(const string& id) const;
//! Check whether path location exists.
//! If the file has write access, create location; otherwise exceptions are thrown.
//! @param id storage location within file
bool checkGroup(const std::string& id);
//! @param permissive if true, do not raise exceptions
bool checkGroup(const string& id, bool permissive=false);
//! Retrieve contents of file from a specified location
//! @param id storage location within file
//! @returns pair containing size and list of entry names of stored data set
std::pair<size_t, std::set<std::string>> contents(const std::string& id) const;
pair<size_t, set<string>> contents(const string& id) const;
//! Read attributes from a specified location
//! @param id storage location within file
//! @param attr name of attribute to be checked
bool hasAttribute(const std::string& id, const std::string& attr) const;
bool hasAttribute(const string& id, const string& attr) const;
//! Read attributes from a specified location
//! @param id storage location within file
//! @param recursive boolean indicating whether subgroups should be included
//! @returns AnyMap containing attributes
AnyMap readAttributes(const std::string& id, bool recursive) const;
AnyMap readAttributes(const string& id, bool recursive) const;
//! Write attributes to a specified location
//! @param id storage location within file
//! @param meta AnyMap containing attributes
void writeAttributes(const std::string& id, const AnyMap& meta);
void writeAttributes(const string& id, const AnyMap& meta);
//! Read data vector from a specified location
//! Read dataset from a specified location
//! @param id storage location within file
//! @param name name of data vector entry
//! @param size size of data vector entry
//! @returns data vector
vector_fp readVector(const std::string& id,
const std::string& name, size_t size) const;
//! @param name name of vector/matrix entry
//! @param rows vector length or number of matrix rows
//! @param cols number of matrix columns, if applicable; if 0, a vector is
//! expected, if npos, the size is detected automatically; otherwise, an exact
//! number of columns needs to be matched.
//! @returns matrix or vector containing data; implemented for types
//! `vector<double>`, `vector<long int>`, `vector<string>`,
//! `vector<vector<double>>`, `vector<vector<long int>>` and
//! `vector<vector<string>>`
AnyValue readData(const string& id,
const string& name, size_t rows, size_t cols=npos) const;
//! Write data vector to a specified location
//! @param id storage location within file
//! @param name name of data vector entry
//! @param data data vector
void writeVector(const std::string& id,
const std::string& name, const vector_fp& data);
//! Read matrix from a specified location
//! Write dataset to a specified location
//! @param id storage location within file
//! @param name name of matrix entry
//! @param rows number of matrix rows
//! @param cols number of matrix columns
//! @returns matrix containing data (vector of vectors)
std::vector<vector_fp> readMatrix(const std::string& id,
const std::string& name,
size_t rows, size_t cols) const;
//! Write matrix to a specified location
//! @param id storage location within file
//! @param name name of matrix entry
//! @param data matrix containing data (vector of vectors)
void writeMatrix(const std::string& id,
const std::string& name, const std::vector<vector_fp>& data);
//! @param data vector or matrix containing data; implemented for types
//! `vector<double>`, `vector<long int>`, `vector<string>`
//! `vector<vector<double>>`, `vector<vector<long int>>` and
//! `vector<vector<string>>`
void writeData(const string& id, const string& name, const AnyValue& data);
private:
#if CT_USE_HDF5
bool checkGroupRead(const std::string& id) const;
bool checkGroupWrite(const std::string& id);
//! ensure that HDF group is readable
bool checkGroupRead(const string& id) const;
std::unique_ptr<HighFive::File> m_file;
bool m_write;
int m_compressionLevel=0;
//! ensure that HDF group is writeable
bool checkGroupWrite(const string& id, bool permissive);
std::unique_ptr<HighFive::File> m_file; //!< HDF container file
bool m_write; //!< HDF access mode
int m_compressionLevel=0; //!< HDF compression level
#endif
};

File diff suppressed because it is too large Load Diff

View File

@ -48,7 +48,7 @@ namespace Cantera
#if CT_USE_HDF5
Storage::Storage(std::string fname, bool write) : m_write(write)
Storage::Storage(string fname, bool write) : m_write(write)
{
if (m_write) {
m_file = make_unique<h5::File>(fname, h5::File::OpenOrCreate);
@ -66,59 +66,103 @@ void Storage::setCompressionLevel(int level)
{
if (level < 0 || level > 9) {
throw CanteraError("Storage::setCompressionLevel",
"Invalid compression level '{}' (needs to be 0..9).", level);
"Invalid compression level '{}' (needs to be 0..9).", level);
}
m_compressionLevel = level;
}
bool Storage::checkGroupRead(const std::string& id) const
bool Storage::hasGroup(const string& id) const
{
std::vector<std::string> tokens;
if (!m_file->exist(id)) {
return false;
}
if (m_file->getObjectType(id) != h5::ObjectType::Group) {
return false;
}
return true;
}
bool Storage::checkGroupRead(const string& id) const
{
vector<string> tokens;
tokenizePath(id, tokens);
std::string grp = tokens[0];
if (!m_file->exist(grp) || m_file->getObjectType(grp) != h5::ObjectType::Group) {
string grp = tokens[0];
if (!hasGroup(grp)) {
throw CanteraError("Storage::checkGroupRead",
"No group with id '{}' found", grp);
"No group with id '{}' found at root.", grp);
}
std::string path = grp;
string path = grp;
h5::Group sub = m_file->getGroup(grp);
tokens.erase(tokens.begin());
for (auto& grp : tokens) {
path += "/" + grp;
if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) {
if (!hasGroup(path + "/" + grp)) {
throw CanteraError("Storage::checkGroupRead",
"No group with id '{}' found", path);
"No group with id '{}' found at '{}'.", grp, path);
}
path += "/" + grp;
sub = sub.getGroup(grp);
}
return true;
}
bool Storage::checkGroupWrite(const std::string& id)
bool Storage::checkGroupWrite(const string& id, bool permissive)
{
if (!m_write) {
throw CanteraError("Storage::checkGroupWrite",
"Cannot write to file opened in read mode.");
}
if (id == "") {
throw CanteraError("Storage::checkGroupWrite",
"Cannot write to empty group id '' (root location).");
}
if (!m_file->exist(id)) {
if (!permissive) {
throw CanteraError("Storage::checkGroupWrite",
"Specified group with id '{}' does not exist.", id);
}
m_file->createGroup(id);
return true;
}
if (m_file->getObjectType(id) != h5::ObjectType::Group) {
throw CanteraError("Storage::checkGroupWrite",
"Invalid object with id '{}' exists", id);
"Unable to write to existing object with id '{}'.", id);
}
return true;
}
bool Storage::checkGroup(const std::string& id) {
if (m_write) {
return checkGroupWrite(id);
bool Storage::checkGroup(const string& id, bool permissive)
{
try {
if (m_write) {
return checkGroupWrite(id, permissive);
}
return checkGroupRead(id);
} catch (const CanteraError& err) {
if (permissive) {
return false;
}
throw CanteraError("Storage::checkGroup", err.getMessage());
} catch (const std::exception& err) {
if (permissive) {
return false;
}
// convert HighFive exception
throw CanteraError("Storage::checkGroup",
"Encountered exception for group '{}':\n{}", id, err.what());
}
return checkGroupRead(id);
}
std::pair<size_t, std::set<std::string>> Storage::contents(const std::string& id) const
std::pair<size_t, set<string>> Storage::contents(const string& id) const
{
try {
checkGroupRead(id);
} catch (const CanteraError& err) {
throw CanteraError("Storage::contents",
"Caught exception for group '{}':\n", id, err.getMessage());
}
h5::Group sub = m_file->getGroup(id);
std::set<std::string> names;
set<string> names;
size_t nDims = npos;
size_t nElements = 0;
for (auto& name : sub.listObjectNames()) {
@ -148,7 +192,7 @@ AnyMap readH5Attributes(const h5::Group& sub, bool recursive)
h5::DataTypeClass dclass = dtype.getClass();
if (dclass == h5::DataTypeClass::Float) {
if (attr.getSpace().getElementCount() > 1) {
std::vector<double> values;
vector<double> values;
attr.read(values);
out[name] = values;
} else {
@ -158,30 +202,30 @@ AnyMap readH5Attributes(const h5::Group& sub, bool recursive)
}
} else if (dclass == h5::DataTypeClass::Integer) {
if (attr.getSpace().getElementCount() > 1) {
std::vector<int> values;
vector<long int> values;
attr.read(values);
out[name] = values;
} else {
int value;
long int value;
attr.read(value);
out[name] = value;
}
} else if (dclass == h5::DataTypeClass::String) {
if (attr.getSpace().getElementCount() > 1) {
std::vector<std::string> values;
vector<string> values;
attr.read(values);
out[name] = values;
} else {
std::string value;
string value;
attr.read(value);
out[name] = value;
}
} else if (dclass == h5::DataTypeClass::Enum) {
// only booleans are supported
if (attr.getSpace().getElementCount() > 1) {
std::vector<H5Boolean> values;
vector<H5Boolean> values;
attr.read(values);
std::vector<bool> bValues;
vector<bool> bValues;
for (auto v : values) {
bValues.push_back(bool(v));
}
@ -208,24 +252,31 @@ AnyMap readH5Attributes(const h5::Group& sub, bool recursive)
return out;
}
bool Storage::hasAttribute(const std::string& id, const std::string& attr) const
bool Storage::hasAttribute(const string& id, const string& attr) const
{
if (id == "") {
return false;
try {
checkGroupRead(id);
} catch (const CanteraError& err) {
throw CanteraError("Storage::hasAttribute",
"Caught exception for group '{}':\n", id, err.getMessage());
}
h5::Group sub = m_file->getGroup(id);
auto names = sub.listAttributeNames();
return std::find(names.begin(), names.end(), attr) != names.end();
}
//! Read all attributes of group `id` into an AnyMap.
//! @param id storage location within file
//! @param recursive if true, also descend into subgroups
//! @throws NotImplementedError for unsupported attribute types
//! @throws CanteraError if the group cannot be read
AnyMap Storage::readAttributes(const string& id, bool recursive) const
{
    try {
        checkGroupRead(id);
        h5::Group sub = m_file->getGroup(id);
        return readH5Attributes(sub, recursive);
    } catch (const Cantera::NotImplementedError& err) {
        throw NotImplementedError("Storage::readAttributes",
            "{} in group '{}'.", err.getMessage(), id);
    } catch (const CanteraError& err) {
        // forward the underlying message ('{}' was previously missing)
        throw CanteraError("Storage::readAttributes",
            "Caught exception for group '{}':\n{}", id, err.getMessage());
    }
}
@ -236,14 +287,14 @@ void writeH5Attributes(h5::Group sub, const AnyMap& meta)
throw NotImplementedError("writeH5Attributes",
"Unable to overwrite existing Attribute '{}'", name);
}
if (item.is<double>()) {
double value = item.asDouble();
h5::Attribute attr = sub.createAttribute<double>(
if (item.is<long int>()) {
int value = item.asInt();
h5::Attribute attr = sub.createAttribute<long int>(
name, h5::DataSpace::From(value));
attr.write(value);
} else if (item.is<int>() || item.is<long int>()) {
int value = item.asInt();
h5::Attribute attr = sub.createAttribute<int>(
} else if (item.is<double>()) {
double value = item.asDouble();
h5::Attribute attr = sub.createAttribute<double>(
name, h5::DataSpace::From(value));
attr.write(value);
} else if (item.is<string>()) {
@ -257,16 +308,16 @@ void writeH5Attributes(h5::Group sub, const AnyMap& meta)
h5::Attribute attr = sub.createAttribute<H5Boolean>(
name, h5::DataSpace::From(value));
attr.write(value);
} else if (item.is<vector<long int>>()) {
auto values = item.as<vector<long int>>();
h5::Attribute attr = sub.createAttribute<long int>(
name, h5::DataSpace::From(values));
attr.write(values);
} else if (item.is<vector<double>>()) {
auto values = item.as<vector<double>>();
h5::Attribute attr = sub.createAttribute<double>(
name, h5::DataSpace::From(values));
attr.write(values);
} else if (item.is<vector<int>>()) {
auto values = item.as<vector<int>>();
h5::Attribute attr = sub.createAttribute<int>(
name, h5::DataSpace::From(values));
attr.write(values);
} else if (item.is<vector<string>>()) {
auto values = item.as<vector<string>>();
h5::Attribute attr = sub.createAttribute<string>(
@ -294,115 +345,215 @@ void writeH5Attributes(h5::Group sub, const AnyMap& meta)
}
}
void Storage::writeAttributes(const std::string& id, const AnyMap& meta)
void Storage::writeAttributes(const string& id, const AnyMap& meta)
{
h5::Group sub = m_file->getGroup(id);
try {
checkGroupWrite(id, false);
h5::Group sub = m_file->getGroup(id);
writeH5Attributes(sub, meta);
} catch (const Cantera::NotImplementedError& err) {
throw NotImplementedError("Storage::writeAttribute",
"{} in group '{}'.", err.getMessage(), id);
} catch (const CanteraError& err) {
// rethrow with public method attribution
throw CanteraError("Storage::writeAttributes", "{}", err.getMessage());
} catch (const std::exception& err) {
// convert HighFive exception
throw CanteraError("Storage::writeAttributes",
"Encountered exception for group '{}':\n{}", id, err.what());
}
}
vector_fp Storage::readVector(const std::string& id,
const std::string& name, size_t size) const
AnyValue Storage::readData(const string& id,
const string& name, size_t rows, size_t cols) const
{
try {
checkGroupRead(id);
} catch (const CanteraError& err) {
throw CanteraError("Storage::readData",
"Caught exception for group '{}':\n", id, err.getMessage());
}
h5::Group sub = m_file->getGroup(id);
if (!sub.exist(name)) {
throw CanteraError("Storage::readVector",
throw CanteraError("Storage::readData",
"DataSet '{}' not found in group '{}'.", name, id);
}
h5::DataSet dataset = sub.getDataSet(name);
if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) {
throw CanteraError("Storage::readVector",
"Type of DataSet '{}' is inconsistent; expected HDF float.", name);
}
if (dataset.getElementCount() != size) {
throw CanteraError("Storage::readVector",
"Size of DataSet '{}' is inconsistent; expected {} elements but "
"received {} elements.", name, size, dataset.getElementCount());
}
vector_fp out;
dataset.read(out);
return out;
}
void Storage::writeVector(const std::string& id,
const std::string& name, const vector_fp& data)
{
h5::Group sub = m_file->getGroup(id);
if (sub.exist(name)) {
throw NotImplementedError("Storage::writeVector",
"Unable to overwrite existing DataSet '{}' in group '{}'.", name, id);
}
std::vector<size_t> dims{data.size()};
h5::DataSet dataset = sub.createDataSet<double>(name, h5::DataSpace(dims));
dataset.write(data);
}
std::vector<vector_fp> Storage::readMatrix(const std::string& id,
const std::string& name,
size_t rows, size_t cols) const
{
h5::Group sub = m_file->getGroup(id);
if (!sub.exist(name)) {
throw CanteraError("Storage::readMatrix",
"DataSet '{}' not found in group '{}'.", name, id);
}
h5::DataSet dataset = sub.getDataSet(name);
if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) {
throw CanteraError("Storage::readMatrix",
"Type of DataSet '{}' is inconsistent; expected HDF float.", name);
}
h5::DataSpace space = dataset.getSpace();
if (space.getNumberDimensions() != 2) {
throw CanteraError("Storage::readMatrix",
"Shape of DataSet '{}' is inconsistent; expected two dimensions.", name);
size_t ndim = space.getNumberDimensions();
if (cols == 0 && ndim != 1) {
throw CanteraError("Storage::readData",
"Shape of DataSet '{}' is inconsistent; expected one dimensions but "
"received {}.", name, ndim);
} else if (cols != 0 && cols != npos && ndim != 2) {
throw CanteraError("Storage::readData",
"Shape of DataSet '{}' is inconsistent; expected two dimensions but "
"received {}.", name, ndim);
}
if (ndim == 0 || ndim > 2) {
throw NotImplementedError("Storage::readData",
"Cannot process DataSet '{}' as data has {} dimensions.", name, ndim);
}
const auto& shape = space.getDimensions();
if (shape[0] != rows) {
throw CanteraError("Storage::readMatrix",
"Shape of DataSet '{}' is inconsistent; expected {} rows.", name, rows);
throw CanteraError("Storage::readData",
"Shape of DataSet '{}' is inconsistent; expected {} rows "
"but received {}.", name, rows, shape[0]);
}
if (shape[1] != cols) {
throw CanteraError("Storage::readMatrix",
"Shape of DataSet '{}' is inconsistent; expected {} columns.", name, cols);
if (cols != 0 && cols != npos && shape[1] != cols) {
throw CanteraError("Storage::readData",
"Shape of DataSet '{}' is inconsistent; expected {} columns "
"but received {}.", name, cols, shape[1]);
}
AnyValue out;
const auto datatype = dataset.getDataType().getClass();
if (datatype == h5::DataTypeClass::Float) {
try {
if (ndim == 1) {
vector<double> data;
dataset.read(data);
out = data;
} else { // ndim == 2
vector<vector<double>> data;
dataset.read(data);
out = data;
}
} catch (const std::exception& err) {
throw NotImplementedError("Storage::readData",
"Encountered HighFive exception for DataSet '{}' in group '{}':\n{}",
name, id, err.what());
}
} else if (datatype == h5::DataTypeClass::Integer) {
try {
if (ndim == 1) {
vector<long int> data;
dataset.read(data);
out = data;
} else { // ndim == 2
vector<vector<long int>> data;
dataset.read(data);
out = data;
}
} catch (const std::exception& err) {
throw NotImplementedError("Storage::readData",
"Encountered HighFive exception for DataSet '{}' in group '{}':\n{}",
name, id, err.what());
}
} else if (datatype == h5::DataTypeClass::String) {
try {
if (ndim == 1) {
vector<string> data;
dataset.read(data);
out = data;
} else { // ndim == 2
vector<vector<string>> data;
dataset.read(data);
out = data;
}
} catch (const std::exception& err) {
throw NotImplementedError("Storage::readData",
"Encountered HighFive exception for DataSet '{}' in group '{}':\n{}",
name, id, err.what());
}
} else {
throw NotImplementedError("Storage::readData",
"DataSet '{}' is not readable.", name);
}
std::vector<vector_fp> out;
dataset.read(out);
return out;
}
void Storage::writeMatrix(const std::string& id,
const std::string& name, const std::vector<vector_fp>& data)
void Storage::writeData(const string& id, const string& name, const AnyValue& data)
{
try {
checkGroupWrite(id, false);
} catch (const CanteraError& err) {
// rethrow with public method attribution
throw CanteraError("Storage::writeData", "{}", err.getMessage());
} catch (const std::exception& err) {
// convert HighFive exception
throw CanteraError("Storage::writeData",
"Encountered exception for group '{}':\n{}", id, err.what());
}
h5::Group sub = m_file->getGroup(id);
if (sub.exist(name)) {
throw NotImplementedError("Storage::writeMatrix",
throw NotImplementedError("Storage::writeData",
"Unable to overwrite existing DataSet '{}' in group '{}'.", name, id);
}
std::vector<size_t> dims{data.size()};
dims.push_back(data.size() ? data[0].size() : 0);
size_t size = data.vectorSize();
auto [rows, cols] = data.matrixShape();
if (size == npos && rows == npos) {
throw CanteraError("Storage::writeData",
"Cannot write DataSet '{}' in group '{}' as input data with type\n"
"'{}'\nis neither a vector nor a matrix.", name, id, data.type_str());
}
vector<size_t> dims{data.vectorSize()};
if (data.isVector<long int>()) {
h5::DataSet dataset = sub.createDataSet<long int>(name, h5::DataSpace(dims));
dataset.write(data.asVector<long int>());
return;
}
if (data.isVector<double>()) {
h5::DataSet dataset = sub.createDataSet<double>(name, h5::DataSpace(dims));
dataset.write(data.asVector<double>());
return;
}
if (data.isVector<string>()) {
h5::DataSet dataset = sub.createDataSet<string>(name, h5::DataSpace(dims));
dataset.write(data.asVector<string>());
return;
}
if (cols != npos) {
dims.clear();
dims.push_back(rows);
dims.push_back(cols);
} else {
throw NotImplementedError("Storage::writeData",
"Cannot write DataSet '{}' in group '{}' as input data with type\n"
"'{}'\nis not supported.", name, id, data.type_str());
}
if (m_compressionLevel) {
// Set chunk size to single chunk and apply compression level; for caveats, see
// https://stackoverflow.com/questions/32994766/compressed-files-bigger-in-h5py
h5::DataSpace space(dims, dims); //{h5::DataSpace::UNLIMITED, dims[1]});
h5::DataSetCreateProps props;
props.add(h5::Chunking(std::vector<hsize_t>{dims[0], dims[1]}));
props.add(h5::Chunking(vector<hsize_t>{dims[0], dims[1]}));
props.add(h5::Deflate(m_compressionLevel));
h5::DataSet dataset = sub.createDataSet<double>(name, space, props);
dataset.write(data);
if (data.isVector<vector<long int>>()) {
h5::DataSet dataset = sub.createDataSet<long int>(name, space, props);
dataset.write(data.asVector<vector<long int>>());
} else if (data.isVector<vector<double>>()) {
h5::DataSet dataset = sub.createDataSet<double>(name, space, props);
dataset.write(data.asVector<vector<double>>());
} else if (data.isVector<vector<string>>()) {
h5::DataSet dataset = sub.createDataSet<string>(name, space, props);
dataset.write(data.asVector<vector<string>>());
} else {
throw NotImplementedError("Storage::writeData",
"Cannot write DataSet '{}' in group '{}' as input data with type\n"
"'{}'\nis not supported.", name, id, data.type_str());
}
} else {
h5::DataSpace space(dims);
h5::DataSet dataset = sub.createDataSet<double>(name, space);
dataset.write(data);
if (data.isVector<vector<long int>>()) {
h5::DataSet dataset = sub.createDataSet<long int>(name, space);
dataset.write(data.asVector<vector<long int>>());
} else if (data.isVector<vector<double>>()) {
h5::DataSet dataset = sub.createDataSet<double>(name, space);
dataset.write(data.asVector<vector<double>>());
} else if (data.isVector<vector<string>>()) {
h5::DataSet dataset = sub.createDataSet<string>(name, space);
dataset.write(data.asVector<vector<string>>());
} else {
throw NotImplementedError("Storage::writeData",
"Cannot write DataSet '{}' in group '{}' as input data with type\n"
"'{}'\nis not supported.", name, id, data.type_str());
}
}
}
#else
//! Stub constructor used when Cantera is built without HighFive/HDF5 support.
/*!
 * @throws CanteraError unconditionally, as HDF storage is unavailable.
 */
Storage::Storage(string fname, bool write)
{
    throw CanteraError("Storage::Storage",
        "Saving to HDF requires HighFive installation.");
}
bool Storage::checkGroup(const std::string& id)
bool Storage::hasGroup(const string& id) const
{
throw CanteraError("Storage::hasGroup",
"Saving to HDF requires HighFive installation.");
}
//! Stub used when Cantera is built without HighFive/HDF5 support.
// Raises unconditionally; the HighFive-backed implementation is compiled
// in only when HDF support is available.
bool Storage::checkGroup(const string& id, bool permissive)
{
    throw CanteraError("Storage::checkGroup",
        "Saving to HDF requires HighFive installation.");
}
std::pair<size_t, std::set<std::string>> Storage::contents(const std::string& id) const
std::pair<size_t, set<string>> Storage::contents(const string& id) const
{
throw CanteraError("Storage::contents",
"Saving to HDF requires HighFive installation.");
}
bool Storage::hasAttribute(const std::string& id, const std::string& attr) const
bool Storage::hasAttribute(const string& id, const string& attr) const
{
throw CanteraError("Storage::hasAttribute",
"Saving to HDF requires HighFive installation.");
}
//! Stub used when Cantera is built without HighFive/HDF5 support.
/*!
 * @throws CanteraError unconditionally, as HDF storage is unavailable.
 */
AnyMap Storage::readAttributes(const string& id, bool recursive) const
{
    throw CanteraError("Storage::readAttributes",
        "Saving to HDF requires HighFive installation.");
}
void Storage::writeAttributes(const std::string& id, const AnyMap& meta)
void Storage::writeAttributes(const string& id, const AnyMap& meta)
{
throw CanteraError("Storage::writeAttributes",
"Saving to HDF requires HighFive installation.");
}
vector_fp Storage::readVector(const std::string& id,
const std::string& name, size_t size) const
AnyValue Storage::readData(const string& id,
const string& name, size_t rows, size_t cols) const
{
throw CanteraError("Storage::readVector",
throw CanteraError("Storage::readData",
"Saving to HDF requires HighFive installation.");
}
void Storage::writeVector(const std::string& id,
const std::string& name, const vector_fp& data)
void Storage::writeData(const string& id,
const string& name, const AnyValue& data)
{
throw CanteraError("Storage::writeVector",
"Saving to HDF requires HighFive installation.");
}
//! Stub used when Cantera is built without HighFive/HDF5 support.
// NOTE(review): the surrounding code introduces Storage::readData for the
// same purpose — presumably this stub is superseded; confirm whether any
// callers still use readMatrix before keeping it.
std::vector<vector_fp> Storage::readMatrix(const std::string& id,
                                           const std::string& name,
                                           size_t rows, size_t cols) const
{
    // Raises unconditionally, as HDF storage is unavailable in this build.
    throw CanteraError("Storage::readMatrix",
        "Saving to HDF requires HighFive installation.");
}
//! Stub used when Cantera is built without HighFive/HDF5 support.
/*!
 * @throws CanteraError unconditionally, as HDF storage is unavailable.
 *
 * The original text contained two interleaved throw statements with
 * conflicting attributions ("Storage::writeMatrix" and "Storage::writeData");
 * a single throw attributed to this method is kept.
 */
void Storage::writeMatrix(const std::string& id,
                          const std::string& name, const std::vector<vector_fp>& data)
{
    throw CanteraError("Storage::writeMatrix",
        "Saving to HDF requires HighFive installation.");
}

View File

@ -244,7 +244,7 @@ shared_ptr<SolutionArray> Inlet1D::asArray(const double* soln) const
//! Restore the inlet state from a SolutionArray entry.
/*!
 * @param arr       SolutionArray holding the saved state (first entry used)
 * @param soln      Solution vector (unused by this boundary)
 * @param loglevel  Verbosity passed on to setMeta
 */
void Inlet1D::restore(SolutionArray& arr, double* soln, int loglevel)
{
    Boundary1D::setMeta(arr.meta(), loglevel);
    arr.setLoc(0);
    auto phase = arr.thermo();
    auto meta = arr.meta();
    m_temp = phase->temperature();
    // NOTE(review): the guard below was reconstructed from the visible
    // `.at("mass-flux")` / `} else {` structure — confirm against upstream.
    if (meta.hasKey("mass-flux")) {
        m_mdot = meta.at("mass-flux").asDouble();
    } else {
        // convert data format used by Python h5py export (Cantera < 3.0)
        auto aux = arr.getAuxiliary(0);
        m_mdot = phase->density() * aux.at("velocity").as<double>();
    }
    phase->getMassFractions(m_yin.data());
}
@ -531,7 +531,7 @@ shared_ptr<SolutionArray> OutletRes1D::asArray(const double* soln) const
void OutletRes1D::restore(SolutionArray& arr, double* soln, int loglevel)
{
Boundary1D::setMeta(arr.meta(), loglevel);
arr.setIndex(0);
arr.setLoc(0);
auto phase = arr.thermo();
m_temp = phase->temperature();
auto Y = phase->massFractions();
@ -581,7 +581,7 @@ shared_ptr<SolutionArray> Surf1D::asArray(const double* soln) const
//! Restore the surface state from a SolutionArray entry.
/*!
 * @param arr       SolutionArray holding the saved state (first entry used)
 * @param soln      Solution vector (unused by this boundary)
 * @param loglevel  Verbosity passed on to setMeta
 */
void Surf1D::restore(SolutionArray& arr, double* soln, int loglevel)
{
    Boundary1D::setMeta(arr.meta(), loglevel);
    // setLoc replaces the former setIndex nomenclature; the stale duplicate
    // call left over from the diff is dropped.
    arr.setLoc(0);
    m_temp = arr.thermo()->temperature();
}
@ -788,7 +788,7 @@ shared_ptr<SolutionArray> ReactingSurf1D::asArray(const double* soln) const
void ReactingSurf1D::restore(SolutionArray& arr, double* soln, int loglevel)
{
Boundary1D::setMeta(arr.meta(), loglevel);
arr.setIndex(0);
arr.setLoc(0);
auto surf = std::dynamic_pointer_cast<SurfPhase>(arr.thermo());
if (!surf) {
throw CanteraError("ReactingSurf1D::restore",

View File

@ -133,7 +133,7 @@ AnyMap Domain1D::serialize(const double* soln) const
"To be removed after Cantera 3.0; superseded by asArray.");
AnyMap out;
auto arr = asArray(soln);
arr->writeEntry(out, "");
arr->writeEntry(out, "", "");
return out;
}
@ -174,7 +174,7 @@ void Domain1D::restore(const AnyMap& state, double* soln, int loglevel)
warn_deprecated("Domain1D::restore",
"To be removed after Cantera 3.0; restore from SolutionArray instead.");
auto arr = SolutionArray::create(solution());
arr->readEntry(state, "");
arr->readEntry(state, "", "");
restore(*arr, soln, loglevel);
}

View File

@ -102,7 +102,7 @@ void Sim1D::save(const std::string& fname, const std::string& id,
if (extension == "h5" || extension == "hdf" || extension == "hdf5") {
for (auto dom : m_dom) {
auto arr = dom->asArray(m_x.data() + dom->loc());
arr->writeEntry(fname, id + "/" + dom->id(), compression);
arr->writeEntry(fname, id, dom->id(), compression);
}
SolutionArray::writeHeader(fname, id, desc);
if (loglevel > 0) {
@ -120,7 +120,7 @@ void Sim1D::save(const std::string& fname, const std::string& id,
for (auto dom : m_dom) {
auto arr = dom->asArray(m_x.data() + dom->loc());
arr->writeEntry(data, id + "/" + dom->id());
arr->writeEntry(data, id, dom->id());
}
// Write the output file and remove the now-outdated cached file
@ -216,13 +216,14 @@ AnyMap legacyH5(shared_ptr<SolutionArray> arr, const AnyMap& header={})
if (header.hasKey("fixed_temperature")) {
double temp = header.getDouble("fixed_temperature", -1.);
auto profile = arr->getComponent("T");
auto profile = arr->getComponent("T").as<vector<double>>();
int ix = 0;
while (profile[ix] <= temp && ix < arr->size()) {
ix++;
}
if (ix != 0) {
out["fixed-point"]["location"] = arr->getComponent("grid")[ix - 1];
auto grid = arr->getComponent("grid").as<vector<double>>();
out["fixed-point"]["location"] = grid[ix - 1];
out["fixed-point"]["temperature"] = temp;
}
}
@ -246,7 +247,7 @@ AnyMap Sim1D::restore(const std::string& fname, const std::string& id,
for (auto dom : m_dom) {
auto arr = SolutionArray::create(dom->solution());
arr->readEntry(fname, id + "/" + dom->id());
arr->readEntry(fname, id, dom->id());
dom->resize(dom->nComponents(), arr->size());
if (!header.hasKey("generator")) {
arr->meta() = legacyH5(arr, header);
@ -266,7 +267,7 @@ AnyMap Sim1D::restore(const std::string& fname, const std::string& id,
for (auto dom : m_dom) {
auto arr = SolutionArray::create(dom->solution());
arr->readEntry(root, id + "/" + dom->id());
arr->readEntry(root, id, dom->id());
dom->resize(dom->nComponents(), arr->size());
arrs[dom->id()] = arr;
}

View File

@ -785,20 +785,31 @@ AnyMap StFlow::getMeta() const
shared_ptr<SolutionArray> StFlow::asArray(const double* soln) const
{
auto arr = SolutionArray::create(m_solution, nPoints(), getMeta());
arr->setComponent("grid", m_z, true);
arr->addExtra("grid", false); // leading entry
AnyValue value;
value = m_z;
arr->setComponent("grid", value);
vector_fp data(nPoints());
for (size_t i = 0; i < nComponents(); i++) {
if (componentActive(i)) {
auto name = componentName(i);
for (size_t j = 0; j < nPoints(); j++) {
data[j] = soln[index(i, j)];
}
arr->setComponent(componentName(i), data, true);
if (!arr->hasComponent(name)) {
arr->addExtra(name, false); // add to front
}
value = data;
arr->setComponent(name, value);
}
}
arr->setComponent("D", m_rho); // use density rather than pressure
value = m_rho;
arr->setComponent("D", value); // use density rather than pressure
if (m_do_radiation) {
arr->setComponent("radiative-heat-loss", m_qdotRadiation, true);
arr->addExtra("radiative-heat-loss", true); // add at end
value = m_qdotRadiation;
arr->setComponent("radiative-heat-loss", value);
}
return arr;
@ -807,11 +818,11 @@ shared_ptr<SolutionArray> StFlow::asArray(const double* soln) const
void StFlow::restore(SolutionArray& arr, double* soln, int loglevel)
{
Domain1D::setMeta(arr.meta(), loglevel);
arr.setIndex(0);
arr.setLoc(0);
auto phase = arr.thermo();
m_press = phase->pressure();
const auto grid = arr.getComponent("grid");
const auto grid = arr.getComponent("grid").as<std::vector<double>>();
setupGrid(nPoints(), &grid[0]);
for (size_t i = 0; i < nComponents(); i++) {
@ -820,7 +831,7 @@ void StFlow::restore(SolutionArray& arr, double* soln, int loglevel)
}
std::string name = componentName(i);
if (arr.hasComponent(name)) {
const vector_fp data = arr.getComponent(name);
const vector_fp data = arr.getComponent(name).as<std::vector<double>>();
for (size_t j = 0; j < nPoints(); j++) {
soln[index(i,j)] = data[j];
}