From 55ec7960bef6addf5d8ec33e560e8d472c62fa1a Mon Sep 17 00:00:00 2001 From: bangshiuh Date: Mon, 18 Apr 2022 09:16:45 -0400 Subject: [PATCH 01/93] [oneD] use enum for constants --- include/cantera/oneD/StFlow.h | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/include/cantera/oneD/StFlow.h b/include/cantera/oneD/StFlow.h index c7444567da1..23826cb8df1 100644 --- a/include/cantera/oneD/StFlow.h +++ b/include/cantera/oneD/StFlow.h @@ -19,13 +19,16 @@ namespace Cantera // constants //------------------------------------------ -// Offsets of solution components in the solution array. -const size_t c_offset_U = 0; // axial velocity -const size_t c_offset_V = 1; // strain rate -const size_t c_offset_T = 2; // temperature -const size_t c_offset_L = 3; // (1/r)dP/dr -const size_t c_offset_E = 4; // electric poisson's equation -const size_t c_offset_Y = 5; // mass fractions +//! Offsets of solution components in the 1D solution array. +enum offset +{ + c_offset_U //! axial velocity + , c_offset_V //! strain rate + , c_offset_T //! temperature + , c_offset_L //! (1/r)dP/dr + , c_offset_E //! electric poisson's equation + , c_offset_Y //! mass fractions +}; class Transport; From 67dcc9f808a04b187dc68b609886b64c416c150c Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Sun, 4 Sep 2022 14:53:31 -0500 Subject: [PATCH 02/93] [oneD] Remove deprecated Inlet1D.h --- include/cantera/oneD/Inlet1D.h | 17 ----------------- 1 file changed, 17 deletions(-) delete mode 100644 include/cantera/oneD/Inlet1D.h diff --git a/include/cantera/oneD/Inlet1D.h b/include/cantera/oneD/Inlet1D.h deleted file mode 100644 index f5b54beb9a3..00000000000 --- a/include/cantera/oneD/Inlet1D.h +++ /dev/null @@ -1,17 +0,0 @@ -/** - * @file Inlet1D.h - * - * Boundary objects for one-dimensional simulations. - */ - -// This file is part of Cantera. See License.txt in the top-level directory or -// at https://cantera.org/license.txt for license and copyright information. 
- -#ifndef CT_BDRY1D_H -#define CT_BDRY1D_H - -#pragma message("warning: Inlet1D.h is renamed to Boundary1D.h and will be removed after Cantera 2.5.") - -#include "Boundary1D.h" - -#endif From 3dfb13e8be50690fc04c5b021b5f38cde558cfd6 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Mon, 5 Sep 2022 14:24:52 -0500 Subject: [PATCH 03/93] [oneD] Add Solution to ReactingSurf1D objects --- include/cantera/oneD/Boundary1D.h | 1 + interfaces/cython/cantera/_onedim.pxd | 1 + interfaces/cython/cantera/_onedim.pyx | 67 +++++++++++++++++++++++---- interfaces/cython/cantera/onedim.py | 3 +- src/oneD/Boundary1D.cpp | 24 ++++++++++ test/python/test_onedim.py | 3 +- 6 files changed, 86 insertions(+), 13 deletions(-) diff --git a/include/cantera/oneD/Boundary1D.h b/include/cantera/oneD/Boundary1D.h index 5d5129a06a5..a6812927417 100644 --- a/include/cantera/oneD/Boundary1D.h +++ b/include/cantera/oneD/Boundary1D.h @@ -272,6 +272,7 @@ class ReactingSurf1D : public Boundary1D { public: ReactingSurf1D(); + ReactingSurf1D(shared_ptr solution); void setKineticsMgr(InterfaceKinetics* kin); diff --git a/interfaces/cython/cantera/_onedim.pxd b/interfaces/cython/cantera/_onedim.pxd index e59d0513ba8..e4addd9d71f 100644 --- a/interfaces/cython/cantera/_onedim.pxd +++ b/interfaces/cython/cantera/_onedim.pxd @@ -66,6 +66,7 @@ cdef extern from "cantera/oneD/Boundary1D.h": cdef cppclass CxxReactingSurf1D "Cantera::ReactingSurf1D": CxxReactingSurf1D() + CxxReactingSurf1D(shared_ptr[CxxSolution]) except +translate_exception void setKineticsMgr(CxxInterfaceKinetics*) except +translate_exception void enableCoverageEquations(cbool) except +translate_exception cbool coverageEnabled() diff --git a/interfaces/cython/cantera/_onedim.pyx b/interfaces/cython/cantera/_onedim.pyx index a035784c731..2b6b5662f7a 100644 --- a/interfaces/cython/cantera/_onedim.pyx +++ b/interfaces/cython/cantera/_onedim.pyx @@ -26,6 +26,8 @@ cdef class Domain1D: if name is not None: self.name = name + if not isinstance(phase, 
_SolutionBase): + raise TypeError(f"Received phase with invalid type '{type(phase)}'.") self.gas = phase self.gas._references[self._weakref_proxy] = True self.set_default_tolerances() @@ -284,11 +286,14 @@ cdef class Boundary1D(Domain1D): def __cinit__(self, *args, **kwargs): self.boundary = NULL - def __init__(self, *args, **kwargs): + def __init__(self, *args, phase=None, **kwargs): if self.boundary is NULL: raise TypeError("Can't instantiate abstract class Boundary1D.") self.domain = (self.boundary) - Domain1D.__init__(self, *args, **kwargs) + if phase is not None: + Domain1D.__init__(self, phase, *args, **kwargs) + else: + Domain1D.__init__(self, *args, **kwargs) property T: """ The temperature [K] at this boundary. """ @@ -405,15 +410,51 @@ cdef class Surface1D(Boundary1D): cdef class ReactingSurface1D(Boundary1D): - """A reacting solid surface.""" - def __cinit__(self, *args, **kwargs): - self.surf = new CxxReactingSurf1D() + """A reacting solid surface. + + :param phase: + The (surface) phase corresponding to the boundary + + .. versionchanged:: 3.0 + + Starting in Cantera 3.0, parameter `phase` should reference surface instead of + gas phase. 
+ """ + def __cinit__(self, *args, phase=None, **kwargs): + cdef _SolutionBase sol + if isinstance(phase, _SolutionBase) and phase.phase_of_matter != "gas": + sol = phase + self.surf = new CxxReactingSurf1D(sol._base) + else: + # legacy pathway - deprecation is handled in __init__ + self.surf = new CxxReactingSurf1D() self.boundary = (self.surf) - def __init__(self, *args, **kwargs): + def __init__(self, *args, phase=None, **kwargs): self._weakref_proxy = _WeakrefProxy() - super().__init__(*args, **kwargs) - self.surface = None + if phase is None and isinstance(args[0], _SolutionBase): + phase = args[0] + args = args[1:] + cdef _SolutionBase sol + if isinstance(phase, _SolutionBase): + if phase.phase_of_matter == "gas": + warnings.warn("Starting in Cantera 3.0, parameter 'phase' should " + "reference surface instead of gas phase.", DeprecationWarning) + super().__init__(*args, phase=phase, **kwargs) + else: + sol = phase + gas = None + for val in sol._adjacent.values(): + if val.phase_of_matter == "gas": + gas = val + break + if gas is None: + raise CanteraError("ReactingSurface1D needs an adjacent gas phase") + super().__init__(*args, phase=gas, **kwargs) + else: + super().__init__(*args, phase=phase, **kwargs) + self.surface = phase + self.surface._references[self._weakref_proxy] = True def __dealloc__(self): del self.surf @@ -426,7 +467,15 @@ cdef class ReactingSurface1D(Boundary1D): return self.surface def set_kinetics(self, Kinetics kin): - """Set the kinetics manager (surface reaction mechanism object).""" + """Set the kinetics manager (surface reaction mechanism object). + + .. deprecated:: 3.0 + + Method to be removed after Cantera 3.0; set `Kinetics` when instantiating + `ReactingSurface1D` instead. 
+ """ + warnings.warn("Method to be removed after Cantera 3.0; set 'Kinetics' when " + "instantiating 'ReactingSurface1D' instead.", DeprecationWarning) if pystr(kin.kinetics.kineticsType()) not in ("Surf", "Edge"): raise TypeError('Kinetics object must be derived from ' 'InterfaceKinetics.') diff --git a/interfaces/cython/cantera/onedim.py b/interfaces/cython/cantera/onedim.py index 0af294bb06c..5749a2cc7c6 100644 --- a/interfaces/cython/cantera/onedim.py +++ b/interfaces/cython/cantera/onedim.py @@ -1502,8 +1502,7 @@ def __init__(self, gas, grid=None, width=None, surface=None): self.surface = Surface1D(name='surface', phase=gas) self.surface.T = gas.T else: - self.surface = ReactingSurface1D(name='surface', phase=gas) - self.surface.set_kinetics(surface) + self.surface = ReactingSurface1D(name='surface', phase=surface) self.surface.T = surface.T super().__init__((self.inlet, self.flame, self.surface), gas, grid) diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index f111a6641d1..3b91720a314 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -610,6 +610,30 @@ ReactingSurf1D::ReactingSurf1D() m_type = cSurfType; } +ReactingSurf1D::ReactingSurf1D(shared_ptr solution) +{ + if (!std::dynamic_pointer_cast(solution->thermo())) { + throw CanteraError("ReactingSurf1D::ReactingSurf1D", + "Detected incompatible ThermoPhase type '{}'", solution->thermo()->type()); + } + if (!std::dynamic_pointer_cast(solution->kinetics())) { + throw CanteraError("ReactingSurf1D::ReactingSurf1D", + "Detected incompatible kinetics type '{}'", + solution->kinetics()->kineticsType()); + } + m_solution = solution; + m_kin = (InterfaceKinetics*)solution->kinetics().get(); + m_surfindex = m_kin->surfacePhaseIndex(); + m_sphase = (SurfPhase*)&m_kin->thermo(m_surfindex); + if (m_sphase->name() != m_solution->thermo()->name()) { + throw CanteraError("ReactingSurf1D::ReactingSurf1D", + "Detected inconsistent ThermoPhase objects: mismatch of '{}' and '{}'.", + 
m_sphase->name(), m_solution->thermo()->name()); + } + m_nsp = m_sphase->nSpecies(); + m_enabled = true; +} + void ReactingSurf1D::setKineticsMgr(InterfaceKinetics* kin) { m_kin = kin; diff --git a/test/python/test_onedim.py b/test/python/test_onedim.py index dfbcba505f3..b73b52cba37 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -23,8 +23,7 @@ def test_instantiateSurface(self): solid = ct.Solution("diamond.yaml", "diamond") interface = ct.Solution("diamond.yaml", "diamond_100", (gas, solid)) - surface = ct.ReactingSurface1D(phase=gas) - surface.set_kinetics(interface) + surface = ct.ReactingSurface1D(phase=interface) def test_boundaryProperties(self): gas1 = ct.Solution("h2o2.yaml") From 760ea59eddee8115e77659f63c03b9bf5d337a4d Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Mon, 5 Sep 2022 18:29:14 -0500 Subject: [PATCH 04/93] [oneD] Ensure Python API passes Solutions to boundaries --- include/cantera/oneD/Boundary1D.h | 24 +++++++++ interfaces/cython/cantera/_onedim.pxd | 12 ++--- interfaces/cython/cantera/_onedim.pyx | 78 ++++++++++++--------------- src/oneD/Boundary1D.cpp | 16 +++--- test/python/test_onedim.py | 13 +++++ 5 files changed, 83 insertions(+), 60 deletions(-) diff --git a/include/cantera/oneD/Boundary1D.h b/include/cantera/oneD/Boundary1D.h index a6812927417..61cebf850a1 100644 --- a/include/cantera/oneD/Boundary1D.h +++ b/include/cantera/oneD/Boundary1D.h @@ -106,6 +106,10 @@ class Inlet1D : public Boundary1D public: Inlet1D(); + Inlet1D(shared_ptr solution) : Inlet1D() { + m_solution = solution; + } + //! 
set spreading rate virtual void setSpreadRate(double V0) { m_V0 = V0; @@ -154,6 +158,10 @@ class Empty1D : public Boundary1D m_type = cEmptyType; } + Empty1D(shared_ptr solution) : Empty1D() { + m_solution = solution; + } + virtual void showSolution(const double* x) {} virtual void init(); @@ -176,6 +184,10 @@ class Symm1D : public Boundary1D m_type = cSymmType; } + Symm1D(shared_ptr solution) : Symm1D() { + m_solution = solution; + } + virtual void init(); virtual void eval(size_t jg, double* xg, double* rg, @@ -196,6 +208,10 @@ class Outlet1D : public Boundary1D m_type = cOutletType; } + Outlet1D(shared_ptr solution) : Outlet1D() { + m_solution = solution; + } + virtual void init(); virtual void eval(size_t jg, double* xg, double* rg, @@ -214,6 +230,10 @@ class OutletRes1D : public Boundary1D public: OutletRes1D(); + OutletRes1D(shared_ptr solution) : OutletRes1D() { + m_solution = solution; + } + virtual void showSolution(const double* x) {} virtual size_t nSpecies() { @@ -251,6 +271,10 @@ class Surf1D : public Boundary1D m_type = cSurfType; } + Surf1D(shared_ptr solution) : Surf1D() { + m_solution = solution; + } + virtual void init(); virtual void eval(size_t jg, double* xg, double* rg, diff --git a/interfaces/cython/cantera/_onedim.pxd b/interfaces/cython/cantera/_onedim.pxd index e4addd9d71f..7e8ed4d8eaf 100644 --- a/interfaces/cython/cantera/_onedim.pxd +++ b/interfaces/cython/cantera/_onedim.pxd @@ -48,24 +48,24 @@ cdef extern from "cantera/oneD/Boundary1D.h": double massFraction(size_t) cdef cppclass CxxInlet1D "Cantera::Inlet1D": - CxxInlet1D() + CxxInlet1D(shared_ptr[CxxSolution]) double spreadRate() void setSpreadRate(double) cdef cppclass CxxOutlet1D "Cantera::Outlet1D": - CxxOutlet1D() + CxxOutlet1D(shared_ptr[CxxSolution]) cdef cppclass CxxOutletRes1D "Cantera::OutletRes1D": - CxxOutletRes1D() + CxxOutletRes1D(shared_ptr[CxxSolution]) cdef cppclass CxxSymm1D "Cantera::Symm1D": - CxxSymm1D() + CxxSymm1D(shared_ptr[CxxSolution]) cdef cppclass 
CxxSurf1D "Cantera::Surf1D": - CxxSurf1D() + CxxSurf1D(shared_ptr[CxxSolution]) cdef cppclass CxxReactingSurf1D "Cantera::ReactingSurf1D": - CxxReactingSurf1D() + CxxReactingSurf1D() # deprecated in Python API (Cantera 3.0) CxxReactingSurf1D(shared_ptr[CxxSolution]) except +translate_exception void setKineticsMgr(CxxInterfaceKinetics*) except +translate_exception void enableCoverageEquations(cbool) except +translate_exception diff --git a/interfaces/cython/cantera/_onedim.pyx b/interfaces/cython/cantera/_onedim.pyx index 2b6b5662f7a..9389b8887ac 100644 --- a/interfaces/cython/cantera/_onedim.pyx +++ b/interfaces/cython/cantera/_onedim.pyx @@ -10,15 +10,16 @@ from ._utils cimport stringify, pystr from ._utils import CanteraError from cython.operator import dereference as deref + # Need a pure-python class to store weakrefs to class _WeakrefProxy: pass cdef class Domain1D: - def __cinit__(self, *args, **kwargs): + def __cinit__(self, _SolutionBase phase not None, *args, **kwargs): self.domain = NULL - def __init__(self, _SolutionBase phase, *args, name=None, **kwargs): + def __init__(self, phase, *args, name=None, **kwargs): self._weakref_proxy = _WeakrefProxy() if self.domain is NULL: raise TypeError("Can't instantiate abstract class Domain1D.") @@ -26,8 +27,6 @@ cdef class Domain1D: if name is not None: self.name = name - if not isinstance(phase, _SolutionBase): - raise TypeError(f"Received phase with invalid type '{type(phase)}'.") self.gas = phase self.gas._references[self._weakref_proxy] = True self.set_default_tolerances() @@ -286,14 +285,11 @@ cdef class Boundary1D(Domain1D): def __cinit__(self, *args, **kwargs): self.boundary = NULL - def __init__(self, *args, phase=None, **kwargs): + def __init__(self, phase, name=None): if self.boundary is NULL: raise TypeError("Can't instantiate abstract class Boundary1D.") self.domain = (self.boundary) - if phase is not None: - Domain1D.__init__(self, phase, *args, **kwargs) - else: - Domain1D.__init__(self, *args, 
**kwargs) + Domain1D.__init__(self, phase, name=name) property T: """ The temperature [K] at this boundary. """ @@ -347,8 +343,8 @@ cdef class Inlet1D(Boundary1D): domain - it must be either the leftmost or rightmost domain in a stack. """ - def __cinit__(self, *args, **kwargs): - self.inlet = new CxxInlet1D() + def __cinit__(self, _SolutionBase phase, *args, **kwargs): + self.inlet = new CxxInlet1D(phase._base) self.boundary = (self.inlet) def __dealloc__(self): @@ -369,8 +365,8 @@ cdef class Outlet1D(Boundary1D): A one-dimensional outlet. An outlet imposes a zero-gradient boundary condition on the flow. """ - def __cinit__(self, *args, **kwargs): - self.outlet = new CxxOutlet1D() + def __cinit__(self, _SolutionBase phase, *args, **kwargs): + self.outlet = new CxxOutlet1D(phase._base) self.boundary = (self.outlet) def __dealloc__(self): @@ -381,8 +377,8 @@ cdef class OutletReservoir1D(Boundary1D): """ A one-dimensional outlet into a reservoir. """ - def __cinit__(self, *args, **kwargs): - self.outlet = new CxxOutletRes1D() + def __cinit__(self, _SolutionBase phase, *args, **kwargs): + self.outlet = new CxxOutletRes1D(phase._base) self.boundary = (self.outlet) def __dealloc__(self): @@ -391,8 +387,8 @@ cdef class OutletReservoir1D(Boundary1D): cdef class SymmetryPlane1D(Boundary1D): """A symmetry plane.""" - def __cinit__(self, *args, **kwargs): - self.symm = new CxxSymm1D() + def __cinit__(self, _SolutionBase phase, *args, **kwargs): + self.symm = new CxxSymm1D(phase._base) self.boundary = (self.symm) def __dealloc__(self): @@ -401,8 +397,8 @@ cdef class SymmetryPlane1D(Boundary1D): cdef class Surface1D(Boundary1D): """A solid surface.""" - def __cinit__(self, *args, **kwargs): - self.surf = new CxxSurf1D() + def __cinit__(self, _SolutionBase phase, *args, **kwargs): + self.surf = new CxxSurf1D(phase._base) self.boundary = (self.surf) def __dealloc__(self): @@ -420,39 +416,31 @@ cdef class ReactingSurface1D(Boundary1D): Starting in Cantera 3.0, parameter `phase` 
should reference surface instead of gas phase. """ - def __cinit__(self, *args, phase=None, **kwargs): - cdef _SolutionBase sol - if isinstance(phase, _SolutionBase) and phase.phase_of_matter != "gas": - sol = phase - self.surf = new CxxReactingSurf1D(sol._base) + def __cinit__(self, _SolutionBase phase, *args, **kwargs): + if phase.phase_of_matter != "gas": + self.surf = new CxxReactingSurf1D(phase._base) else: # legacy pathway - deprecation is handled in __init__ self.surf = new CxxReactingSurf1D() self.boundary = (self.surf) - def __init__(self, *args, phase=None, **kwargs): + def __init__(self, _SolutionBase phase, name=None): self._weakref_proxy = _WeakrefProxy() - if phase is None and isinstance(args[0], _SolutionBase): - phase = args[0] - args = args[1:] - cdef _SolutionBase sol - if isinstance(phase, _SolutionBase): - if phase.phase_of_matter == "gas": - warnings.warn("Starting in Cantera 3.0, parameter 'phase' should " - "reference surface instead of gas phase.", DeprecationWarning) - super().__init__(*args, phase=phase, **kwargs) - else: - sol = phase - gas = None - for val in sol._adjacent.values(): - if val.phase_of_matter == "gas": - gas = val - break - if gas is None: - raise CanteraError("ReactingSurface1D needs an adjacent gas phase") - super().__init__(*args, phase=gas, **kwargs) + if phase.phase_of_matter == "gas": + warnings.warn("Starting in Cantera 3.0, parameter 'phase' should " + "reference surface instead of gas phase.", DeprecationWarning) + super().__init__(phase, name=name) else: - super().__init__(*args, phase=phase, **kwargs) + sol = phase + gas = None + for val in sol._adjacent.values(): + if val.phase_of_matter == "gas": + gas = val + break + if gas is None: + raise CanteraError("ReactingSurface1D needs an adjacent gas phase") + super().__init__(gas, name=name) + self.surface = phase self.surface._references[self._weakref_proxy] = True diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index 3b91720a314..9d0bcfa1d8b 100644 
--- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -612,24 +612,22 @@ ReactingSurf1D::ReactingSurf1D() ReactingSurf1D::ReactingSurf1D(shared_ptr solution) { - if (!std::dynamic_pointer_cast(solution->thermo())) { + auto phase = std::dynamic_pointer_cast(solution->thermo()); + if (!phase) { throw CanteraError("ReactingSurf1D::ReactingSurf1D", "Detected incompatible ThermoPhase type '{}'", solution->thermo()->type()); } - if (!std::dynamic_pointer_cast(solution->kinetics())) { + auto kin = std::dynamic_pointer_cast(solution->kinetics()); + if (!kin) { throw CanteraError("ReactingSurf1D::ReactingSurf1D", "Detected incompatible kinetics type '{}'", solution->kinetics()->kineticsType()); } m_solution = solution; - m_kin = (InterfaceKinetics*)solution->kinetics().get(); + m_kin = kin.get(); + m_sphase = phase.get(); + m_surfindex = m_kin->surfacePhaseIndex(); - m_sphase = (SurfPhase*)&m_kin->thermo(m_surfindex); - if (m_sphase->name() != m_solution->thermo()->name()) { - throw CanteraError("ReactingSurf1D::ReactingSurf1D", - "Detected inconsistent ThermoPhase objects: mismatch of '{}' and '{}'.", - m_sphase->name(), m_solution->thermo()->name()); - } m_nsp = m_sphase->nSpecies(); m_enabled = true; } diff --git a/test/python/test_onedim.py b/test/python/test_onedim.py index b73b52cba37..e65488761e8 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -76,6 +76,19 @@ def test_uncopyable(self): with self.assertRaises(NotImplementedError): copy.copy(flame) + def test_exceptions(self): + with pytest.raises(TypeError, match="Argument 'phase' has incorrect type"): + ct.Inlet1D(None) + gas = ct.Solution("h2o2.yaml") + with pytest.warns(DeprecationWarning, match="should reference surface"): + ct.ReactingSurface1D(gas) + with pytest.raises(TypeError, match="unexpected keyword"): + ct.ReactingSurface1D(gas, foo="bar") + interface = ct.Solution("diamond.yaml", "diamond_100") + surf = ct.ReactingSurface1D(interface) + with 
pytest.warns(DeprecationWarning, match="Method to be removed"): + surf.set_kinetics(interface) + def test_invalid_property(self): gas1 = ct.Solution("h2o2.yaml") inlet = ct.Inlet1D(name='something', phase=gas1) From 488bd1a3ab9ef16d0ab30dd84fc12ad18ccf5287 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 27 Sep 2022 08:56:58 -0500 Subject: [PATCH 05/93] [samples] Fix regression in diffusion_flame_batch.py Remove code introduced in #1394 --- samples/python/onedim/diffusion_flame_batch.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/samples/python/onedim/diffusion_flame_batch.py b/samples/python/onedim/diffusion_flame_batch.py index 82a83feee6e..6937ba700fc 100644 --- a/samples/python/onedim/diffusion_flame_batch.py +++ b/samples/python/onedim/diffusion_flame_batch.py @@ -26,9 +26,6 @@ import cantera as ct -import warnings -warnings.filterwarnings("error", module=".*") - class FlameExtinguished(Exception): pass @@ -52,7 +49,6 @@ class FlameExtinguished(Exception): reaction_mechanism = 'h2o2.yaml' gas = ct.Solution(reaction_mechanism) -gas.reactant_stoich_coeffs3 width = 18e-3 # 18mm wide f = ct.CounterflowDiffusionFlame(gas, width=width) From 7684653bb24199c15e9d9f1da1f3c1278b0ed60b Mon Sep 17 00:00:00 2001 From: Bryan Weber Date: Sun, 25 Sep 2022 20:33:41 -0400 Subject: [PATCH 06/93] [CI] Fix running examples by ignoring warnings Matplotlib 3.6.0 introduced a new incidental warning which has nothing to do with our code. This change ignores that warning so other warnings can still be caught. 
--- .github/workflows/main.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e563499707b..253a72040bf 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -309,7 +309,8 @@ jobs: -exec sh -c 'for n; do echo "$n" | tee -a results.txt && python3 "$n" >> results.txt || exit 1; done' sh {} + env: PYTHONPATH: build/python - PYTHONWARNINGS: error + # The ignore setting here is due to a new warning introduced in Matplotlib==3.6.0 + PYTHONWARNINGS: "error,ignore:warn_name_set_on_empty_Forward::pyparsing" MPLBACKEND: Agg - name: Save the results file for inspection uses: actions/upload-artifact@v2 From e20a57acfb53885f8cb2b67769666ccb4acde032 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Tue, 27 Sep 2022 21:57:54 -0400 Subject: [PATCH 07/93] Fix Python package build on Windows with setuptools >= 65.4.0 For some reason, setuptools thinks it needs to be able to find the user's home directory to build a wheel, but on Windows this relies on an environment variable (USERPROFILE) that we weren't by default passing in through SCons. --- .github/workflows/main.yml | 2 +- SConstruct | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 253a72040bf..b38f4f9e917 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -495,7 +495,7 @@ jobs: shell: bash - name: Build Cantera run: scons build -j2 boost_inc_dir=%BOOST_ROOT% debug=n logging=debug - python_package=full env_vars=PYTHONPATH,GITHUB_ACTIONS + python_package=full env_vars=USERPROFILE,GITHUB_ACTIONS msvc_version=${{ matrix.vs-toolset }} f90_interface=n --debug=time shell: cmd - name: Test Cantera diff --git a/SConstruct b/SConstruct index 87f9b9992dd..fc3cf277b36 100644 --- a/SConstruct +++ b/SConstruct @@ -428,7 +428,7 @@ config_options = [ """Environment variables to propagate through to SCons. 
Either the string 'all' or a comma separated list of variable names, for example, 'LD_LIBRARY_PATH,HOME'.""", - "PATH,LD_LIBRARY_PATH,DYLD_LIBRARY_PATH,PYTHONPATH"), + "PATH,LD_LIBRARY_PATH,DYLD_LIBRARY_PATH,PYTHONPATH,USERPROFILE"), BoolOption( "use_pch", "Use a precompiled-header to speed up compilation", From cbeaf0c5ff9d3a122d462299da508794a269074a Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Tue, 27 Sep 2022 23:38:16 -0400 Subject: [PATCH 08/93] Fix compiler warnings from VS2019 --- src/numerics/AdaptivePreconditioner.cpp | 4 ++-- src/thermo/Elements.cpp | 10 +++++----- src/transport/GasTransport.cpp | 14 +++++++------- src/zeroD/IdealGasConstPressureMoleReactor.cpp | 10 ++++++---- src/zeroD/IdealGasMoleReactor.cpp | 10 +++++++--- src/zeroD/MoleReactor.cpp | 4 ++-- src/zeroD/Reactor.cpp | 4 +++- 7 files changed, 32 insertions(+), 24 deletions(-) diff --git a/src/numerics/AdaptivePreconditioner.cpp b/src/numerics/AdaptivePreconditioner.cpp index 8b2829cac25..0bca1904d5c 100644 --- a/src/numerics/AdaptivePreconditioner.cpp +++ b/src/numerics/AdaptivePreconditioner.cpp @@ -16,7 +16,7 @@ AdaptivePreconditioner::AdaptivePreconditioner() void AdaptivePreconditioner::setValue(size_t row, size_t col, double value) { - m_jac_trips.emplace_back(row, col, value); + m_jac_trips.emplace_back(static_cast(row), static_cast(col), value); } void AdaptivePreconditioner::stateAdjustment(vector_fp& state) { @@ -47,7 +47,7 @@ void AdaptivePreconditioner::initialize(size_t networkSize) setIlutDropTol(1e-10); } if (m_drop_tol == 0) { - setIlutFillFactor(m_dim/4); + setIlutFillFactor(static_cast(m_dim) / 4); } // update initialized status m_init = true; diff --git a/src/thermo/Elements.cpp b/src/thermo/Elements.cpp index ea2bec760cc..2673bbc03e4 100644 --- a/src/thermo/Elements.cpp +++ b/src/thermo/Elements.cpp @@ -279,7 +279,7 @@ double getElementWeight(const std::string& ename) double getElementWeight(int atomicNumber) { - int num = numElementsDefined(); + int num = 
static_cast(numElementsDefined()); if (atomicNumber > num || atomicNumber < 1) { throw IndexError("getElementWeight", "atomicWeightTable", atomicNumber, num); } @@ -309,7 +309,7 @@ string getElementSymbol(const std::string& ename) string getElementSymbol(int atomicNumber) { - int num = numElementsDefined(); + int num = static_cast(numElementsDefined()); if (atomicNumber > num || atomicNumber < 1) { throw IndexError("getElementSymbol", "atomicWeightTable", atomicNumber, num); } @@ -334,7 +334,7 @@ string getElementName(const std::string& ename) string getElementName(int atomicNumber) { - int num = numElementsDefined(); + int num = static_cast(numElementsDefined()); if (atomicNumber > num || atomicNumber < 1) { throw IndexError("getElementName", "atomicWeightTable", atomicNumber, num); } @@ -349,9 +349,9 @@ int getAtomicNumber(const std::string& ename) string name = toLowerCopy(symbol); for (size_t i = 0; i < numElements; i++) { if (symbol == atomicWeightTable[i].symbol) { - return i + 1; + return static_cast(i) + 1; } else if (name == atomicWeightTable[i].fullName) { - return i + 1; + return static_cast(i) + 1; } } for (size_t i = 0; i < numIsotopes; i++) { diff --git a/src/transport/GasTransport.cpp b/src/transport/GasTransport.cpp index 27ccfa6120b..82a3bc1e22b 100644 --- a/src/transport/GasTransport.cpp +++ b/src/transport/GasTransport.cpp @@ -823,14 +823,14 @@ void GasTransport::getBinDiffCorrection(double t, MMCollisionInt& integrals, void GasTransport::getViscosityPolynomial(size_t i, double* coeffs) const { - for (size_t k = 0; k < (m_mode == CK_Mode ? 4 : 5); k++) { + for (int k = 0; k < (m_mode == CK_Mode ? 4 : 5); k++) { coeffs[k] = m_visccoeffs[i][k]; } } void GasTransport::getConductivityPolynomial(size_t i, double* coeffs) const { - for (size_t k = 0; k < (m_mode == CK_Mode ? 4 : 5); k++) { + for (int k = 0; k < (m_mode == CK_Mode ? 
4 : 5); k++) { coeffs[k] = m_condcoeffs[i][k]; } } @@ -845,7 +845,7 @@ void GasTransport::getBinDiffusivityPolynomial(size_t i, size_t j, double* coeff } ic += mj - mi; - for (size_t k = 0; k < (m_mode == CK_Mode ? 4 : 5); k++) { + for (int k = 0; k < (m_mode == CK_Mode ? 4 : 5); k++) { coeffs[k] = m_diffcoeffs[ic][k]; } } @@ -855,7 +855,7 @@ void GasTransport::getCollisionIntegralPolynomial(size_t i, size_t j, double* bstar_coeffs, double* cstar_coeffs) const { - for (size_t k = 0; k < (m_mode == CK_Mode ? 6 : COLL_INT_POLY_DEGREE) + 1; k++) { + for (int k = 0; k < (m_mode == CK_Mode ? 6 : COLL_INT_POLY_DEGREE) + 1; k++) { astar_coeffs[k] = m_astar_poly[m_poly[i][j]][k]; bstar_coeffs[k] = m_bstar_poly[m_poly[i][j]][k]; cstar_coeffs[k] = m_cstar_poly[m_poly[i][j]][k]; @@ -864,7 +864,7 @@ void GasTransport::getCollisionIntegralPolynomial(size_t i, size_t j, void GasTransport::setViscosityPolynomial(size_t i, double* coeffs) { - for (size_t k = 0; k < (m_mode == CK_Mode ? 4 : 5); k++) { + for (int k = 0; k < (m_mode == CK_Mode ? 4 : 5); k++) { m_visccoeffs[i][k] = coeffs[k]; } @@ -877,7 +877,7 @@ void GasTransport::setViscosityPolynomial(size_t i, double* coeffs) void GasTransport::setConductivityPolynomial(size_t i, double* coeffs) { - for (size_t k = 0; k < (m_mode == CK_Mode ? 4 : 5); k++) { + for (int k = 0; k < (m_mode == CK_Mode ? 4 : 5); k++) { m_condcoeffs[i][k] = coeffs[k]; } @@ -898,7 +898,7 @@ void GasTransport::setBinDiffusivityPolynomial(size_t i, size_t j, double* coeff } ic += mj - mi; - for (size_t k = 0; k < (m_mode == CK_Mode ? 4 : 5); k++) { + for (int k = 0; k < (m_mode == CK_Mode ? 
4 : 5); k++) { m_diffcoeffs[ic][k] = coeffs[k]; } diff --git a/src/zeroD/IdealGasConstPressureMoleReactor.cpp b/src/zeroD/IdealGasConstPressureMoleReactor.cpp index e44e8529ee4..800189392d6 100644 --- a/src/zeroD/IdealGasConstPressureMoleReactor.cpp +++ b/src/zeroD/IdealGasConstPressureMoleReactor.cpp @@ -144,7 +144,8 @@ Eigen::SparseMatrix IdealGasConstPressureMoleReactor::jacobian() // which is small and would completely destroy the sparsity of the Jacobian for (int k = 0; k < dwdX.outerSize(); k++) { for (Eigen::SparseMatrix::InnerIterator it(dwdX, k); it; ++it) { - m_jac_trips.emplace_back(it.row() + m_sidx, it.col() + m_sidx, + m_jac_trips.emplace_back(static_cast(it.row() + m_sidx), + static_cast(it.col() + m_sidx), it.value() * molarVolume); } } @@ -171,7 +172,8 @@ Eigen::SparseMatrix IdealGasConstPressureMoleReactor::jacobian() for (size_t j = 0; j < m_nv; j++) { double ydotPerturbed = rhsPerturbed[j] / lhsPerturbed[j]; double ydotCurrent = rhsCurrent[j] / lhsCurrent[j]; - m_jac_trips.emplace_back(j, 0, (ydotPerturbed - ydotCurrent) / deltaTemp); + m_jac_trips.emplace_back(static_cast(j), 0, + (ydotPerturbed - ydotCurrent) / deltaTemp); } // d T_dot/dnj Eigen::VectorXd specificHeat(m_nsp); @@ -188,8 +190,8 @@ Eigen::SparseMatrix IdealGasConstPressureMoleReactor::jacobian() // determine derivatives // spans columns Eigen::VectorXd hkdwkdnjSum = enthalpy.transpose() * dwdX; - for (int j = 0; j < m_nsp; j++) { - m_jac_trips.emplace_back(0, j + m_sidx, + for (size_t j = 0; j < m_nsp; j++) { + m_jac_trips.emplace_back(0, static_cast(j + m_sidx), ((specificHeat[j] - cp_mole) * m_vol * qdot - m_vol * cp_mole * hkdwkdnjSum[j] + totalCp * hk_dwdot_dC_sum) / (totalCp * totalCp)); diff --git a/src/zeroD/IdealGasMoleReactor.cpp b/src/zeroD/IdealGasMoleReactor.cpp index a553f5aedb3..157a22e81a2 100644 --- a/src/zeroD/IdealGasMoleReactor.cpp +++ b/src/zeroD/IdealGasMoleReactor.cpp @@ -172,7 +172,10 @@ Eigen::SparseMatrix IdealGasMoleReactor::jacobian() // add to 
preconditioner for (int k=0; k::InnerIterator it(speciesDervs, k); it; ++it) { - m_jac_trips.emplace_back(it.row() + m_sidx, it.col() + m_sidx, it.value()); + m_jac_trips.emplace_back( + static_cast(it.row() + m_sidx), + static_cast(it.col() + m_sidx), + it.value()); } } // Temperature Derivatives @@ -199,7 +202,8 @@ Eigen::SparseMatrix IdealGasMoleReactor::jacobian() for (size_t j = 0; j < m_nv; j++) { double ydotPerturbed = rhsPerturbed[j] / lhsPerturbed[j]; double ydotCurrent = rhsCurrent[j] / lhsCurrent[j]; - m_jac_trips.emplace_back(j, 0, (ydotPerturbed - ydotCurrent) / deltaTemp); + m_jac_trips.emplace_back(static_cast(j), 0, + (ydotPerturbed - ydotCurrent) / deltaTemp); } // find derivatives d T_dot/dNj vector_fp specificHeat(m_nsp); @@ -236,7 +240,7 @@ Eigen::SparseMatrix IdealGasMoleReactor::jacobian() ukdnkdnjSum += internal_energy[k] * speciesDervs.coeff(k, j); } // set appropriate column of preconditioner - m_jac_trips.emplace_back(0, j + m_sidx, + m_jac_trips.emplace_back(0, static_cast(j + m_sidx), (ukdwdCtotSum - ukdnkdnjSum + specificHeat[j] * uknkSum / totalCv) / totalCv); } } diff --git a/src/zeroD/MoleReactor.cpp b/src/zeroD/MoleReactor.cpp index 9d4bedacd3f..1bfe6ac3bf1 100644 --- a/src/zeroD/MoleReactor.cpp +++ b/src/zeroD/MoleReactor.cpp @@ -24,7 +24,7 @@ void MoleReactor::getSurfaceInitialConditions(double* y) for (auto& S : m_surfaces) { double area = S->area(); auto currPhase = S->thermo(); - double tempLoc = currPhase->nSpecies(); + size_t tempLoc = currPhase->nSpecies(); double surfDensity = currPhase->siteDensity(); S->getCoverages(y + loc); // convert coverages to moles @@ -49,7 +49,7 @@ void MoleReactor::updateSurfaceState(double* y) auto surf = S->thermo(); double invArea = 1/S->area(); double invSurfDensity = 1/surf->siteDensity(); - double tempLoc = surf->nSpecies(); + size_t tempLoc = surf->nSpecies(); for (size_t i = 0; i < tempLoc; i++) { coverages[i + loc] = y[i + loc] * invArea * surf->size(i) * invSurfDensity; } diff --git 
a/src/zeroD/Reactor.cpp b/src/zeroD/Reactor.cpp index b5e03080fa0..5c45a00bcde 100644 --- a/src/zeroD/Reactor.cpp +++ b/src/zeroD/Reactor.cpp @@ -351,7 +351,9 @@ Eigen::SparseMatrix Reactor::finiteDifferenceJacobian() double ydotPerturbed = rhsPerturbed[i] / lhsPerturbed[i]; double ydotCurrent = rhsCurrent[i] / lhsCurrent[i]; if (ydotCurrent != ydotPerturbed) { - m_jac_trips.emplace_back(i, j, (ydotPerturbed - ydotCurrent) / delta_y); + m_jac_trips.emplace_back( + static_cast(i), static_cast(j), + (ydotPerturbed - ydotCurrent) / delta_y); } } } From 332bb9b9d970c6ead5160f428ff49976fb2947fa Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Wed, 28 Sep 2022 14:54:34 -0400 Subject: [PATCH 09/93] Fix compiler warnings identified by Clang 14.0 --- src/equil/vcs_solve.cpp | 2 -- src/equil/vcs_solve_TP.cpp | 2 -- src/oneD/IonFlow.cpp | 2 -- 3 files changed, 6 deletions(-) diff --git a/src/equil/vcs_solve.cpp b/src/equil/vcs_solve.cpp index b6f7fcfbdc4..a7a9288447d 100644 --- a/src/equil/vcs_solve.cpp +++ b/src/equil/vcs_solve.cpp @@ -1326,10 +1326,8 @@ double VCS_SOLVE::vcs_phaseStabilityTest(const size_t iph) } // Now possibly dampen the estimate. 
- double sumADel = 0.0; for (size_t k = 0; k < nsp; k++) { delFrac[k] = fracDelta_raw[k] - fracDelta_old[k]; - sumADel += fabs(delFrac[k]); } normUpdate = vcs_l2norm(delFrac); diff --git a/src/equil/vcs_solve_TP.cpp b/src/equil/vcs_solve_TP.cpp index 4d3218cf789..0d927ca7909 100644 --- a/src/equil/vcs_solve_TP.cpp +++ b/src/equil/vcs_solve_TP.cpp @@ -2962,7 +2962,6 @@ double VCS_SOLVE::vcs_tmoles() void VCS_SOLVE::check_tmoles() const { - double sum = 0.0; for (size_t i = 0; i < m_numPhases; i++) { double m_tPhaseMoles_old_a = TPhInertMoles[i]; @@ -2971,7 +2970,6 @@ void VCS_SOLVE::check_tmoles() const m_tPhaseMoles_old_a += m_molNumSpecies_old[k]; } } - sum += m_tPhaseMoles_old_a; double denom = m_tPhaseMoles_old[i]+ m_tPhaseMoles_old_a + 1.0E-19; if (!vcs_doubleEqual(m_tPhaseMoles_old[i]/denom, m_tPhaseMoles_old_a/denom)) { diff --git a/src/oneD/IonFlow.cpp b/src/oneD/IonFlow.cpp index b2e0ca8b5be..edbe88bf071 100644 --- a/src/oneD/IonFlow.cpp +++ b/src/oneD/IonFlow.cpp @@ -151,11 +151,9 @@ void IonFlow::electricFieldMethod(const double* x, size_t j0, size_t j1) double dz = z(j+1) - z(j); // mixture-average diffusion - double sum = 0.0; for (size_t k = 0; k < m_nsp; k++) { m_flux(k,j) = m_wt[k]*(rho*m_diff[k+m_nsp*j]/wtm); m_flux(k,j) *= (X(x,k,j) - X(x,k,j+1))/dz; - sum -= m_flux(k,j); } // ambipolar diffusion From 84a643c6b5543ba2d1cab3c655263d04d80b7e7e Mon Sep 17 00:00:00 2001 From: Corey Randall Date: Tue, 27 Sep 2022 10:59:53 -0600 Subject: [PATCH 10/93] interface_current method and accompanying test added --- interfaces/cython/cantera/kinetics.pyx | 9 +++++++++ test/python/test_kinetics.py | 23 +++++++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/interfaces/cython/cantera/kinetics.pyx b/interfaces/cython/cantera/kinetics.pyx index f881931484f..3377a7d0e2d 100644 --- a/interfaces/cython/cantera/kinetics.pyx +++ b/interfaces/cython/cantera/kinetics.pyx @@ -6,6 +6,7 @@ import warnings cimport numpy as np import numpy as np +from .constants 
import * from .reaction cimport * from ._utils cimport * from . import _utils @@ -880,6 +881,14 @@ cdef class InterfaceKinetics(Kinetics): """ return self.net_production_rates[self._phase_slice(phase)] + def interface_current(self, phase): + """ + The interface current is useful when charge transfer reactions occur at + an interface. It is defined here as the net positive charge entering the + phase ``phase`` (Units: A/m^2). + """ + return sum(self.get_net_production_rates(phase)*phase.charges)*faraday + def write_yaml(self, filename, phases=None, units=None, precision=None, skip_user_defined=None): """ diff --git a/test/python/test_kinetics.py b/test/python/test_kinetics.py index 18a325b4c03..567882d4c03 100644 --- a/test/python/test_kinetics.py +++ b/test/python/test_kinetics.py @@ -1065,6 +1065,29 @@ def cathode_current(phi_s, phi_l, X_Li_cathode): ref = np.genfromtxt(self.test_data_path / "lithium-ion-battery-test.csv") assert np.allclose(data, ref, rtol=1e-7) + def test_interface_current(self): + file = "lithium_ion_battery.yaml" + + # The 'elde' electrode phase is needed as a source/sink for electrons: + anode = ct.Solution(file,"anode") + elect = ct.Solution(file,"electron") + elyte = ct.Solution(file,"electrolyte") + anode_int = ct.Interface(file,"edge_anode_electrolyte",[anode,elect,elyte]) + + anode.X = [0.9, 0.1] + elyte.X = [0.4, 0.3, 0.15, 0.15] + + anode.electric_potential = 0. + elyte.electric_potential = 3. 
+ + species_productions = anode_int.get_net_production_rates(elyte) + species_charges = elyte.charges + + method = anode_int.interface_current(elyte) + manual = sum(species_productions*species_charges)*ct.faraday + + self.assertEqual(method,manual) + class TestDuplicateReactions(utilities.CanteraTest): infile = 'duplicate-reactions.yaml' From 3262435b8b4e92ab6a68370dec847b286ed2d9d8 Mon Sep 17 00:00:00 2001 From: Corey Randall Date: Thu, 6 Oct 2022 18:59:05 -0600 Subject: [PATCH 11/93] updated interface_current and test to be written in C++ --- AUTHORS | 1 + include/cantera/kinetics/InterfaceKinetics.h | 2 ++ interfaces/cython/cantera/kinetics.pxd | 1 + interfaces/cython/cantera/kinetics.pyx | 5 ++-- src/kinetics/InterfaceKinetics.cpp | 26 ++++++++++++++++++++ test/python/test_kinetics.py | 13 ++++++---- 6 files changed, 41 insertions(+), 7 deletions(-) diff --git a/AUTHORS b/AUTHORS index 472c500cc47..00a5705406d 100644 --- a/AUTHORS +++ b/AUTHORS @@ -31,6 +31,7 @@ Yuanjie Jiang Benjamin Kee (@lionkey) Gandhali Kogekar (@gkogekar) Daniel Korff (@korffdm), Colorado School of Mines +Corey R. Randall (@c-randall), Colorado School of Mines Jon Kristofer Samesh Lakothia (@sameshl) Kyle Linevitch, Jr. (@KyleLinevitchJr) diff --git a/include/cantera/kinetics/InterfaceKinetics.h b/include/cantera/kinetics/InterfaceKinetics.h index 3a6f2686eef..5ebec2fbfdc 100644 --- a/include/cantera/kinetics/InterfaceKinetics.h +++ b/include/cantera/kinetics/InterfaceKinetics.h @@ -287,6 +287,8 @@ class InterfaceKinetics : public Kinetics */ int phaseStability(const size_t iphase) const; + double InterfaceCurrent(int iPhase); + protected: //! 
Temporary work vector of length m_kk vector_fp m_grt; diff --git a/interfaces/cython/cantera/kinetics.pxd b/interfaces/cython/cantera/kinetics.pxd index eeab8046cdf..a5ffb09968b 100644 --- a/interfaces/cython/cantera/kinetics.pxd +++ b/interfaces/cython/cantera/kinetics.pxd @@ -69,6 +69,7 @@ cdef extern from "cantera/kinetics/InterfaceKinetics.h": cdef cppclass CxxInterfaceKinetics "Cantera::InterfaceKinetics": void advanceCoverages(double, double, double, double, size_t, size_t) except +translate_exception void solvePseudoSteadyStateProblem() except +translate_exception + double InterfaceCurrent(int) except +translate_exception cdef extern from "cantera/cython/kinetics_utils.h": diff --git a/interfaces/cython/cantera/kinetics.pyx b/interfaces/cython/cantera/kinetics.pyx index 3377a7d0e2d..55dff0e485f 100644 --- a/interfaces/cython/cantera/kinetics.pyx +++ b/interfaces/cython/cantera/kinetics.pyx @@ -885,9 +885,10 @@ cdef class InterfaceKinetics(Kinetics): """ The interface current is useful when charge transfer reactions occur at an interface. It is defined here as the net positive charge entering the - phase ``phase`` (Units: A/m^2). + phase ``phase`` (Units: A/m^2 for a surface, A/m for an edge reaction). 
""" - return sum(self.get_net_production_rates(phase)*phase.charges)*faraday + iPhase = self.phase_index(phase) + return (self.kinetics).InterfaceCurrent(iPhase) def write_yaml(self, filename, phases=None, units=None, precision=None, skip_user_defined=None): diff --git a/src/kinetics/InterfaceKinetics.cpp b/src/kinetics/InterfaceKinetics.cpp index 0e0f9be695a..9b50f22f973 100644 --- a/src/kinetics/InterfaceKinetics.cpp +++ b/src/kinetics/InterfaceKinetics.cpp @@ -595,4 +595,30 @@ void InterfaceKinetics::setPhaseStability(const size_t iphase, const int isStabl } } +double InterfaceKinetics::InterfaceCurrent(int iPhase) +{ + int sp = thermo(iPhase).nSpecies(); + double charge_k[sp]; + double net_k[sp]; + doublereal netprods[m_kk]; + + thermo(iPhase).getCharges(charge_k); + + getNetProductionRates(netprods); + + for(int k=0; k Date: Thu, 6 Oct 2022 19:45:59 -0600 Subject: [PATCH 12/93] removed unneeded constants import from cython interface kinetics.pyx file --- interfaces/cython/cantera/kinetics.pyx | 1 - 1 file changed, 1 deletion(-) diff --git a/interfaces/cython/cantera/kinetics.pyx b/interfaces/cython/cantera/kinetics.pyx index 55dff0e485f..48ea5065000 100644 --- a/interfaces/cython/cantera/kinetics.pyx +++ b/interfaces/cython/cantera/kinetics.pyx @@ -6,7 +6,6 @@ import warnings cimport numpy as np import numpy as np -from .constants import * from .reaction cimport * from ._utils cimport * from . 
import _utils From ba5d5f892dae9ebf1e00abdf574dec352dcb151c Mon Sep 17 00:00:00 2001 From: Corey Randall Date: Fri, 7 Oct 2022 13:11:17 -0600 Subject: [PATCH 13/93] modified syntax for consistency with other code, added comment for interface_current --- include/cantera/kinetics/InterfaceKinetics.h | 11 ++++++++- interfaces/cython/cantera/kinetics.pxd | 2 +- interfaces/cython/cantera/kinetics.pyx | 4 +-- src/kinetics/InterfaceKinetics.cpp | 26 ++++++++++---------- test/python/test_kinetics.py | 4 +-- 5 files changed, 28 insertions(+), 19 deletions(-) diff --git a/include/cantera/kinetics/InterfaceKinetics.h b/include/cantera/kinetics/InterfaceKinetics.h index 5ebec2fbfdc..b7de20efe2d 100644 --- a/include/cantera/kinetics/InterfaceKinetics.h +++ b/include/cantera/kinetics/InterfaceKinetics.h @@ -287,7 +287,16 @@ class InterfaceKinetics : public Kinetics */ int phaseStability(const size_t iphase) const; - double InterfaceCurrent(int iPhase); + //! Gets the interface current for the ith phaseExistence + /*! + * @param iphase Phase Id + * @return The double specifying the interface current. The interface Current + * is useful when charge transfer reactions occur at an interface. It + * is defined here as the net positive charge entering the phase + * specified by the Phase Id. (Units: A/m^2 for a surface reaction, + * A/m for an edge reaction). + */ + double InterfaceCurrent(const size_t iphase); protected: //! 
Temporary work vector of length m_kk diff --git a/interfaces/cython/cantera/kinetics.pxd b/interfaces/cython/cantera/kinetics.pxd index a5ffb09968b..fd8c6ecfe34 100644 --- a/interfaces/cython/cantera/kinetics.pxd +++ b/interfaces/cython/cantera/kinetics.pxd @@ -69,7 +69,7 @@ cdef extern from "cantera/kinetics/InterfaceKinetics.h": cdef cppclass CxxInterfaceKinetics "Cantera::InterfaceKinetics": void advanceCoverages(double, double, double, double, size_t, size_t) except +translate_exception void solvePseudoSteadyStateProblem() except +translate_exception - double InterfaceCurrent(int) except +translate_exception + double InterfaceCurrent(size_t) except +translate_exception cdef extern from "cantera/cython/kinetics_utils.h": diff --git a/interfaces/cython/cantera/kinetics.pyx b/interfaces/cython/cantera/kinetics.pyx index 48ea5065000..05b3e1c6de6 100644 --- a/interfaces/cython/cantera/kinetics.pyx +++ b/interfaces/cython/cantera/kinetics.pyx @@ -886,8 +886,8 @@ cdef class InterfaceKinetics(Kinetics): an interface. It is defined here as the net positive charge entering the phase ``phase`` (Units: A/m^2 for a surface, A/m for an edge reaction). 
""" - iPhase = self.phase_index(phase) - return (self.kinetics).InterfaceCurrent(iPhase) + i_phase = self.phase_index(phase) + return (self.kinetics).InterfaceCurrent(i_phase) def write_yaml(self, filename, phases=None, units=None, precision=None, skip_user_defined=None): diff --git a/src/kinetics/InterfaceKinetics.cpp b/src/kinetics/InterfaceKinetics.cpp index 9b50f22f973..7489a659fbc 100644 --- a/src/kinetics/InterfaceKinetics.cpp +++ b/src/kinetics/InterfaceKinetics.cpp @@ -595,30 +595,30 @@ void InterfaceKinetics::setPhaseStability(const size_t iphase, const int isStabl } } -double InterfaceKinetics::InterfaceCurrent(int iPhase) +double InterfaceKinetics::InterfaceCurrent(const size_t iphase) { - int sp = thermo(iPhase).nSpecies(); - double charge_k[sp]; - double net_k[sp]; - doublereal netprods[m_kk]; + int nSp = thermo(iphase).nSpecies(); + double charge_k[nSp]; + double sdot_k[nSp]; + doublereal netProdRates[m_kk]; - thermo(iPhase).getCharges(charge_k); + thermo(iphase).getCharges(charge_k); - getNetProductionRates(netprods); + getNetProductionRates(netProdRates); - for(int k=0; k Date: Sun, 9 Oct 2022 00:12:06 -0400 Subject: [PATCH 14/93] update C++ syntax to comply with community conventions --- include/cantera/kinetics/InterfaceKinetics.h | 4 +-- interfaces/cython/cantera/kinetics.pxd | 2 +- interfaces/cython/cantera/kinetics.pyx | 2 +- src/kinetics/InterfaceKinetics.cpp | 27 +++++++------------- 4 files changed, 13 insertions(+), 22 deletions(-) diff --git a/include/cantera/kinetics/InterfaceKinetics.h b/include/cantera/kinetics/InterfaceKinetics.h index b7de20efe2d..f29ebe273ef 100644 --- a/include/cantera/kinetics/InterfaceKinetics.h +++ b/include/cantera/kinetics/InterfaceKinetics.h @@ -287,7 +287,7 @@ class InterfaceKinetics : public Kinetics */ int phaseStability(const size_t iphase) const; - //! Gets the interface current for the ith phaseExistence + //! Gets the interface current for the ith phase /*! 
* @param iphase Phase Id * @return The double specifying the interface current. The interface Current @@ -296,7 +296,7 @@ class InterfaceKinetics : public Kinetics * specified by the Phase Id. (Units: A/m^2 for a surface reaction, * A/m for an edge reaction). */ - double InterfaceCurrent(const size_t iphase); + double interfaceCurrent(const size_t iphase); protected: //! Temporary work vector of length m_kk diff --git a/interfaces/cython/cantera/kinetics.pxd b/interfaces/cython/cantera/kinetics.pxd index fd8c6ecfe34..14e45b0ca4a 100644 --- a/interfaces/cython/cantera/kinetics.pxd +++ b/interfaces/cython/cantera/kinetics.pxd @@ -69,7 +69,7 @@ cdef extern from "cantera/kinetics/InterfaceKinetics.h": cdef cppclass CxxInterfaceKinetics "Cantera::InterfaceKinetics": void advanceCoverages(double, double, double, double, size_t, size_t) except +translate_exception void solvePseudoSteadyStateProblem() except +translate_exception - double InterfaceCurrent(size_t) except +translate_exception + double interfaceCurrent(size_t) except +translate_exception cdef extern from "cantera/cython/kinetics_utils.h": diff --git a/interfaces/cython/cantera/kinetics.pyx b/interfaces/cython/cantera/kinetics.pyx index 05b3e1c6de6..0798e859b5d 100644 --- a/interfaces/cython/cantera/kinetics.pyx +++ b/interfaces/cython/cantera/kinetics.pyx @@ -887,7 +887,7 @@ cdef class InterfaceKinetics(Kinetics): phase ``phase`` (Units: A/m^2 for a surface, A/m for an edge reaction). 
""" i_phase = self.phase_index(phase) - return (self.kinetics).InterfaceCurrent(i_phase) + return (self.kinetics).interfaceCurrent(i_phase) def write_yaml(self, filename, phases=None, units=None, precision=None, skip_user_defined=None): diff --git a/src/kinetics/InterfaceKinetics.cpp b/src/kinetics/InterfaceKinetics.cpp index 7489a659fbc..ab417e56e06 100644 --- a/src/kinetics/InterfaceKinetics.cpp +++ b/src/kinetics/InterfaceKinetics.cpp @@ -595,30 +595,21 @@ void InterfaceKinetics::setPhaseStability(const size_t iphase, const int isStabl } } -double InterfaceKinetics::InterfaceCurrent(const size_t iphase) +double InterfaceKinetics::interfaceCurrent(const size_t iphase) { - int nSp = thermo(iphase).nSpecies(); - double charge_k[nSp]; - double sdot_k[nSp]; - doublereal netProdRates[m_kk]; - - thermo(iphase).getCharges(charge_k); - - getNetProductionRates(netProdRates); - - for(int k=0; k Date: Sun, 9 Oct 2022 14:37:08 -0400 Subject: [PATCH 15/93] Update test/python/test_kinetics.py Co-authored-by: Ingmar Schoegl --- test/python/test_kinetics.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/python/test_kinetics.py b/test/python/test_kinetics.py index 525ad809777..7e9f79cf1c2 100644 --- a/test/python/test_kinetics.py +++ b/test/python/test_kinetics.py @@ -1087,9 +1087,9 @@ def test_interface_current(self): charges = p.charges method = anode_int.interface_current(p) - manual = sum(net_prod_rates*charges)*ct.faraday + manual = sum(net_prod_rates * charges) * ct.faraday - self.assertEqual(method,manual) + self.assertEqual(method, manual) class TestDuplicateReactions(utilities.CanteraTest): From 7cccb0cbb6fb170884a18bf64b7cf444519c5593 Mon Sep 17 00:00:00 2001 From: "Corey R. 
Randall" Date: Sun, 9 Oct 2022 14:37:40 -0400 Subject: [PATCH 16/93] Update src/kinetics/InterfaceKinetics.cpp Co-authored-by: Ingmar Schoegl --- src/kinetics/InterfaceKinetics.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/kinetics/InterfaceKinetics.cpp b/src/kinetics/InterfaceKinetics.cpp index ab417e56e06..6a288ed9e49 100644 --- a/src/kinetics/InterfaceKinetics.cpp +++ b/src/kinetics/InterfaceKinetics.cpp @@ -604,7 +604,7 @@ double InterfaceKinetics::interfaceCurrent(const size_t iphase) thermo(iphase).getCharges(charges.data()); getNetProductionRates(netProdRates.data()); - for(size_t k=0; k < thermo(iphase).nSpecies(); k++) + for (size_t k = 0; k < thermo(iphase).nSpecies(); k++) { dotProduct += charges[k] * netProdRates[m_start[iphase] + k]; } From f1d86d1b08dc3f55bcdd9db27965d409316f6f6c Mon Sep 17 00:00:00 2001 From: "Corey R. Randall" Date: Sun, 9 Oct 2022 14:38:02 -0400 Subject: [PATCH 17/93] Update test/python/test_kinetics.py Co-authored-by: Ingmar Schoegl --- test/python/test_kinetics.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/python/test_kinetics.py b/test/python/test_kinetics.py index 7e9f79cf1c2..44d91e3cc67 100644 --- a/test/python/test_kinetics.py +++ b/test/python/test_kinetics.py @@ -1069,10 +1069,10 @@ def test_interface_current(self): file = "lithium_ion_battery.yaml" # The 'elde' electrode phase is needed as a source/sink for electrons: - anode = ct.Solution(file,"anode") - elect = ct.Solution(file,"electron") - elyte = ct.Solution(file,"electrolyte") - anode_int = ct.Interface(file,"edge_anode_electrolyte",[anode,elect,elyte]) + anode = ct.Solution(file, "anode") + elect = ct.Solution(file, "electron") + elyte = ct.Solution(file, "electrolyte") + anode_int = ct.Interface(file, "edge_anode_electrolyte", [anode, elect, elyte]) anode.X = [0.9, 0.1] elyte.X = [0.4, 0.3, 0.15, 0.15] From bae69c00d401e842cf8f0c8ce73308276e4a24e5 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Wed, 28 
Sep 2022 23:39:12 -0400 Subject: [PATCH 18/93] Fix sourcegen compatibility with Python 3.8 --- .../sourcegen/sourcegen/_HeaderFileParser.py | 3 ++- .../sourcegen/sourcegen/_SourceGenerator.py | 3 ++- interfaces/sourcegen/sourcegen/_dataclasses.py | 5 +++-- interfaces/sourcegen/sourcegen/_orchestrate.py | 5 +++-- .../sourcegen/csharp/_CSharpSourceGenerator.py | 17 +++++++++-------- .../sourcegen/sourcegen/csharp/_Config.py | 7 ++++--- .../sourcegen/sourcegen/csharp/_dataclasses.py | 3 ++- 7 files changed, 25 insertions(+), 18 deletions(-) diff --git a/interfaces/sourcegen/sourcegen/_HeaderFileParser.py b/interfaces/sourcegen/sourcegen/_HeaderFileParser.py index 02dc42e657b..f355f26ad24 100644 --- a/interfaces/sourcegen/sourcegen/_HeaderFileParser.py +++ b/interfaces/sourcegen/sourcegen/_HeaderFileParser.py @@ -3,6 +3,7 @@ from pathlib import Path import re +from typing import List from ._dataclasses import HeaderFile, Func, Param @@ -28,7 +29,7 @@ def _parse_func(cls, c_func: str) -> Func: name = front[-1] return Func(ret_type, name, params) - def __init__(self, path: Path, ignore_funcs: list[str] = None): + def __init__(self, path: Path, ignore_funcs: List[str] = None): self._path = path self._ignore_funcs = ignore_funcs diff --git a/interfaces/sourcegen/sourcegen/_SourceGenerator.py b/interfaces/sourcegen/sourcegen/_SourceGenerator.py index 368b909a043..9c97445a287 100644 --- a/interfaces/sourcegen/sourcegen/_SourceGenerator.py +++ b/interfaces/sourcegen/sourcegen/_SourceGenerator.py @@ -3,6 +3,7 @@ from abc import ABCMeta, abstractmethod from pathlib import Path +from typing import List from ._dataclasses import HeaderFile @@ -15,5 +16,5 @@ def __init__(self, out_dir: Path, config: dict): pass @abstractmethod - def generate_source(self, headers_files: list[HeaderFile]): + def generate_source(self, headers_files: List[HeaderFile]): pass diff --git a/interfaces/sourcegen/sourcegen/_dataclasses.py b/interfaces/sourcegen/sourcegen/_dataclasses.py index 
02d5bd9aaf7..3743d9abaea 100644 --- a/interfaces/sourcegen/sourcegen/_dataclasses.py +++ b/interfaces/sourcegen/sourcegen/_dataclasses.py @@ -3,6 +3,7 @@ from dataclasses import dataclass from pathlib import Path +from typing import List import re from ._helpers import with_unpack_iter @@ -23,7 +24,7 @@ class Func: ret_type: str name: str - params: list[Param] + params: List[Param] @dataclass(frozen=True) @@ -32,4 +33,4 @@ class HeaderFile: """Represents information about a parsed C header file""" path: Path - funcs: list[Func] + funcs: List[Func] diff --git a/interfaces/sourcegen/sourcegen/_orchestrate.py b/interfaces/sourcegen/sourcegen/_orchestrate.py index bf000cff660..6ee68e56a5d 100644 --- a/interfaces/sourcegen/sourcegen/_orchestrate.py +++ b/interfaces/sourcegen/sourcegen/_orchestrate.py @@ -4,6 +4,7 @@ import importlib import inspect from pathlib import Path +from typing import List, Dict import ruamel.yaml from ._HeaderFileParser import HeaderFileParser @@ -24,8 +25,8 @@ def generate_source(lang: str, out_dir: str): with config_path.open() as config_file: config = ruamel.yaml.safe_load(config_file) - ignore_files: list[str] = config.get("ignore_files", []) - ignore_funcs: dict[str, list[str]] = config.get("ignore_funcs", {}) + ignore_files: List[str] = config.get("ignore_files", []) + ignore_funcs: Dict[str, List[str]] = config.get("ignore_funcs", {}) files = (HeaderFileParser(f, ignore_funcs.get(f.name, [])).parse() for f in _clib_path.glob("*.h") diff --git a/interfaces/sourcegen/sourcegen/csharp/_CSharpSourceGenerator.py b/interfaces/sourcegen/sourcegen/csharp/_CSharpSourceGenerator.py index 5d932cd5c8e..73ca7c5b00e 100644 --- a/interfaces/sourcegen/sourcegen/csharp/_CSharpSourceGenerator.py +++ b/interfaces/sourcegen/sourcegen/csharp/_CSharpSourceGenerator.py @@ -3,6 +3,7 @@ from itertools import starmap from pathlib import Path +from typing import List, Dict import re from ._dataclasses import CsFunc @@ -16,7 +17,7 @@ class 
CSharpSourceGenerator(SourceGenerator): """The SourceGenerator for scaffolding C# files for the .NET interface""" @staticmethod - def _join_params(params: list[Param]) -> str: + def _join_params(params: List[Param]) -> str: return ", ".join(p.p_type + " " + p.name for p in params) def _get_interop_func_text(self, func: CsFunc) -> str: @@ -49,7 +50,7 @@ def _get_derived_handle_text(derived_class_name: str, base_class_name: str) -> s def _get_property_text(self, clib_area: str, c_name: str, cs_name: str, - known_funcs: dict[str, CsFunc]) -> str: + known_funcs: Dict[str, CsFunc]) -> str: getter = known_funcs.get(clib_area + "_" + c_name) if getter: @@ -193,7 +194,7 @@ def _write_file(self, filename: str, contents: str): self._out_dir.joinpath(filename).write_text(contents) - def _scaffold_interop(self, header_file_path: Path, cs_funcs: list[CsFunc]): + def _scaffold_interop(self, header_file_path: Path, cs_funcs: List[CsFunc]): functions_text = "\n\n".join(map(self._get_interop_func_text, cs_funcs)) interop_text = normalize_indent(f""" @@ -212,7 +213,7 @@ def _scaffold_interop(self, header_file_path: Path, cs_funcs: list[CsFunc]): self._write_file("Interop.LibCantera." 
+ header_file_path.name + ".g.cs", interop_text) - def _scaffold_handles(self, header_file_path: Path, handles: dict[str, str]): + def _scaffold_handles(self, header_file_path: Path, handles: Dict[str, str]): handles_text = "\n\n".join(starmap(self._get_base_handle_text, handles.items())) handles_text = normalize_indent(f""" @@ -240,8 +241,8 @@ def _scaffold_derived_handles(self): self._write_file("Interop.Handles.g.cs", derived_handles_text) - def _scaffold_wrapper_class(self, clib_area: str, props: dict[str, str], - known_funcs: dict[str, CsFunc]): + def _scaffold_wrapper_class(self, clib_area: str, props: Dict[str, str], + known_funcs: Dict[str, CsFunc]): wrapper_class_name = self._get_wrapper_class_name(clib_area) handle_class_name = self._get_handle_class_name(clib_area) @@ -277,10 +278,10 @@ def _scaffold_wrapper_class(self, clib_area: str, props: dict[str, str], self._write_file(wrapper_class_name + ".g.cs", wrapper_class_text) - def generate_source(self, headers_files: list[HeaderFile]): + def generate_source(self, headers_files: List[HeaderFile]): self._out_dir.mkdir(parents=True, exist_ok=True) - known_funcs: dict[str, list[CsFunc]] = {} + known_funcs: Dict[str, List[CsFunc]] = {} for header_file in headers_files: cs_funcs = list(map(self._convert_func, header_file.funcs)) diff --git a/interfaces/sourcegen/sourcegen/csharp/_Config.py b/interfaces/sourcegen/sourcegen/csharp/_Config.py index 0165f2f49a4..4d71bd097aa 100644 --- a/interfaces/sourcegen/sourcegen/csharp/_Config.py +++ b/interfaces/sourcegen/sourcegen/csharp/_Config.py @@ -2,6 +2,7 @@ # at https://cantera.org/license.txt for license and copyright information. 
from dataclasses import dataclass +from typing import Dict from .._helpers import get_preamble, normalize_indent @@ -31,11 +32,11 @@ class Config: preamble = "/*\n" + get_preamble() + "*/" # These we load from the parsed YAML config file - class_crosswalk: dict[str, str] + class_crosswalk: Dict[str, str] - derived_handles: dict[str, str] + derived_handles: Dict[str, str] - wrapper_classes: dict[str, dict[str, str]] + wrapper_classes: Dict[str, Dict[str, str]] @staticmethod def from_parsed(parsed_config_file: dict): diff --git a/interfaces/sourcegen/sourcegen/csharp/_dataclasses.py b/interfaces/sourcegen/sourcegen/csharp/_dataclasses.py index 2b91f585b67..a61dd575c8c 100644 --- a/interfaces/sourcegen/sourcegen/csharp/_dataclasses.py +++ b/interfaces/sourcegen/sourcegen/csharp/_dataclasses.py @@ -2,6 +2,7 @@ # at https://cantera.org/license.txt for license and copyright information. from dataclasses import dataclass +from typing import Union from .._helpers import with_unpack_iter from .._dataclasses import Func @@ -13,4 +14,4 @@ class CsFunc(Func): """Represents a C# interop method""" is_handle_release_func: bool - handle_class_name: str | None + handle_class_name: Union[str, None] From 3ddcea649ad938a5f32c015292be9761bce41dcf Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Fri, 30 Sep 2022 22:13:46 -0400 Subject: [PATCH 19/93] [CI] Test .NET interface --- .github/workflows/main.yml | 89 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 89 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b38f4f9e917..d8a3959e2d8 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -53,6 +53,13 @@ jobs: run: python3 -m pip install typing_extensions - name: Build Cantera run: python3 `which scons` build env_vars=all -j2 debug=n --debug=time + - name: Upload shared library + uses: actions/upload-artifact@v3 + if: matrix.python-version == '3.10' && matrix.os == 'ubuntu-22.04' + with: + path: 
build/lib/libcantera_shared.so + name: libcantera_shared.so + retention-days: 2 - name: Test Cantera run: python3 `which scons` test show_long_tests=yes verbose_tests=yes --debug=time @@ -122,6 +129,13 @@ jobs: run: python3 -m pip install typing_extensions - name: Build Cantera run: scons build env_vars=all -j3 debug=n --debug=time + - name: Upload shared library + uses: actions/upload-artifact@v3 + if: matrix.python-version == '3.10' + with: + path: build/lib/libcantera_shared.dylib + name: libcantera_shared.dylib + retention-days: 2 - name: Test Cantera run: scons test show_long_tests=yes verbose_tests=yes --debug=time @@ -153,6 +167,10 @@ jobs: - name: Install Python dependencies run: python3 -m pip install ruamel.yaml scons numpy cython h5py pandas scipy pytest pytest-github-actions-annotate-failures pytest-cov gcovr + - name: Setup .NET Core SDK + uses: actions/setup-dotnet@v2 + with: + dotnet-version: '6.x' - name: Build Cantera run: | python3 `which scons` build blas_lapack_libs=lapack,blas coverage=y \ @@ -161,6 +179,12 @@ jobs: - name: Test Cantera run: python3 `which scons` test show_long_tests=yes verbose_tests=yes --debug=time + - name: Build the .NET interface + run: dotnet build + working-directory: interfaces/dotnet + - name: Test the .NET interface + run: dotnet test + working-directory: interfaces/dotnet - name: Process coverage files run: | gcovr --root . 
--exclude-unreachable-branches --exclude-throw-branches \ @@ -422,6 +446,13 @@ jobs: run: scons build system_eigen=y system_yamlcpp=y logging=debug msvc_version=${{ matrix.vs-toolset }} f90_interface=n debug=n --debug=time -j2 shell: pwsh + - name: Upload shared library + uses: actions/upload-artifact@v3 + if: matrix.python-version == '3.10' + with: + path: build/lib/cantera_shared.dll + name: cantera_shared.dll + retention-days: 2 - name: Test Cantera run: scons test show_long_tests=yes verbose_tests=yes --debug=time shell: pwsh @@ -642,3 +673,61 @@ jobs: shell: cmd - name: Test Cantera run: scons test show_long_tests=yes verbose_tests=yes --debug=time + + dotnet: + name: .NET on ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-22.04, windows-2022, macos-11] + fail-fast: false + runs-on: ${{ matrix.os }} + needs: [ubuntu-multiple-pythons, macos-multiple-pythons, windows-2022] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v2 + name: Checkout the repository + - name: Set up conda + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: "3.10" + mamba-version: "*" + activate-environment: test + channels: conda-forge,defaults + channel-priority: true + - name: Install Python dependencies + run: python3 -m pip install ruamel.yaml + - name: Install library dependencies with Conda (Windows) + run: mamba install -q yaml-cpp mkl + shell: pwsh + if: matrix.os == 'windows-2022' + - name: Setup .NET Core SDK + uses: actions/setup-dotnet@v2 + with: + dotnet-version: '6.x' + - name: Download the Cantera shared library (.so) + uses: actions/download-artifact@v3 + with: + name: libcantera_shared.so + path: build/lib + - name: Download the Cantera shared library (.dylib) + uses: actions/download-artifact@v3 + with: + name: libcantera_shared.dylib + path: build/lib + - name: Download the Cantera shared library (.dll) + uses: actions/download-artifact@v3 + with: + name: cantera_shared.dll + path: build/lib + - name: Build the 
.NET interface + run: dotnet build + working-directory: interfaces/dotnet + - name: Test the .NET interface + run: dotnet test + working-directory: interfaces/dotnet + - name: Run the .NET samples + run: | + dotnet run --project examples/Application + dotnet run --project examples/SoundSpeed + working-directory: interfaces/dotnet From 923a52dddf6917cf31246ea206b4c0e8eb7cfa89 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Thu, 27 Oct 2022 22:59:08 -0400 Subject: [PATCH 20/93] [CI] Collect coverage info for .NET --- .github/workflows/main.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d8a3959e2d8..d3a4764f8d3 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -183,7 +183,10 @@ jobs: run: dotnet build working-directory: interfaces/dotnet - name: Test the .NET interface - run: dotnet test + # Collect coverage info using Coverlet (identified by magic string below) + run: | + dotnet test --collect:"XPlat Code Coverage" + mv Cantera.Tests/TestResults/*/coverage.cobertura.xml . 
working-directory: interfaces/dotnet - name: Process coverage files run: | @@ -207,7 +210,7 @@ jobs: uses: codecov/codecov-action@v3 with: verbose: true - files: ./coverage.xml,./build/pycov.xml + files: ./coverage.xml,./build/pycov.xml,./interfaces/dotnet/coverage.cobertura.xml fail_ci_if_error: true example-keywords: From 9f4e644285483bc3ed0a46f6d58fc004cb1a843a Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Sat, 29 Oct 2022 10:48:24 -0400 Subject: [PATCH 21/93] [CI] Save HTML coverage report for .NET interface --- .github/workflows/main.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d3a4764f8d3..1412c54b3f6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -187,6 +187,9 @@ jobs: run: | dotnet test --collect:"XPlat Code Coverage" mv Cantera.Tests/TestResults/*/coverage.cobertura.xml . + dotnet new tool-manifest + dotnet tool install --local dotnet-reportgenerator-globaltool + dotnet reportgenerator -reports:"coverage.cobertura.xml" -targetdir:"coveragereport" -reporttypes:Html working-directory: interfaces/dotnet - name: Process coverage files run: | @@ -206,6 +209,12 @@ jobs: name: python-coverage-report path: build/python-coverage* retention-days: 5 + - name: Archive .NET coverage results + uses: actions/upload-artifact@v2 + with: + name: dotnet-coverage-report + path: interfaces/dotnet/coveragereport* + retention-days: 5 - name: Upload Coverage to Codecov uses: codecov/codecov-action@v3 with: From ce75be4e153b5c7665ae8c8a149b9c9f95264dda Mon Sep 17 00:00:00 2001 From: Bryan Weber Date: Mon, 12 Sep 2022 06:48:00 -0400 Subject: [PATCH 22/93] [SCons] Integrate MSVC options introduced in SCons 4.4.0 - Include changes suggested by jcbrill --- .github/workflows/main.yml | 9 +++++---- SConstruct | 35 +++++++++++++++++++++++++++-------- 2 files changed, 32 insertions(+), 12 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 
1412c54b3f6..1ca30a6e1d2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -428,7 +428,7 @@ jobs: strategy: matrix: os: ["windows-2022"] - vs-toolset: ["14.3"] # 'cl' is not recognized for earlier toolsets + vs-toolset: ["14.1", "14.2", "14.3"] # 'cl' is not recognized for earlier toolsets python-version: [ "3.7", "3.9", "3.10" ] fail-fast: false steps: @@ -448,15 +448,16 @@ jobs: - name: Install conda dependencies # See https://github.com/conda-forge/boost-cpp-feedstock/issues/41 for why we # use boost-cpp rather than boost from conda-forge - run: mamba install -q scons numpy cython ruamel.yaml boost-cpp eigen yaml-cpp - h5py pandas pytest mkl mkl-devel + # Install SCons >=4.4.0 to make sure that MSVC_TOOLSET_VERSION variable is present + run: | + mamba install -q '"scons<4.4.0"' numpy cython ruamel.yaml boost-cpp eigen yaml-cpp h5py pandas pytest shell: pwsh - name: Install typing_extensions for Python 3.7 if: matrix.python-version == '3.7' run: mamba install -q typing_extensions - name: Build Cantera run: scons build system_eigen=y system_yamlcpp=y logging=debug - msvc_version=${{ matrix.vs-toolset }} f90_interface=n debug=n --debug=time -j2 + msvc_toolset_version=${{ matrix.vs-toolset }} msvc_version=14.3 f90_interface=n debug=n --debug=time -j2 shell: pwsh - name: Upload shared library uses: actions/upload-artifact@v3 diff --git a/SConstruct b/SConstruct index fc3cf277b36..0455a420235 100644 --- a/SConstruct +++ b/SConstruct @@ -167,11 +167,25 @@ logger.info( windows_options = [ Option( "msvc_version", - """Version of Visual Studio to use. The default is the newest - installed version. Specify '14.1' ('14.1x') Visual Studio 2017, '14.2' - ('14.2x') for Visual Studio 2019, or '14.3' ('14.3x') for - Visual Studio 2022. For version numbers in parentheses, - 'x' is a placeholder for a minor version number. Windows MSVC only.""", + """Version of Visual Studio to use. The default is the newest installed version. 
+ Note that since multiple MSVC toolsets can be installed for a single version of + Visual Studio, you probably want to use ``msvc_toolset_version`` unless you + specifically installed multiple versions of Visual Studio. Windows MSVC only. + """, + ""), + Option( + "msvc_toolset_version", + """Version of the MSVC toolset to use. The default is the default version for + the given ``msvc_version``. Note that the toolset selected here must be + installed in the MSVC version selected by ``msvc_version``. The default + toolsets associated with various Visual Studio versions are: + + * '14.1' ('14.1x'): Visual Studio 2017 + * '14.2' ('14.2x'): Visual Studio 2019 + * '14.3' ('14.3x'): Visual Studio 2022. + + For version numbers in parentheses, 'x' is a placeholder for a minor version + number. Windows MSVC only.""", ""), EnumOption( "target_arch", @@ -722,7 +736,7 @@ if os.name == "nt": if "64 bit" not in sys.version: config["target_arch"].default = "x86" - opts.AddVariables(*config.to_scons(("msvc_version", "target_arch"))) + opts.AddVariables(*config.to_scons(("msvc_version", "msvc_toolset_version", "target_arch"))) windows_compiler_env = Environment() opts.Update(windows_compiler_env) @@ -731,7 +745,7 @@ if os.name == "nt": if which("g++") and not which("cl.exe"): config["toolchain"].default = "mingw" - if windows_compiler_env["msvc_version"]: + if windows_compiler_env["msvc_version"] or windows_compiler_env["msvc_toolset_version"]: config["toolchain"].default = "msvc" opts.AddVariables(*config.to_scons("toolchain")) @@ -741,9 +755,14 @@ if os.name == "nt": toolchain = ["default"] if windows_compiler_env["msvc_version"]: extraEnvArgs["MSVC_VERSION"] = windows_compiler_env["msvc_version"] + if windows_compiler_env["msvc_toolset_version"]: + extraEnvArgs["MSVC_TOOLSET_VERSION"] = windows_compiler_env["msvc_toolset_version"] msvc_version = (windows_compiler_env["msvc_version"] or windows_compiler_env["MSVC_VERSION"]) - logger.info(f"Compiling with MSVC {msvc_version}", 
print_level=False) + logger.info(f"Compiling with MSVC version {msvc_version}", print_level=False) + msvc_toolset = (windows_compiler_env["msvc_toolset_version"] or + windows_compiler_env["MSVC_TOOLSET_VERSION"]) + logger.info(f"Compiling with MSVC Toolset {msvc_toolset}", print_level=False) elif windows_compiler_env["toolchain"] == "mingw": toolchain = ["mingw", "f90"] From 18784fd9a826acdfa75774d32fcfa1112ed4e73e Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 3 Nov 2022 08:29:38 -0500 Subject: [PATCH 23/93] [SCons] Fix logic and improve logging --- SConstruct | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/SConstruct b/SConstruct index 0455a420235..118c0c63084 100644 --- a/SConstruct +++ b/SConstruct @@ -758,11 +758,12 @@ if os.name == "nt": if windows_compiler_env["msvc_toolset_version"]: extraEnvArgs["MSVC_TOOLSET_VERSION"] = windows_compiler_env["msvc_toolset_version"] msvc_version = (windows_compiler_env["msvc_version"] or - windows_compiler_env["MSVC_VERSION"]) + windows_compiler_env.get("MSVC_VERSION")) logger.info(f"Compiling with MSVC version {msvc_version}", print_level=False) msvc_toolset = (windows_compiler_env["msvc_toolset_version"] or - windows_compiler_env["MSVC_TOOLSET_VERSION"]) - logger.info(f"Compiling with MSVC Toolset {msvc_toolset}", print_level=False) + windows_compiler_env.get("MSVC_TOOLSET_VERSION") or + f"{msvc_version} (default)") + logger.info(f"Compiling with MSVC toolset {msvc_toolset}", print_level=False) elif windows_compiler_env["toolchain"] == "mingw": toolchain = ["mingw", "f90"] From 577455cf143178407729f1fdcb9085570dc84b3d Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 3 Nov 2022 08:47:06 -0500 Subject: [PATCH 24/93] [CI] Probe different SCons versions --- .github/workflows/main.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 1ca30a6e1d2..ea89c191322 100644 --- a/.github/workflows/main.yml 
+++ b/.github/workflows/main.yml @@ -428,7 +428,7 @@ jobs: strategy: matrix: os: ["windows-2022"] - vs-toolset: ["14.1", "14.2", "14.3"] # 'cl' is not recognized for earlier toolsets + vs-toolset: ["14.1", "14.3"] python-version: [ "3.7", "3.9", "3.10" ] fail-fast: false steps: @@ -450,14 +450,14 @@ jobs: # use boost-cpp rather than boost from conda-forge # Install SCons >=4.4.0 to make sure that MSVC_TOOLSET_VERSION variable is present run: | - mamba install -q '"scons<4.4.0"' numpy cython ruamel.yaml boost-cpp eigen yaml-cpp h5py pandas pytest + mamba install -q '"scons>=4.4.0"' numpy cython ruamel.yaml boost-cpp eigen yaml-cpp h5py pandas pytest shell: pwsh - name: Install typing_extensions for Python 3.7 if: matrix.python-version == '3.7' run: mamba install -q typing_extensions - name: Build Cantera run: scons build system_eigen=y system_yamlcpp=y logging=debug - msvc_toolset_version=${{ matrix.vs-toolset }} msvc_version=14.3 f90_interface=n debug=n --debug=time -j2 + msvc_toolset_version=${{ matrix.vs-toolset }} f90_interface=n debug=n --debug=time -j2 shell: pwsh - name: Upload shared library uses: actions/upload-artifact@v3 @@ -516,7 +516,7 @@ jobs: - name: Install Python dependencies run: | python -m pip install -U pip setuptools wheel - python -m pip install scons pypiwin32 numpy ruamel.yaml cython h5py pandas pytest pytest-github-actions-annotate-failures + python -m pip install '"scons<4.4.0"' pypiwin32 numpy ruamel.yaml cython h5py pandas pytest pytest-github-actions-annotate-failures - name: Install typing_extensions for Python 3.7 if: matrix.python-version == '3.7' run: python -m pip install typing_extensions From b0f141e9cdef324360210c39cbbd7b9e4b4521fd Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Mon, 14 Nov 2022 23:12:55 -0500 Subject: [PATCH 25/93] Replace usage of internal '_PyNamespace_New' function This function was removed from Python.h in Python 3.11. 
See https://github.com/python/cpython/pull/28970 --- src/extensions/PythonExtensionManager.cpp | 67 ++++++++++++----------- 1 file changed, 35 insertions(+), 32 deletions(-) diff --git a/src/extensions/PythonExtensionManager.cpp b/src/extensions/PythonExtensionManager.cpp index 55204450086..fda94c2c72b 100644 --- a/src/extensions/PythonExtensionManager.cpp +++ b/src/extensions/PythonExtensionManager.cpp @@ -63,6 +63,18 @@ std::string getPythonExceptionInfo() return message; } +void checkPythonError(bool condition, const std::string& message) { + if (condition) { + if (PyErr_Occurred()) { + PyErr_PrintEx(0); + } + throw Cantera::CanteraError( + "PythonExtensionManager::PythonExtensionManager", + message + ); + } +} + } // end anonymous namespace namespace Cantera @@ -115,40 +127,31 @@ PythonExtensionManager::PythonExtensionManager() "Failed to import 'pythonExtensions' module"); } - // Following example creation of minimal ModuleSpec from Python's import.c - PyObject *attrs = Py_BuildValue("{ss}", "name", "pythonExtensions"); - if (attrs == nullptr) { - if (PyErr_Occurred()) { - PyErr_PrintEx(0); - } - throw CanteraError("PythonExtensionManager::PythonExtensionManager", - "Py_BuildValue failed"); - } - PyObject *spec = _PyNamespace_New(attrs); - Py_DECREF(attrs); - if (spec == nullptr) { - if (PyErr_Occurred()) { - PyErr_PrintEx(0); - } - throw CanteraError("PythonExtensionManager::PythonExtensionManager", - "_PyNamespace_New failed"); - } + // Create a minimal ModuleSpec + PyObject* typesModule = PyImport_ImportModule("types"); + checkPythonError(typesModule == nullptr, "'import types' failed"); + PyObject* simpleNamespaceType = PyObject_GetAttrString(typesModule, + "SimpleNamespace"); + checkPythonError(simpleNamespaceType == nullptr, + "'Get SimpleNamespace type failed"); + Py_DecRef(simpleNamespaceType); + Py_DecRef(typesModule); + PyObject* empty_tuple = PyTuple_New(0); + PyObject* kwargs = PyDict_New(); + PyObject* strArg = 
PyUnicode_FromString("pythonExtensions"); + PyDict_SetItemString(kwargs, "name", strArg); + PyObject* spec = PyObject_Call(simpleNamespaceType, empty_tuple, kwargs); + checkPythonError(spec == nullptr, "Creating SimpleNamespace failed"); + Py_DecRef(empty_tuple); + Py_DecRef(kwargs); + Py_DecRef(empty_tuple); + Py_DecRef(strArg); + + // Build the module definition and execute it PyObject* pyModule = PyModule_FromDefAndSpec(modDef, spec); - if (pyModule == nullptr) { - if (PyErr_Occurred()) { - PyErr_PrintEx(0); - } - CanteraError("PythonExtensionManager::PythonExtensionManager", - "PyModule_FromDefAndSpec failed"); - } + checkPythonError(pyModule == nullptr, "PyModule_FromDefAndSpec failed"); int code = PyModule_ExecDef(pyModule, modDef); - if (code) { - if (PyErr_Occurred()) { - PyErr_PrintEx(0); - } - CanteraError("PythonExtensionManager::PythonExtensionManager", - "PyModule_ExecDef failed"); - } + checkPythonError(code, "PyModule_ExecDef failed"); Py_DECREF(spec); Py_DECREF(pyModule); s_imported = true; From 2fafee813d8728312f0cb6cea0322cce8ee295af Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Tue, 15 Nov 2022 16:37:59 -0500 Subject: [PATCH 26/93] Drop support for Python 3.7 --- CONTRIBUTING.md | 4 ++-- SConstruct | 4 ++-- interfaces/cython/cantera/ctml2yaml.py | 5 +---- interfaces/cython/cantera/yaml2ck.py | 8 +------- interfaces/cython/setup.cfg.in | 3 +-- interfaces/python_minimal/setup.cfg.in | 5 ++--- interfaces/python_sdist/setup.cfg.in | 5 ++--- site_scons/buildutils.py | 4 ---- 8 files changed, 11 insertions(+), 27 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5607e581529..a604b7e9af3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -102,8 +102,8 @@ * Style generally follows PEP8 (https://www.python.org/dev/peps/pep-0008/) * Code in `.py` and `.pyx` files needs to be written to work with Python 3 -* The minimum Python version that Cantera supports is Python 3.7, so code should only - use features added in Python 3.7 or earlier 
+* The minimum Python version that Cantera supports is Python 3.8, so code should only + use features added in Python 3.8 or earlier * Indicate the version added for new functions and classes with an annotation like `.. versionadded:: X.Y` where `X.Y` is the next Cantera version. Significant changes in behavior should be indicated with `.. versionchanged:: X.Y`. diff --git a/SConstruct b/SConstruct index 118c0c63084..bd3b21b53ef 100644 --- a/SConstruct +++ b/SConstruct @@ -65,7 +65,7 @@ Additional command options: # and simplest option that will reliably trigger an error in Python 2 # and provide actionable feedback for users. f""" -Cantera must be built using Python 3.7 or higher. You can invoke SCons by executing +Cantera must be built using Python 3.8 or higher. You can invoke SCons by executing python3 `which scons` followed by any desired options. """ @@ -1578,7 +1578,7 @@ logger.debug("\n".join(debug_message), print_level=False) env['python_cmd_esc'] = quoted(env['python_cmd']) # Python Package Settings -python_min_version = parse_version("3.7") +python_min_version = parse_version("3.8") # The string is used to set python_requires in setup.cfg.in env['py_min_ver_str'] = str(python_min_version) # Note: cython_min_version is redefined below if the Python version is 3.8 or higher diff --git a/interfaces/cython/cantera/ctml2yaml.py b/interfaces/cython/cantera/ctml2yaml.py index 3a000d2bf98..6bc8a96c2a2 100644 --- a/interfaces/cython/cantera/ctml2yaml.py +++ b/interfaces/cython/cantera/ctml2yaml.py @@ -21,7 +21,7 @@ import warnings import copy -from typing import Any, Dict, Union, Iterable, Optional, List, Tuple +from typing import Any, Dict, Union, Iterable, Optional, List, Tuple, TypedDict from typing import TYPE_CHECKING import numpy as np @@ -47,9 +47,6 @@ ) if TYPE_CHECKING: - # This is available in the built-in typing module in Python 3.8 - from typing_extensions import TypedDict - QUANTITY = Union[float, str] RK_EOS_DICT = TypedDict( diff --git 
a/interfaces/cython/cantera/yaml2ck.py b/interfaces/cython/cantera/yaml2ck.py index 81a8797a6d5..1e8327da444 100644 --- a/interfaces/cython/cantera/yaml2ck.py +++ b/interfaces/cython/cantera/yaml2ck.py @@ -61,13 +61,7 @@ from textwrap import fill, dedent, TextWrapper import cantera as ct from email.utils import formatdate -from typing import Optional, Iterable - -try: - from typing import Literal -except ImportError: - # Needed for Python 3.7 support - from typing_extensions import Literal +from typing import Optional, Iterable, Literal if sys.version_info < (3, 9): class BooleanOptionalAction(argparse.Action): diff --git a/interfaces/cython/setup.cfg.in b/interfaces/cython/setup.cfg.in index e857de7f141..642a9b2ef31 100644 --- a/interfaces/cython/setup.cfg.in +++ b/interfaces/cython/setup.cfg.in @@ -23,10 +23,10 @@ classifiers = Programming Language :: Cython Programming Language :: Fortran Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 Programming Language :: Python :: Implementation :: CPython Topic :: Scientific/Engineering :: Chemistry Topic :: Scientific/Engineering :: Physics @@ -42,7 +42,6 @@ include_package_data = True install_requires = numpy >= 1.12.0 ruamel.yaml >= 0.15.34 - typing_extensions >=4.2.0,<4.3.0;python_version<'3.8' python_requires = >=@py_min_ver_str@ packages = cantera diff --git a/interfaces/python_minimal/setup.cfg.in b/interfaces/python_minimal/setup.cfg.in index 97987f49cd5..3e704e335dd 100644 --- a/interfaces/python_minimal/setup.cfg.in +++ b/interfaces/python_minimal/setup.cfg.in @@ -19,10 +19,10 @@ classifiers = Operating System :: Microsoft :: Windows Operating System :: POSIX :: Linux Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 Programming Language :: Python :: 
3.8 Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 Topic :: Scientific/Engineering :: Chemistry Topic :: Scientific/Engineering :: Physics project_urls = @@ -36,7 +36,6 @@ zip_safe = True install_requires = numpy >= 1.12.0 ruamel.yaml >= 0.15.34 - typing_extensions >=4.2.0,<4.3.0;python_version<'3.8' python_requires = >=@py_min_ver_str@ packages = cantera diff --git a/interfaces/python_sdist/setup.cfg.in b/interfaces/python_sdist/setup.cfg.in index 0d480cacebc..db4c046355e 100644 --- a/interfaces/python_sdist/setup.cfg.in +++ b/interfaces/python_sdist/setup.cfg.in @@ -23,10 +23,10 @@ classifiers = Programming Language :: Cython Programming Language :: Fortran Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 Programming Language :: Python :: Implementation :: CPython Topic :: Scientific/Engineering :: Chemistry Topic :: Scientific/Engineering :: Physics @@ -42,7 +42,6 @@ include_package_data = True install_requires = numpy >= 1.12.0 ruamel.yaml >= 0.15.34 - typing_extensions >=4.2.0,<4.3.0;python_version<'3.8' python_requires = >=@py_min_ver_str@ packages = cantera diff --git a/site_scons/buildutils.py b/site_scons/buildutils.py index 06eabdda671..bfc234293e3 100644 --- a/site_scons/buildutils.py +++ b/site_scons/buildutils.py @@ -1327,10 +1327,6 @@ def setup_python_env(env): env.Append(LIBS=f"python{py_version_nodot}") if env['OS_BITS'] == 64: env.Append(CPPDEFINES='MS_WIN64') - # Fix for https://bugs.python.org/issue11566. Fixed in 3.7.3 and higher. 
- # See https://github.com/python/cpython/pull/11283 - if py_version_full < parse_version("3.7.3"): - env.Append(CPPDEFINES={"_hypot": "hypot"}) if "numpy_1_7_API" in env: env.Append(CPPDEFINES="NPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION") From d5068c7f55c38dd09592af02735dafeaa5ea1395 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Sun, 20 Nov 2022 10:45:28 -0500 Subject: [PATCH 27/93] [CI] Test with Python 3.11 --- .github/workflows/main.yml | 27 ++++++--------------------- 1 file changed, 6 insertions(+), 21 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ea89c191322..2240df04735 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -25,7 +25,7 @@ jobs: timeout-minutes: 60 strategy: matrix: - python-version: [ '3.7', '3.9', '3.10' ] + python-version: ['3.8', '3.10', '3.11'] os: ['ubuntu-20.04', 'ubuntu-22.04'] fail-fast: false steps: @@ -48,9 +48,6 @@ jobs: - name: Install Python dependencies run: python3 -m pip install ruamel.yaml scons==3.1.2 numpy cython h5py pandas pytest pytest-github-actions-annotate-failures - - name: Install typing_extensions for Python 3.7 - if: matrix.python-version == '3.7' - run: python3 -m pip install typing_extensions - name: Build Cantera run: python3 `which scons` build env_vars=all -j2 debug=n --debug=time - name: Upload shared library @@ -101,7 +98,7 @@ jobs: timeout-minutes: 90 strategy: matrix: - python-version: [ '3.7', '3.9', '3.10' ] + python-version: ['3.8', '3.10', '3.11'] fail-fast: false steps: # Attempt to fix intermittent cloning errors. 
The error message says something like @@ -124,9 +121,6 @@ jobs: - name: Install Python dependencies run: python3 -m pip install ruamel.yaml numpy cython h5py pandas pytest pytest-github-actions-annotate-failures - - name: Install typing_extensions for Python 3.7 - if: matrix.python-version == '3.7' - run: python3 -m pip install typing_extensions - name: Build Cantera run: scons build env_vars=all -j3 debug=n --debug=time - name: Upload shared library @@ -311,7 +305,7 @@ jobs: timeout-minutes: 60 strategy: matrix: - python-version: ['3.7', '3.9', '3.10'] + python-version: ['3.8', '3.10', '3.11'] fail-fast: false steps: - uses: actions/checkout@v2 @@ -331,9 +325,6 @@ jobs: run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies run: python3 -m pip install ruamel.yaml scons numpy cython h5py pandas matplotlib scipy - - name: Install typing_extensions for Python 3.7 - if: matrix.python-version == '3.7' - run: python3 -m pip install typing_extensions - name: Build Cantera # compile with GCC 9.4.0 on ubuntu-20.04 as an alternative to the default # (GCC 7.5.0 is both default and oldest supported version) @@ -429,7 +420,7 @@ jobs: matrix: os: ["windows-2022"] vs-toolset: ["14.1", "14.3"] - python-version: [ "3.7", "3.9", "3.10" ] + python-version: ["3.8", "3.10", "3.11"] fail-fast: false steps: - uses: actions/checkout@v2 @@ -452,9 +443,6 @@ jobs: run: | mamba install -q '"scons>=4.4.0"' numpy cython ruamel.yaml boost-cpp eigen yaml-cpp h5py pandas pytest shell: pwsh - - name: Install typing_extensions for Python 3.7 - if: matrix.python-version == '3.7' - run: mamba install -q typing_extensions - name: Build Cantera run: scons build system_eigen=y system_yamlcpp=y logging=debug msvc_toolset_version=${{ matrix.vs-toolset }} f90_interface=n debug=n --debug=time -j2 @@ -501,7 +489,7 @@ jobs: strategy: matrix: vs-toolset: ['14.2'] - python-version: [ "3.7", "3.9", "3.10" ] + python-version: ["3.8", "3.10", "3.11"] fail-fast: false steps: - uses: 
actions/checkout@v2 @@ -516,10 +504,7 @@ jobs: - name: Install Python dependencies run: | python -m pip install -U pip setuptools wheel - python -m pip install '"scons<4.4.0"' pypiwin32 numpy ruamel.yaml cython h5py pandas pytest pytest-github-actions-annotate-failures - - name: Install typing_extensions for Python 3.7 - if: matrix.python-version == '3.7' - run: python -m pip install typing_extensions + python -m pip install '"scons<4.4.0"' pypiwin32 numpy ruamel.yaml cython pandas pytest pytest-github-actions-annotate-failures - name: Restore Boost cache uses: actions/cache@v2 id: cache-boost From f04d48222351785328fbc75542ebd73d80de1d46 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Tue, 15 Nov 2022 17:17:57 -0500 Subject: [PATCH 28/93] Eliminate use of deprecated Py_SetProgramName function --- src/extensions/PythonExtensionManager.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/extensions/PythonExtensionManager.cpp b/src/extensions/PythonExtensionManager.cpp index fda94c2c72b..43aa6522211 100644 --- a/src/extensions/PythonExtensionManager.cpp +++ b/src/extensions/PythonExtensionManager.cpp @@ -88,6 +88,9 @@ PythonExtensionManager::PythonExtensionManager() // Update the path to include the virtual environment, if one is active const char* venv_path = getenv("VIRTUAL_ENV"); if (venv_path != nullptr) { + PyConfig pyconf; + PyConfig_InitPythonConfig(&pyconf); + #ifdef _WIN32 string suffix = "\\Scripts\\python.exe"; #else @@ -96,7 +99,10 @@ PythonExtensionManager::PythonExtensionManager() string path(venv_path); path += suffix; wstring wpath = wstring_convert>().from_bytes(path); - Py_SetProgramName(wpath.c_str()); + PyStatus status = PyConfig_SetString(&pyconf, &pyconf.program_name, + wpath.c_str()); + checkPythonError(PyStatus_Exception(status), "PyConfig_SetString failed"); + Py_InitializeFromConfig(&pyconf); } else { #if defined(CT_PYTHONHOME) && defined(_WIN32) const char* old_pythonhome = getenv("PYTHONHOME"); @@ -106,8 
+112,8 @@ PythonExtensionManager::PythonExtensionManager() _putenv(pythonhome.c_str()); } #endif + Py_Initialize(); } - Py_Initialize(); } if (s_imported) { From fbe7e71b3066cf9e6dfc659fdf7c62dacc3513a7 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Wed, 16 Nov 2022 10:06:51 -0500 Subject: [PATCH 29/93] Resolve rename of Sundials bandGBTRF/S The functions 'bandGBTRF' and 'bandGBTRS' were renamed in Sundials 6.0, with the old names deprecated and slated for removal in Sundials 7.0. The new names are 'SUNDlsMat_bandGBTRF' and 'SUNDlsMat_bandGBTRS'. --- src/numerics/BandMatrix.cpp | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/src/numerics/BandMatrix.cpp b/src/numerics/BandMatrix.cpp index 8c8b66db839..120da790110 100644 --- a/src/numerics/BandMatrix.cpp +++ b/src/numerics/BandMatrix.cpp @@ -244,8 +244,14 @@ int BandMatrix::factor() long int nu = static_cast(nSuperDiagonals()); long int nl = static_cast(nSubDiagonals()); long int smu = nu + nl; - m_info = bandGBTRF(m_lu_col_ptrs.data(), static_cast(nColumns()), - nu, nl, smu, m_ipiv->data.data()); + #if CT_SUNDIALS_VERSION >= 60 + m_info = SUNDlsMat_bandGBTRF(m_lu_col_ptrs.data(), + static_cast(nColumns()), + nu, nl, smu, m_ipiv->data.data()); + #else + m_info = bandGBTRF(m_lu_col_ptrs.data(), static_cast(nColumns()), + nu, nl, smu, m_ipiv->data.data()); + #endif #endif if (m_info != 0) { throw Cantera::CanteraError("BandMatrix::factor", @@ -278,7 +284,13 @@ int BandMatrix::solve(doublereal* b, size_t nrhs, size_t ldb) long int nl = static_cast(nSubDiagonals()); long int smu = nu + nl; double** a = m_lu_col_ptrs.data(); - bandGBTRS(a, static_cast(nColumns()), smu, nl, m_ipiv->data.data(), b); + #if CT_SUNDIALS_VERSION >= 60 + SUNDlsMat_bandGBTRS(a, static_cast(nColumns()), smu, nl, + m_ipiv->data.data(), b); + #else + bandGBTRS(a, static_cast(nColumns()), smu, nl, + m_ipiv->data.data(), b); + #endif m_info = 0; #endif From b0932bb446ce463f03b101eb9b433c20340a35df Mon Sep 17 
00:00:00 2001 From: Ray Speth Date: Wed, 16 Nov 2022 10:11:02 -0500 Subject: [PATCH 30/93] [CI] Test with Sundials 6.4.1 --- .github/workflows/main.yml | 2 +- SConstruct | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2240df04735..61ba0d53a1c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -360,7 +360,7 @@ jobs: shell: bash -l {0} strategy: matrix: - sundials-ver: [ 3, 4, 5.8, 6.2 ] + sundials-ver: [ 3, 4, 5.8, 6.4.1 ] fail-fast: false steps: - uses: actions/checkout@v2 diff --git a/SConstruct b/SConstruct index bd3b21b53ef..d408863ed0c 100644 --- a/SConstruct +++ b/SConstruct @@ -1462,7 +1462,7 @@ if env['system_sundials'] == 'y': if sundials_ver < parse_version("3.0") or sundials_ver >= parse_version("7.0"): logger.error(f"Sundials version {env['sundials_version']!r} is not supported.") sys.exit(1) - elif sundials_ver > parse_version("6.2"): + elif sundials_ver > parse_version("6.4.1"): logger.warning(f"Sundials version {env['sundials_version']!r} has not been tested.") logger.info(f"Using system installation of Sundials version {sundials_version!r}.") From e43cf38940b99a73dc6d69e0f12b3b939b9539c9 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Wed, 16 Nov 2022 20:21:22 -0500 Subject: [PATCH 31/93] [CI] Use correct Python version on macOS Linking to libpython with GitHub's Python 3.8 and 3.10 requires linking to libintl, but no compatible version of this library is present in the GitHub actions images (the Homebrew one has the wrong "macOS deployment target"). This problem is fixed in GitHub's build of Python 3.11. Scons 4.4.0 (and maybe other versions) can't be pip installed for the Homebrew Python, since it tries to write man pages into an incorrect, read-only directory. The Homebrew-installed Python was never actually being used. All macOS builds are done using Python cached in the GitHub actions image. 
--- .github/workflows/main.yml | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 61ba0d53a1c..7e8392f9726 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -100,6 +100,8 @@ jobs: matrix: python-version: ['3.8', '3.10', '3.11'] fail-fast: false + env: + PYTHON_CMD: "python${{ matrix.python-version }}" steps: # Attempt to fix intermittent cloning errors. The error message says something like # error: RPC failed; curl 18 transfer closed with outstanding read data remaining @@ -113,16 +115,30 @@ jobs: name: Checkout the repository with: submodules: recursive + - name: Setup GH Python + uses: actions/setup-python@v4 + with: + python-version: 3.11 + if: matrix.python-version == '3.11' - name: Install Brew dependencies + run: brew install boost libomp + - name: Setup Homebrew Python + # This path should work for future Python versions as well + if: matrix.python-version != '3.11' run: | - brew install boost libomp scons python@${{ matrix.python-version }} + brew install python@${{ matrix.python-version }} + brew link --force --overwrite python@${{ matrix.python-version }} + brew install scons - name: Upgrade pip - run: python3 -m pip install -U pip 'setuptools>=47.0.0,<48' wheel + run: $PYTHON_CMD -m pip install -U pip 'setuptools>=47.0.0,<48' wheel - name: Install Python dependencies - run: python3 -m pip install ruamel.yaml numpy cython h5py pandas pytest - pytest-github-actions-annotate-failures + run: $PYTHON_CMD -m pip install ruamel.yaml numpy cython h5py pandas pytest pytest-github-actions-annotate-failures + - name: Install Python dependencies for GH Python + if: matrix.python-version == '3.11' + run: + $PYTHON_CMD -m pip install scons - name: Build Cantera - run: scons build env_vars=all -j3 debug=n --debug=time + run: scons build env_vars=all -j3 python_cmd=$PYTHON_CMD debug=n --debug=time - name: Upload shared library uses: 
actions/upload-artifact@v3 if: matrix.python-version == '3.10' From 7d86e2611224879d1666deb1a24de859b5175061 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Sun, 20 Nov 2022 11:08:06 -0500 Subject: [PATCH 32/93] [CI] Make h5py optional for some builds PyPI packages are not always available for the most recent Python version on all platforms. --- .github/workflows/main.yml | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7e8392f9726..c712e5df529 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -46,8 +46,9 @@ jobs: - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies - run: python3 -m pip install ruamel.yaml scons==3.1.2 numpy cython h5py pandas - pytest pytest-github-actions-annotate-failures + run: | + python3 -m pip install ruamel.yaml scons==3.1.2 numpy cython pandas pytest pytest-github-actions-annotate-failures + python3 -m pip install h5py || true - name: Build Cantera run: python3 `which scons` build env_vars=all -j2 debug=n --debug=time - name: Upload shared library @@ -132,7 +133,10 @@ jobs: - name: Upgrade pip run: $PYTHON_CMD -m pip install -U pip 'setuptools>=47.0.0,<48' wheel - name: Install Python dependencies - run: $PYTHON_CMD -m pip install ruamel.yaml numpy cython h5py pandas pytest pytest-github-actions-annotate-failures + # h5py is optional; may fail if no wheel is present for a given OS/Python version + run: | + $PYTHON_CMD -m pip install ruamel.yaml numpy cython pandas pytest pytest-github-actions-annotate-failures + $PYTHON_CMD -m pip install h5py || true - name: Install Python dependencies for GH Python if: matrix.python-version == '3.11' run: @@ -340,7 +344,9 @@ jobs: - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies - run: python3 -m pip install ruamel.yaml scons numpy cython h5py pandas matplotlib scipy + run: 
| + python3 -m pip install ruamel.yaml scons numpy cython pandas matplotlib scipy + python3 -m pip install h5py || true - name: Build Cantera # compile with GCC 9.4.0 on ubuntu-20.04 as an alternative to the default # (GCC 7.5.0 is both default and oldest supported version) From 6042429d9f9f04daad63ca077587392004838329 Mon Sep 17 00:00:00 2001 From: ssun30 Date: Fri, 9 Dec 2022 20:02:28 -0500 Subject: [PATCH 33/93] Removed redundant error checks. --- src/clib/ct.cpp | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/clib/ct.cpp b/src/clib/ct.cpp index d9f133c7825..daf0f7591ce 100644 --- a/src/clib/ct.cpp +++ b/src/clib/ct.cpp @@ -100,9 +100,6 @@ extern "C" { int thermo_setDensity(int n, double rho) { - if (rho < 0.0) { - return -1; - } try { ThermoCabinet::item(n).setDensity(rho); } catch (...) { @@ -122,9 +119,6 @@ extern "C" { int thermo_setMolarDensity(int n, double ndens) { - if (ndens < 0.0) { - return -1; - } try { ThermoCabinet::item(n).setMolarDensity(ndens); } catch (...) { From 2ce92eac2e01e2f208094582d203531c13dc9e2e Mon Sep 17 00:00:00 2001 From: ssun30 Date: Wed, 14 Dec 2022 21:57:24 -0500 Subject: [PATCH 34/93] Fixed Matlab legacy examples and added missing blank lines. Added Su Sun to AUTHORS. 
--- AUTHORS | 1 + .../toolbox/@ThermoPhase/private/phase_get.m | 2 +- .../toolbox/@ThermoPhase/private/phase_set.m | 2 +- .../toolbox/@ThermoPhase/private/thermo_set.m | 2 +- samples/matlab/ignite_hp.m | 2 +- samples/matlab/ignite_uv.m | 2 +- samples/matlab/prandtl1.m | 2 +- samples/matlab/prandtl2.m | 2 +- samples/matlab/reactor1.m | 2 +- samples/matlab/reactor2.m | 2 +- samples/matlab/surfreactor.m | 6 +++--- samples/matlab/test_examples.m | 19 ++++++++++++++++++- 12 files changed, 31 insertions(+), 13 deletions(-) diff --git a/AUTHORS b/AUTHORS index 00a5705406d..e7078c2ddcd 100644 --- a/AUTHORS +++ b/AUTHORS @@ -64,3 +64,4 @@ Armin Wehrfritz (@awehrfritz) Richard West (@rwest), Northeastern University Chao Xu (@12Chao), Northeastern University Thorsten Zirwes (@g3bk47), Karlsruhe Institute of Technology +Su Sun (@ssun30), Northeastern University diff --git a/interfaces/matlab/toolbox/@ThermoPhase/private/phase_get.m b/interfaces/matlab/toolbox/@ThermoPhase/private/phase_get.m index 0a2d17e6d31..28acd9c2f41 100644 --- a/interfaces/matlab/toolbox/@ThermoPhase/private/phase_get.m +++ b/interfaces/matlab/toolbox/@ThermoPhase/private/phase_get.m @@ -5,4 +5,4 @@ i = ctmethods(30, n, job, a); else i = ctmethods(30, n, job, a, b); -end \ No newline at end of file +end diff --git a/interfaces/matlab/toolbox/@ThermoPhase/private/phase_set.m b/interfaces/matlab/toolbox/@ThermoPhase/private/phase_set.m index 7c5b9bb6029..186590936f1 100644 --- a/interfaces/matlab/toolbox/@ThermoPhase/private/phase_set.m +++ b/interfaces/matlab/toolbox/@ThermoPhase/private/phase_set.m @@ -5,4 +5,4 @@ function phase_set(n, job, a, b) ctmethods(30, n, -job, a); else ctmethods(30, n,-job, a, b); -end \ No newline at end of file +end diff --git a/interfaces/matlab/toolbox/@ThermoPhase/private/thermo_set.m b/interfaces/matlab/toolbox/@ThermoPhase/private/thermo_set.m index 6377c4bc057..b4ca7e2d862 100644 --- a/interfaces/matlab/toolbox/@ThermoPhase/private/thermo_set.m +++ 
b/interfaces/matlab/toolbox/@ThermoPhase/private/thermo_set.m @@ -13,4 +13,4 @@ i = ctmethods(20, n, -job, a, b, c, d, e); elseif nargin == 8 i = ctmethods(20, n, -job, a, b, c, d, e, f); -end \ No newline at end of file +end diff --git a/samples/matlab/ignite_hp.m b/samples/matlab/ignite_hp.m index 506cd3ae6c3..e8c9b1d97b0 100644 --- a/samples/matlab/ignite_hp.m +++ b/samples/matlab/ignite_hp.m @@ -7,7 +7,7 @@ function ignite_hp(gas) help ignite_hp if nargin == 0 - gas = Solution('h2o2.yaml', 'gas', 'None'); + gas = Solution('h2o2.yaml', 'ohmech', 'None'); end mw = molecularWeights(gas); diff --git a/samples/matlab/ignite_uv.m b/samples/matlab/ignite_uv.m index 2a33313e1ea..c1d718add60 100644 --- a/samples/matlab/ignite_uv.m +++ b/samples/matlab/ignite_uv.m @@ -7,7 +7,7 @@ function ignite_uv(gas) help ignite_uv if nargin == 0 - gas = Solution('h2o2.yaml', 'gas', 'None'); + gas = Solution('h2o2.yaml', 'ohmech', 'None'); end mw = molecularWeights(gas); diff --git a/samples/matlab/prandtl1.m b/samples/matlab/prandtl1.m index a4be4a49e64..3aadcd85b95 100644 --- a/samples/matlab/prandtl1.m +++ b/samples/matlab/prandtl1.m @@ -12,7 +12,7 @@ function prandtl1(g) if nargin == 1 gas = g; else - gas = Solution('h2o2.yaml', 'gas', 'Mix'); + gas = Solution('h2o2.yaml', 'ohmech', 'Mix'); end pr = zeros(31,31); diff --git a/samples/matlab/prandtl2.m b/samples/matlab/prandtl2.m index 8b119286277..e8b8dda2c3c 100644 --- a/samples/matlab/prandtl2.m +++ b/samples/matlab/prandtl2.m @@ -11,7 +11,7 @@ function prandtl2(g) if nargin == 1 gas = g; else - gas = Solution('h2o2.yaml', 'gas', 'Multi'); + gas = Solution('h2o2.yaml', 'ohmech', 'Multi'); end pr = zeros(31,31); diff --git a/samples/matlab/reactor1.m b/samples/matlab/reactor1.m index 4d11aa8c7db..035be3f5cd8 100644 --- a/samples/matlab/reactor1.m +++ b/samples/matlab/reactor1.m @@ -16,7 +16,7 @@ function reactor1(g) if nargin == 1 gas = g; else - gas = Solution('h2o2.yaml', 'gas', 'None'); + gas = Solution('h2o2.yaml', 'ohmech', 
'None'); end P = oneatm; diff --git a/samples/matlab/reactor2.m b/samples/matlab/reactor2.m index 04277145e42..6c2c863625a 100644 --- a/samples/matlab/reactor2.m +++ b/samples/matlab/reactor2.m @@ -12,7 +12,7 @@ function reactor2(g) if nargin == 1 gas = g; else - gas = Solution('h2o2.yaml', 'gas', 'None'); + gas = Solution('h2o2.yaml', 'ohmech', 'None'); end % set the initial conditions diff --git a/samples/matlab/surfreactor.m b/samples/matlab/surfreactor.m index 64cfcb0fcd8..36dce2d379d 100644 --- a/samples/matlab/surfreactor.m +++ b/samples/matlab/surfreactor.m @@ -57,9 +57,9 @@ p0 = pressure(r); names = {'CH4','CO','CO2','H2O'}; x = zeros([nSteps 4]); -tim = zeros(nSteps); -temp = zeros(nSteps); -pres = zeros(nSteps); +tim = zeros(nSteps, 1); +temp = zeros(nSteps, 1); +pres = zeros(nSteps, 1); cov = zeros([nSteps nSurfSp]); t = 0; dt = 0.1; diff --git a/samples/matlab/test_examples.m b/samples/matlab/test_examples.m index b1c6bf75317..60a0105e6d1 100644 --- a/samples/matlab/test_examples.m +++ b/samples/matlab/test_examples.m @@ -1,12 +1,29 @@ % runs selected examples without pausing + +clear all +close all +cleanup + equil(); isentropic(); reactor1(); reactor2(); surfreactor; periodic_cstr; +Plug_Flow_Reactor; +lithium_ion_battery rankine(300.0, 2.0*oneatm, 0.8, 0.7); prandtl1(); +prandtl2(); flame1; +flame2; catcomb; -exit; +clear all +close all +diffflame; +ignite_hp; +ignite_uv; + +clear all +close all +cleanup From 957dd2414bf5a6a0cd0325036ec3bb5382c8cfce Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Sat, 31 Dec 2022 12:33:32 +0100 Subject: [PATCH 35/93] [yaml2ck] Fix explicit third body reaction output Prior to this fix, third body efficiencies were erroneously specified for third body reactions with explicit collision partners (see #1415). 
--- interfaces/cython/cantera/yaml2ck.py | 3 +- test/data/explicit-third-bodies.inp | 3 ++ test/data/explicit-third-bodies.xml | 42 ++++++++++++++++++---------- test/data/explicit-third-bodies.yaml | 3 ++ test/python/test_convert.py | 11 ++++++-- 5 files changed, 45 insertions(+), 17 deletions(-) diff --git a/interfaces/cython/cantera/yaml2ck.py b/interfaces/cython/cantera/yaml2ck.py index 1e8327da444..5b7310dae3a 100644 --- a/interfaces/cython/cantera/yaml2ck.py +++ b/interfaces/cython/cantera/yaml2ck.py @@ -472,7 +472,8 @@ def build_reactions_text(reactions: Iterable[ct.Reaction]): else: raise ValueError(f"Unknown reaction type: '{reac.reaction_type}'") - if reac.third_body is not None: + third = reac.third_body + if third is not None and third.name == "M" and len(third.efficiencies): reaction_lines.append( " ".join( f"{spec}/{value:.3E}/" diff --git a/test/data/explicit-third-bodies.inp b/test/data/explicit-third-bodies.inp index eed21f0d8f7..7c6bcaf9573 100644 --- a/test/data/explicit-third-bodies.inp +++ b/test/data/explicit-third-bodies.inp @@ -21,6 +21,9 @@ REACTIONS R1A+R1B+m = P1+H+M 3.0E19 -2.0 1900 ! An end of line comment DUPLICATE +R1A+R1B+R2 = P1+H+R2 3.0E19 -2.0 1900 ! 
An end of line comment + DUPLICATE + R1A+R1B(+ M ) = P1+H(+m) 1.0E18 -2.0 1000 LOW/4.0E25 -3.0 0/ R2/0.0/ SP)X/0/ diff --git a/test/data/explicit-third-bodies.xml b/test/data/explicit-third-bodies.xml index ed91bf36641..6a91167c508 100644 --- a/test/data/explicit-third-bodies.xml +++ b/test/data/explicit-third-bodies.xml @@ -6,7 +6,7 @@ H C Ar - H R1A R1B P1 R2 + H R1A R1B P1 R2 SP)X @@ -27,12 +27,12 @@ - 2.500000000E+00, 7.053328190E-13, -1.995919640E-15, 2.300816320E-18, + 2.500000000E+00, 7.053328190E-13, -1.995919640E-15, 2.300816320E-18, -9.277323320E-22, 2.547365990E+04, -4.466828530E-01 - 2.500000010E+00, -2.308429730E-11, 1.615619480E-14, -4.735152350E-18, + 2.500000010E+00, -2.308429730E-11, 1.615619480E-14, -4.735152350E-18, 4.981973570E-22, 2.547365990E+04, -4.466829140E-01 @@ -44,12 +44,12 @@ - 5.149876130E+00, -1.367097880E-02, 4.918005990E-05, -4.847430260E-08, + 5.149876130E+00, -1.367097880E-02, 4.918005990E-05, -4.847430260E-08, 1.666939560E-11, -1.024664760E+04, -4.641303760E+00 - 7.485149500E-02, 1.339094670E-02, -5.732858090E-06, 1.222925350E-09, + 7.485149500E-02, 1.339094670E-02, -5.732858090E-06, 1.222925350E-09, -1.018152300E-13, -9.468344590E+03, 1.843731800E+01 @@ -61,12 +61,12 @@ - 5.149876130E+00, -1.367097880E-02, 4.918005990E-05, -4.847430260E-08, + 5.149876130E+00, -1.367097880E-02, 4.918005990E-05, -4.847430260E-08, 1.666939560E-11, -1.024664760E+04, -4.641303760E+00 - 7.485149500E-02, 1.339094670E-02, -5.732858090E-06, 1.222925350E-09, + 7.485149500E-02, 1.339094670E-02, -5.732858090E-06, 1.222925350E-09, -1.018152300E-13, -9.468344590E+03, 1.843731800E+01 @@ -78,12 +78,12 @@ - 5.149876130E+00, -1.367097880E-02, 4.918005990E-05, -4.847430260E-08, + 5.149876130E+00, -1.367097880E-02, 4.918005990E-05, -4.847430260E-08, 1.666939560E-11, -1.024664760E+04, -4.641303760E+00 - 7.485149500E-02, 1.339094670E-02, -5.732858090E-06, 1.222925350E-09, + 7.485149500E-02, 1.339094670E-02, -5.732858090E-06, 1.222925350E-09, -1.018152300E-13, 
-9.468344590E+03, 1.843731800E+01 @@ -95,12 +95,12 @@ - 5.149876130E+00, -1.367097880E-02, 4.918005990E-05, -4.847430260E-08, + 5.149876130E+00, -1.367097880E-02, 4.918005990E-05, -4.847430260E-08, 1.666939560E-11, -1.024664760E+04, -4.641303760E+00 - 7.485149500E-02, 1.339094670E-02, -5.732858090E-06, 1.222925350E-09, + 7.485149500E-02, 1.339094670E-02, -5.732858090E-06, 1.222925350E-09, -1.018152300E-13, -9.468344590E+03, 1.843731800E+01 @@ -112,12 +112,12 @@ - 5.149876130E+00, -1.367097880E-02, 4.918005990E-05, -4.847430260E-08, + 5.149876130E+00, -1.367097880E-02, 4.918005990E-05, -4.847430260E-08, 1.666939560E-11, -1.024664760E+04, -4.641303760E+00 - 7.485149500E-02, 1.339094670E-02, -5.732858090E-06, 1.222925350E-09, + 7.485149500E-02, 1.339094670E-02, -5.732858090E-06, 1.222925350E-09, -1.018152300E-13, -9.468344590E+03, 1.843731800E+01 @@ -125,7 +125,7 @@ - + R1A + R1B + M [=] P1 + H + M @@ -139,6 +139,20 @@ H:1 P1:1.0 + + + R1A + R1B + R2 [=] P1 + H + R2 + + + 3.000000E+13 + -2.0 + 1900.000000 + + + R1B:1 R1A:1.0 + H:1 P1:1.0 + + R1A + R1B (+ M) [=] P1 + H (+ M) diff --git a/test/data/explicit-third-bodies.yaml b/test/data/explicit-third-bodies.yaml index 0115a3f8a80..0fe8e3b433a 100644 --- a/test/data/explicit-third-bodies.yaml +++ b/test/data/explicit-third-bodies.yaml @@ -79,6 +79,9 @@ reactions: type: three-body rate-constant: {A: 3.0e+13, b: -2.0, Ea: 1900.0 cal/mol} duplicate: true +- equation: R1A + R1B + R2 <=> P1 + H + R2 + rate-constant: {A: 3.0e+13, b: -2.0, Ea: 1900.0 cal/mol} + duplicate: true - equation: R1A + R1B (+ M) <=> P1 + H (+ M) type: falloff high-P-rate-constant: {A: 1.0e+15, b: -2.0, Ea: 1000.0 cal/mol} diff --git a/test/python/test_convert.py b/test/python/test_convert.py index 06cdfb22179..9f871fb85e8 100644 --- a/test/python/test_convert.py +++ b/test/python/test_convert.py @@ -559,7 +559,7 @@ def convert( thermo: str | Path | None = None, transport: str | Path | None = None, permissive: bool = False, - ) -> None: + ) -> str: if 
mech is not None: mech, thermo, transport = self._convert_to_ck( input_file, @@ -578,6 +578,7 @@ def convert( quiet=True, permissive=permissive, ) + return mech def check_conversion(self, basename, cls=ct.Solution, **kwargs): # The round-trip YAML->CK->YAML will always have the single phase name 'gas' @@ -689,7 +690,13 @@ def test_phase_id(self): def test_third_body_reactions(self): input_file = self.test_data_path / "explicit-third-bodies.yaml" - self.convert(input_file) + mech = self.convert(input_file) + with open(mech) as fid: + lines = fid.readlines() + for i, line in enumerate(lines): + if line.startswith("R1A + R1B"): + next = lines[i + 1] + assert next.startswith("LOW") or next.strip() == "DUPLICATE" ck_phase, yaml_phase = self.check_conversion(input_file) self.check_kinetics( ck_phase, yaml_phase, [300, 800, 1450, 2800], [5e3, 1e5, 2e6] From a5327f37ca35d83a2ada34d67d629ba30de51a37 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Wed, 27 Jul 2022 10:14:13 -0500 Subject: [PATCH 36/93] Add basic C++ HighFive HDF infrastructure --- SConstruct | 7 +++++++ include/cantera/base/config.h.in | 7 +++++-- include/cantera/base/global.h | 3 +++ src/base/global.cpp | 9 +++++++++ src/oneD/Sim1D.cpp | 8 ++++++++ 5 files changed, 32 insertions(+), 2 deletions(-) diff --git a/SConstruct b/SConstruct index d408863ed0c..f28dc703240 100644 --- a/SConstruct +++ b/SConstruct @@ -1497,6 +1497,9 @@ else: # env['system_sundials'] == 'n' env['sundials_version'] = '5.3' env['has_sundials_lapack'] = int(env['use_lapack']) +env["has_highfive"] = conf.CheckLibWithHeader( + "hdf5", "highfive/H5File.hpp", language="C++", autoadd=False) + def set_fortran(pattern, value): # Set compiler / flags for all Fortran versions to be the same for version in ("FORTRAN", "F77", "F90", "F95", "F03", "F08"): @@ -2029,6 +2032,7 @@ cdefine('CT_USE_SYSTEM_FMT', 'system_fmt') cdefine('CT_USE_SYSTEM_YAMLCPP', 'system_yamlcpp') cdefine('CT_USE_DEMANGLE', 'has_demangle') cdefine('CT_HAS_PYTHON', 
'python_package', 'full') +cdefine("CT_USE_HIGHFIVE_HDF", "has_highfive") config_h_build = env.Command('build/src/config.h.build', 'include/cantera/base/config.h.in', @@ -2112,6 +2116,9 @@ else: env["external_libs"] = [] env["external_libs"].extend(env["sundials_libs"]) +if env["has_highfive"]: + env["external_libs"].append("hdf5") + if env["system_fmt"]: env["external_libs"].append("fmt") diff --git a/include/cantera/base/config.h.in b/include/cantera/base/config.h.in index 00471f50251..2a28e45c9e0 100644 --- a/include/cantera/base/config.h.in +++ b/include/cantera/base/config.h.in @@ -62,8 +62,11 @@ typedef int ftnlen; // Fortran hidden string length type //-------------- Optional Cantera Capabilities ---------------------- -// Enable Sundials to use an external BLAS/LAPACK library if it was -// built to use this option +// Enable Sundials to use an external BLAS/LAPACK library if it was +// built to use this option {CT_SUNDIALS_USE_LAPACK!s} +// Enable export/import of HDF data via C++ HighFive +{CT_USE_HIGHFIVE_HDF!s} + #endif diff --git a/include/cantera/base/global.h b/include/cantera/base/global.h index 914f49a5a92..f4459f848dd 100644 --- a/include/cantera/base/global.h +++ b/include/cantera/base/global.h @@ -105,6 +105,9 @@ std::string gitCommit(); //! preprocessor macro is defined. bool debugModeEnabled(); +//! Returns true if Cantera was compiled with C++ HighFive HDF support. +bool usesHighFive(); + /*! 
* @defgroup logs Diagnostic Output * diff --git a/src/base/global.cpp b/src/base/global.cpp index 1ac3a2b2ec6..0f64d08478c 100644 --- a/src/base/global.cpp +++ b/src/base/global.cpp @@ -170,6 +170,15 @@ bool debugModeEnabled() #endif } +bool usesHighFive() +{ +#if CT_USE_HIGHFIVE_HDF + return true; +#else + return false; +#endif +} + std::vector FactoryBase::s_vFactoryRegistry; std::string demangle(const std::type_info& type) diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index b0c6f472784..e7f94b1ff05 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -159,6 +159,14 @@ void Sim1D::restore(const std::string& fname, const std::string& id, if (extension == "xml") { throw CanteraError("Sim1D::restore", "Restoring from XML is no longer supported."); + } else if (extension == "h5" || extension == "hdf") { +#if CT_USE_HIGHFIVE_HDF + throw CanteraError("Sim1D::restore", + "Not yet implemented."); +#else + throw CanteraError("Sim1D::restore", + "Restoring from HDF requires HighFive installation."); +#endif } AnyMap root = AnyMap::fromYamlFile(fname); if (!root.hasKey(id)) { From 65502383374b3622c2f6b8e41e8f1cdcf1eb1498 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Wed, 27 Jul 2022 21:18:53 -0500 Subject: [PATCH 37/93] [CI] Add highfive HDF to workflows --- .github/workflows/main.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c712e5df529..531e08d8906 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -401,7 +401,8 @@ jobs: # use boost-cpp rather than boost from conda-forge run: | conda install -q sundials=${{ matrix.sundials-ver }} scons numpy ruamel.yaml \ - cython boost-cpp fmt eigen yaml-cpp h5py pandas libgomp openblas pytest + cython boost-cpp fmt eigen yaml-cpp h5py pandas libgomp openblas pytest \ + highfive - name: Build Cantera run: | scons build system_fmt=y system_eigen=y system_yamlcpp=y system_sundials=y \ @@ -463,7 +464,7 @@ 
jobs: # use boost-cpp rather than boost from conda-forge # Install SCons >=4.4.0 to make sure that MSVC_TOOLSET_VERSION variable is present run: | - mamba install -q '"scons>=4.4.0"' numpy cython ruamel.yaml boost-cpp eigen yaml-cpp h5py pandas pytest + mamba install -q '"scons>=4.4.0"' numpy cython ruamel.yaml boost-cpp eigen yaml-cpp h5py pandas pytest highfive shell: pwsh - name: Build Cantera run: scons build system_eigen=y system_yamlcpp=y logging=debug From f51fb653c87e28ef66da8842eed3560f26c41a73 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Wed, 27 Jul 2022 15:36:43 -0500 Subject: [PATCH 38/93] [OneD] Check groups and data size in HDF import file --- src/oneD/Sim1D.cpp | 80 +++++++++++++++++++++++++++++++++------------- 1 file changed, 58 insertions(+), 22 deletions(-) diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index e7f94b1ff05..756b2a3591f 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -18,6 +18,14 @@ using namespace std; +#if CT_USE_HIGHFIVE_HDF +#include +#include +#include + +namespace h5 = HighFive; +#endif + namespace Cantera { @@ -161,34 +169,62 @@ void Sim1D::restore(const std::string& fname, const std::string& id, "Restoring from XML is no longer supported."); } else if (extension == "h5" || extension == "hdf") { #if CT_USE_HIGHFIVE_HDF - throw CanteraError("Sim1D::restore", - "Not yet implemented."); + h5::File file(fname, h5::File::ReadOnly); + if (!file.exist(id) || file.getObjectType(id) != h5::ObjectType::Group) { + throw CanteraError("Sim1D::restore", + "No solution with id '{}'", id); + } + h5::Group grp = file.getGroup(id); + for (auto dom : m_dom) { + std::string dn = dom->id(); + if (!grp.exist(dn)|| grp.getObjectType(dn) != h5::ObjectType::Group) { + throw CanteraError("Sim1D::restore", + "Saved state '{}' does not contain a domain named '{}'.", id, dn); + } + size_t points; + try { + // determine size based on stored temperature + points = grp.getGroup(dn).getDataSet("T").getElementCount(); + } catch 
(exception& err) { + throw CanteraError("Sim1D::restore", + "Unable to determine domain size:\n{}", err.what()); + } + dom->resize(dom->nComponents(), points); + } + resize(); + m_xlast_ts.clear(); + + throw CanteraError("Sim1D::restore", "Work in progress."); #else throw CanteraError("Sim1D::restore", "Restoring from HDF requires HighFive installation."); #endif - } - AnyMap root = AnyMap::fromYamlFile(fname); - if (!root.hasKey(id)) { - throw InputFileError("Sim1D::restore", root, - "No solution with id '{}'", id); - } - const auto& state = root[id]; - for (auto dom : m_dom) { - if (!state.hasKey(dom->id())) { - throw InputFileError("Sim1D::restore", state, - "Saved state '{}' does not contain a domain named '{}'.", - id, dom->id()); + } else if (extension == "yaml" || extension == "yml") { + AnyMap root = AnyMap::fromYamlFile(fname); + if (!root.hasKey(id)) { + throw InputFileError("Sim1D::restore", root, + "No solution with id '{}'", id); } - dom->resize(dom->nComponents(), state[dom->id()]["points"].asInt()); - } - resize(); - m_xlast_ts.clear(); - for (auto dom : m_dom) { - dom->restore(state[dom->id()].as(), m_x.data() + dom->loc(), - loglevel); + const auto& state = root[id]; + for (auto dom : m_dom) { + if (!state.hasKey(dom->id())) { + throw InputFileError("Sim1D::restore", state, + "Saved state '{}' does not contain a domain named '{}'.", + id, dom->id()); + } + dom->resize(dom->nComponents(), state[dom->id()]["points"].asInt()); + } + resize(); + m_xlast_ts.clear(); + for (auto dom : m_dom) { + dom->restore(state[dom->id()].as(), m_x.data() + dom->loc(), + loglevel); + } + finalize(); + } else { + throw CanteraError("Sim1D::restore", + "Unknown file extension '{}'", extension); } - finalize(); } void Sim1D::setFlatProfile(size_t dom, size_t comp, doublereal v) From 23c2ada86270f1141abe1baea6e8423d5b8d3fdf Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 28 Jul 2022 18:41:04 -0500 Subject: [PATCH 39/93] [base] Add tokenizePath utility --- 
include/cantera/base/stringUtils.h | 14 ++++++++++++++ src/base/stringUtils.cpp | 7 +++++++ 2 files changed, 21 insertions(+) diff --git a/include/cantera/base/stringUtils.h b/include/cantera/base/stringUtils.h index 726ae654d10..ced0f648b08 100644 --- a/include/cantera/base/stringUtils.h +++ b/include/cantera/base/stringUtils.h @@ -102,6 +102,20 @@ doublereal fpValueCheck(const std::string& val); void tokenizeString(const std::string& oval, std::vector& v); +//! This function separates a string up into tokens according to the location of +//! path separators. +/*! + * White space includes the new line character. tokens are stripped of leading + * and trailing white space. + * + * The separate tokens are returned in a string vector, v. + * + * @param oval String to be broken up + * @param v Output vector of tokens. + */ +void tokenizePath(const std::string& oval, + std::vector& v); + //! Copy the contents of a std::string into a char array of a given length /*! * If *length* is less than the size of *source*, the string will be truncated diff --git a/src/base/stringUtils.cpp b/src/base/stringUtils.cpp index dd15e33a926..e9fa8f35425 100644 --- a/src/base/stringUtils.cpp +++ b/src/base/stringUtils.cpp @@ -195,6 +195,13 @@ void tokenizeString(const std::string& in_val, std::vector& v) ba::split(v, val, ba::is_space(), ba::token_compress_on); } +void tokenizePath(const std::string& in_val, std::vector& v) +{ + std::string val = ba::trim_copy(in_val); + v.clear(); + ba::split(v, val, ba::is_any_of("/\\:"), ba::token_compress_on); +} + size_t copyString(const std::string& source, char* dest, size_t length) { const char* c_src = source.c_str(); From d8cbc41b05b41d3eda50a14b05fe8cc144b8a2ea Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 28 Jul 2022 11:34:55 -0500 Subject: [PATCH 40/93] [base] Add initial C++ SolutionArray implementation --- include/cantera/base/SolutionArray.h | 134 ++++++++++++++++++ src/base/SolutionArray.cpp | 201 +++++++++++++++++++++++++++ 2 
files changed, 335 insertions(+) create mode 100644 include/cantera/base/SolutionArray.h create mode 100644 src/base/SolutionArray.cpp diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h new file mode 100644 index 00000000000..c2a3aaa76c2 --- /dev/null +++ b/include/cantera/base/SolutionArray.h @@ -0,0 +1,134 @@ +//! @file SolutionArray.h + +// This file is part of Cantera. See License.txt in the top-level directory or +// at https://cantera.org/license.txt for license and copyright information. + +#ifndef CT_SOLUTIONARRAY_H +#define CT_SOLUTIONARRAY_H + +#include "cantera/base/global.h" +#include "cantera/base/AnyMap.h" + +#if CT_USE_HIGHFIVE_HDF +namespace HighFive +{ + class File; +} +#endif + +namespace Cantera +{ + +class Solution; + +/** + * A container class providing a convenient interface for representing many + * thermodynamic states using the same Solution object. C++ SolutionArray objects are + * one-dimensional by design; extensions to multi-dimensional arrays need to be + * implemented in high-level API's. 
+ */ +class SolutionArray +{ +private: + SolutionArray(const shared_ptr& sol, + size_t size, + const AnyMap& meta); + +public: + virtual ~SolutionArray() {} + + static shared_ptr create(const shared_ptr& sol, + size_t size=0, + const AnyMap& meta={}) + { + return shared_ptr( + new SolutionArray(sol, size, meta)); + } + + /** + * Initialize SolutionArray with independent memory management + * + * @param extra Names of auxiliary data + */ + void initialize(const std::vector& extra={}); + + /** + * Initialize SolutionArray object with mapped memory + * + * @param data Pointer to mapped memory address + * @param size Number of entries in SolutionArray + * @param stride An integer indicating the stride between entries + * @param offsets A vector of pairs containing offsets within the mapped memory + */ + void initialize(double* data, + size_t size, + size_t stride, + const std::vector>& offsets); + + /** + * Size of SolutionArray (number of entries) + */ + int size() const { + return m_size; + } + + /** + * Save the current SolutionArray to a container file. + * + * @param fname Name of output container file + * @param id Identifier of SolutionArray within the container file + */ + void save(const std::string& fname, const std::string& id); + + /** + * Restore SolutionArray from a container file. 
+ * + * @param fname Name of container file + * @param id Identifier of SolutionArray within the container file + */ + void restore(const std::string& fname, const std::string& id); + + void restore(const AnyMap& root, const std::string& id); + +#if CT_USE_HIGHFIVE_HDF + void restore(const HighFive::File& file, const std::string& id); +#endif + +protected: + shared_ptr m_sol; //!< Solution object associated with state data + size_t m_size; //!< Number of entries in SolutionArray + size_t m_stride; //!< Stride between SolutionArray entries + AnyMap m_meta; //!< Metadata + bool m_managed = false; //!< Flag indicating whether memory is externally managed + + shared_ptr m_work; //!< Work vector holding states (if not managed) + double* m_data; //!< Memory location holding state information (may be augmented) + std::map> m_other; //!< Auxiliary data + std::map m_offsets; //!< Map of offsets in state vector + std::map m_extra; //!< Map of offsets in auxiliary data +}; + + +// /** +// * Create a SolutionArray object with independent memory management +// * +// * @param sol The Solution object associated with state information +// * @param max_size Expected maximum number of entries in SolutionArray +// * @param extra A vector of additional entries +// * @param meta Metadata +// * @return shared_ptr +// */ +// shared_ptr newSolutionArray( +// const shared_ptr& sol, +// size_t max_size, +// const std::vector& extra={}, +// const AnyMap& meta={}) +// { +// shared_ptr arr = SolutionArray::create(sol, max_size, meta); +// arr->initialize(extra); +// return arr; +// } + +} + +#endif diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp new file mode 100644 index 00000000000..cfa7c620b14 --- /dev/null +++ b/src/base/SolutionArray.cpp @@ -0,0 +1,201 @@ +/** + * @file SolutionArray.cpp + * Definition file for class SolutionArray. + */ + +// This file is part of Cantera. 
See License.txt in the top-level directory or +// at https://cantera.org/license.txt for license and copyright information. + +#include "cantera/base/SolutionArray.h" +#include "cantera/base/Solution.h" +#include "cantera/base/stringUtils.h" +#include "cantera/thermo/ThermoPhase.h" + +#if CT_USE_HIGHFIVE_HDF +#include +#include +#include +#include + +namespace h5 = HighFive; +#endif + +namespace Cantera +{ + +SolutionArray::SolutionArray( + const shared_ptr& sol, + size_t size, + const AnyMap& meta) + : m_sol(sol) + , m_size(size) + , m_meta(meta) +{ + if (!m_sol) { + throw CanteraError("SolutionArray::SolutionArray", + "Unable to create SolutionArray from invalid Solution object."); + } +} + +void SolutionArray::initialize(const std::vector& extra) +{ + size_t count = 0; + for (auto& key : extra) { + m_extra.emplace(key, count); + count++; + } + + m_offsets = m_sol->thermo()->nativeState(); + m_stride = m_sol->thermo()->stateSize(); + m_work.reset(new vector_fp(m_size * m_stride)); + m_data = m_work->data(); + m_managed = false; + for (auto& key : extra) { + m_other.emplace(key, std::make_shared(m_size)); + } +} + +void SolutionArray::initialize( + double* data, + size_t size, + size_t stride, + const std::vector>& offsets) +{ + // check that offsets match order of native thermodynamic state properties + std::map flipped; + for (const auto& item : m_sol->thermo()->nativeState()) { + // flipped map will be sorted by native property offset within state + flipped.emplace(item.second, item.first); + } + std::map mapped; // searchable offset map + for (const auto& item : offsets) { + mapped.emplace(item.first, (int)(item.second)); + } + std::string key0 = flipped.at(0); + for (auto& prop : flipped) { + if (!mapped.count(prop.second)) { + throw CanteraError("SolutionArray::initialize", + "Native property '{}' not found in offset mapping.", prop.second); + } + int diffOffset = mapped.at(prop.second) - mapped.at(key0); + if (diffOffset != (int)(prop.first)) { + throw 
CanteraError("SolutionArray::initialize", + "Offset for property '{}' is incompatible with order of native state " + "properties", prop.second); + } + } + + // assign managed memory + m_work.reset(); + m_data = data; + m_managed = true; + m_size = size; + m_stride = stride; + + size_t count = 0; + for (auto& item : offsets) { + auto& key = item.first; + if (item.second != npos) { + m_offsets[key] = item.second; + } else { + m_other.emplace(key, std::make_shared(m_size)); + m_extra.emplace(key, count); + count++; + } + } +} + +void SolutionArray::save(const std::string& fname, const std::string& id) +{ + throw CanteraError("SolutionArray::save", "Not implemented."); +} + +void SolutionArray::restore(const std::string& fname, const std::string& id) +{ + size_t dot = fname.find_last_of("."); + std::string extension = (dot != npos) ? toLowerCopy(fname.substr(dot + 1)) : ""; + if (extension == "h5" || extension == "hdf") { +#if CT_USE_HIGHFIVE_HDF + restore(h5::File(fname, h5::File::ReadOnly), id); +#else + throw CanteraError("SolutionArray::restore", + "Restoring from HDF requires HighFive installation."); +#endif + } else if (extension == "yaml" || extension == "yml") { + restore(AnyMap::fromYamlFile(fname), id); + } else { + throw CanteraError("SolutionArray::restore", + "Unknown file extension '{}'", extension); + } +} + +#if CT_USE_HIGHFIVE_HDF +void SolutionArray::restore(const h5::File& file, const std::string& id) +{ + std::vector tokens; + tokenizePath(id, tokens); + std::string grp = tokens[0]; + if (!file.exist(grp) || file.getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("SolutionArray::restore", + "No group or solution with id '{}'", grp); + } + + std::string path = grp; + h5::Group sub = file.getGroup(grp); + tokens.erase(tokens.begin()); + for (auto& grp : tokens) { + path += "/" + grp; + if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("SolutionArray::restore", + "No group or solution with 
id '{}'", path); + } + sub = sub.getGroup(grp); + } + + std::vector names; + size_t nDims = npos; + for (auto& name : sub.listObjectNames()) { + if (sub.getObjectType(name) == h5::ObjectType::Dataset) { + h5::DataSpace space = sub.getDataSet(name).getSpace(); + names.push_back(name); + if (space.getNumberDimensions() < nDims) { + nDims = space.getNumberDimensions(); + m_size = space.getElementCount(); + } + } + } + + // @todo: restore data +} +#endif + +void SolutionArray::restore(const AnyMap& root, const std::string& id) +{ + std::vector tokens; + tokenizePath(id, tokens); + std::string field = tokens[0]; + if (!root.hasKey(field) || !root[field].is()) { + throw InputFileError("SolutionArray::restore", root, + "No field or solution with id '{}'", field); + } + + const AnyMap* ptr = &root[field].as(); // use raw pointer to avoid copying + std::string path = field; + tokens.erase(tokens.begin()); + for (auto& field : tokens) { + path += "/" + field; + const AnyMap& sub = *ptr; + if (!sub.hasKey(field) || !sub[field].is()) { + throw CanteraError("SolutionArray::restore", + "No field or solution with id '{}'", path); + } + ptr = &sub[field].as(); // AnyMap lacks 'operator=' for const AnyMap + } + + const AnyMap& sub = *ptr; + m_size = sub.getInt("points", 1); + + // @todo: restore data +} + +} From 74cba84658e4685739487f9471bdaabcfbc2eb67 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Mon, 5 Sep 2022 08:06:55 -0500 Subject: [PATCH 41/93] [base] Implement import of SolutionArray from yaml --- src/base/SolutionArray.cpp | 147 +++++++++++++++++++++++++++++++++---- 1 file changed, 133 insertions(+), 14 deletions(-) diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index cfa7c620b14..e8c57fc782d 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -10,6 +10,8 @@ #include "cantera/base/Solution.h" #include "cantera/base/stringUtils.h" #include "cantera/thermo/ThermoPhase.h" +#include "cantera/thermo/SurfPhase.h" +#include #if 
CT_USE_HIGHFIVE_HDF #include @@ -47,7 +49,7 @@ void SolutionArray::initialize(const std::vector& extra) m_offsets = m_sol->thermo()->nativeState(); m_stride = m_sol->thermo()->stateSize(); - m_work.reset(new vector_fp(m_size * m_stride)); + m_work.reset(new vector_fp(m_size * m_stride, 0.)); m_data = m_work->data(); m_managed = false; for (auto& key : extra) { @@ -171,31 +173,148 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) void SolutionArray::restore(const AnyMap& root, const std::string& id) { + // locate SolutionArray based on 'id' std::vector tokens; tokenizePath(id, tokens); - std::string field = tokens[0]; - if (!root.hasKey(field) || !root[field].is()) { - throw InputFileError("SolutionArray::restore", root, - "No field or solution with id '{}'", field); - } - - const AnyMap* ptr = &root[field].as(); // use raw pointer to avoid copying - std::string path = field; - tokens.erase(tokens.begin()); + const AnyMap* ptr = &root; // use raw pointer to avoid copying + std::string path = ""; for (auto& field : tokens) { path += "/" + field; const AnyMap& sub = *ptr; if (!sub.hasKey(field) || !sub[field].is()) { throw CanteraError("SolutionArray::restore", - "No field or solution with id '{}'", path); + "No field or solution with id '{}'", path); } ptr = &sub[field].as(); // AnyMap lacks 'operator=' for const AnyMap } - const AnyMap& sub = *ptr; - m_size = sub.getInt("points", 1); - // @todo: restore data + // set size and initialize + m_size = sub.getInt("points", 0); + if (!sub.hasKey("T") && !sub.hasKey("temperature")) { + // overwrite size - Sim1D erroneously assigns '1' (Cantera 2.6) + m_size = 0; + } + initialize({}); + + // restore data + std::set exclude = {"points", "X", "Y"}; + if (m_size == 0) { + // no data points + } else if (m_size == 1) { + // single data point + double T = sub["temperature"].asDouble(); + double P = sub.getDouble("pressure", OneAtm); // missing - Sim1D (Cantera 2.6) + std::set props = {"temperature", 
"pressure"}; + exclude.insert(props.begin(), props.end()); + if (sub.hasKey("mass-fractions")) { + auto Y = sub["mass-fractions"].asMap(); + m_sol->thermo()->setState_TPY(T, P, Y); + exclude.insert("mass-fractions"); + } else if (sub.hasKey("coverages")) { + m_sol->thermo()->setState_TP(T, P); + auto cov = sub["coverages"].asMap(); + exclude.insert("coverages"); + auto surf = std::dynamic_pointer_cast(m_sol->thermo()); + if (!surf) { + throw CanteraError("SolutionArray::restore", + "Restoring of coverages requires surface phase"); + } + surf->setCoveragesByName(cov); + } else { + throw NotImplementedError("SolutionArray::restore", + "Unknown YAML serialization format."); + } + for (const auto& prop : m_sol->thermo()->nativeState()) { + if (prop.first == "T") { + m_data[prop.second] = m_sol->thermo()->temperature(); + } else if (prop.first == "D") { + m_data[prop.second] = m_sol->thermo()->density(); + } else if (prop.first == "P") { + m_data[prop.second] = m_sol->thermo()->pressure(); + } else if (prop.first == "Y") { + m_sol->thermo()->getMassFractions(&m_data[prop.second]); + } else if (prop.first == "X") { + m_sol->thermo()->getMoleFractions(&m_data[prop.second]); + } else { + throw NotImplementedError("SolutionArray::restore", + "Unable to restore property '{}'.", prop.first); + } + } + } else { + // multiple data points + const auto& nativeState = m_sol->thermo()->nativeState(); + for (const auto& item : sub) { + const std::string& name = item.first; + const AnyValue& value = item.second; + size_t offset = npos; + if (value.is>()) { + const vector_fp& data = value.as>(); + size_t species = m_sol->thermo()->speciesIndex(name); + if (data.size() != m_size) { + // meta data + continue; + } else if (species != npos) { + // species + if (nativeState.count("X")) { + offset = nativeState.find("X")->second + species; + } else if (nativeState.count("Y")) { + offset = nativeState.find("Y")->second + species; + } + } else if (nativeState.count(name)) { + // property + 
offset = nativeState.find(name)->second; + } else { + // extra + m_other.emplace(name, std::make_shared(m_size)); + auto& extra = m_other[name]; + std::copy(data.begin(), data.end(), extra->begin()); + } + + if (offset != npos) { + for (size_t i = 0; i < m_size; i++) { + m_data[offset + i * m_stride] = data[i]; + } + } + exclude.insert(item.first); + } + } + + // check that state data are complete + std::set props = {}; + std::set missingProps = {}; + for (const auto& item : nativeState) { + if (exclude.count(item.first)) { + props.insert(item.first); + } else { + missingProps.insert(item.first); + } + } + + std::set TY = {"T", "Y"}; + if (props == TY && missingProps.count("D") && sub.hasKey("pressure")) { + // missing "D" - Sim1D (Cantera 2.6) + double P = sub["pressure"].asDouble(); + const size_t offset_T = nativeState.find("T")->second; + const size_t offset_D = nativeState.find("D")->second; + const size_t offset_Y = nativeState.find("Y")->second; + for (size_t i = 0; i < m_size; i++) { + double T = m_data[offset_T + i * m_stride]; + m_sol->thermo()->setState_TPY(T, P, &m_data[offset_Y + i * m_stride]); + m_data[offset_D + i * m_stride] = m_sol->thermo()->density(); + } + } else if (missingProps.size()) { + throw CanteraError("SolutionArray::restore", + "Incomplete state information."); + } + } + + // add meta data + for (const auto& item : sub) { + if (!exclude.count(item.first)) { + m_meta[item.first] = item.second; + } + } } } From 6f9561f0d1131bfe26e6d98de75014a8b70bd792 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Mon, 5 Sep 2022 21:37:13 -0500 Subject: [PATCH 42/93] [base] Implement import of SolutionArray from HDF --- src/base/SolutionArray.cpp | 161 +++++++++++++++++++++++++++++++++++-- 1 file changed, 154 insertions(+), 7 deletions(-) diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index e8c57fc782d..640f021215f 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -14,10 +14,12 @@ #include #if 
CT_USE_HIGHFIVE_HDF -#include -#include +#include #include #include +#include +#include +#include namespace h5 = HighFive; #endif @@ -132,6 +134,48 @@ void SolutionArray::restore(const std::string& fname, const std::string& id) } #if CT_USE_HIGHFIVE_HDF +vector_fp readH5FloatVector(h5::DataSet data, std::string id, size_t size) +{ + if (data.getDataType().getClass() != h5::DataTypeClass::Float) { + throw CanteraError("readH5FloatVector", + "Type of DataSet '{}' is inconsistent; expected HDF float.", id); + } + if (data.getElementCount() != size) { + throw CanteraError("readH5FloatVector", + "Size of DataSet '{}' is inconsistent; expected {} elements but " + "received {} elements.", id, size, data.getElementCount()); + } + vector_fp out; + data.read(out); + return out; +} + +std::vector readH5FloatMatrix(h5::DataSet data, std::string id, + size_t rows, size_t cols) +{ + if (data.getDataType().getClass() != h5::DataTypeClass::Float) { + throw CanteraError("readH5FloatMatrix", + "Type of DataSet '{}' is inconsistent; expected HDF float.", id); + } + h5::DataSpace space = data.getSpace(); + if (space.getNumberDimensions() != 2) { + throw CanteraError("readH5FloatMatrix", + "Shape of DataSet '{}' is inconsistent; expected two dimensions.", id); + } + const auto& shape = space.getDimensions(); + if (shape[0] != rows) { + throw CanteraError("readH5FloatMatrix", + "Shape of DataSet '{}' is inconsistent; expected {} rows.", id, rows); + } + if (shape[1] != cols) { + throw CanteraError("readH5FloatMatrix", + "Shape of DataSet '{}' is inconsistent; expected {} columns.", id, cols); + } + std::vector out; + data.read(out); + return out; +} + void SolutionArray::restore(const h5::File& file, const std::string& id) { std::vector tokens; @@ -139,7 +183,7 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) std::string grp = tokens[0]; if (!file.exist(grp) || file.getObjectType(grp) != h5::ObjectType::Group) { throw CanteraError("SolutionArray::restore", - 
"No group or solution with id '{}'", grp); + "No group or solution with id '{}'", grp); } std::string path = grp; @@ -149,25 +193,128 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) path += "/" + grp; if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { throw CanteraError("SolutionArray::restore", - "No group or solution with id '{}'", path); + "No group or solution with id '{}'", path); } sub = sub.getGroup(grp); } - std::vector names; + std::set names; size_t nDims = npos; for (auto& name : sub.listObjectNames()) { if (sub.getObjectType(name) == h5::ObjectType::Dataset) { h5::DataSpace space = sub.getDataSet(name).getSpace(); - names.push_back(name); + names.insert(name); if (space.getNumberDimensions() < nDims) { nDims = space.getNumberDimensions(); m_size = space.getElementCount(); } } } + if (nDims != 1) { + throw NotImplementedError("SolutionArray::restore", + "Unable to restore SolutionArray with {} dimensions.", nDims); + } - // @todo: restore data + initialize({}); + + // restore meta data from attributes + for (auto& name : sub.listAttributeNames()) { + h5::Attribute attr = sub.getAttribute(name); + h5::DataType dtype = attr.getDataType(); + h5::DataTypeClass dclass = dtype.getClass(); + if (dclass == h5::DataTypeClass::Float) { + double value; + attr.read(value); + m_meta[name] = value; + } else if (dclass == h5::DataTypeClass::Integer) { + int value; + attr.read(value); + m_meta[name] = value; + } else if (dclass == h5::DataTypeClass::String) { + std::string value; + attr.read(value); + m_meta[name] = value; + } else { + throw NotImplementedError("SolutionArray::restore", + "Unable to read attribute '{}' with type '{}'", name, dtype.string()); + } + } + + // identify storage mode of state data + std::string mode = ""; + const auto& nativeState = m_sol->thermo()->nativeState(); + bool usesNativeState; + std::set state; + for (const auto& item : m_sol->thermo()->fullStates()) { + bool found = true; + 
usesNativeState = true; + state.clear(); + for (size_t i = 0; i < item.size(); i++) { + std::string name(1, item[i]); + if (names.count(name)) { + state.insert(name); + usesNativeState &= nativeState.count(name); + } else { + found = false; + break; + } + } + if (found) { + mode = item; + break; + } + } + if (mode == "") { + throw CanteraError("SolutionArray::restore", + "Data are not consistent with full state modes."); + } + + // restore state data + size_t nSpecies = m_sol->thermo()->nSpecies(); + size_t nState = m_sol->thermo()->stateSize(); + if (usesNativeState) { + // native state can be written directly into data storage + for (const auto& name : state) { + h5::DataSet data = sub.getDataSet(name); + size_t offset = nativeState.find(name)->second; + if (name == "X" || name == "Y") { + std::vector prop; + prop = readH5FloatMatrix(data, name, m_size, nSpecies); + for (size_t i = 0; i < m_size; i++) { + std::copy(prop[i].begin(), prop[i].end(), + &m_data[offset + i * m_stride]); + } + } else { + vector_fp prop = readH5FloatVector(data, name, m_size); + for (size_t i = 0; i < m_size; i++) { + m_data[offset + i * m_stride] = prop[i]; + } + } + } + } else if (mode == "TPX") { + // data format used by Python h5py export (Cantera 2.5) + vector_fp T = readH5FloatVector(sub.getDataSet("T"), "T", m_size); + vector_fp P = readH5FloatVector(sub.getDataSet("P"), "P", m_size); + std::vector X; + X = readH5FloatMatrix(sub.getDataSet("X"), "X", m_size, nSpecies); + for (size_t i = 0; i < m_size; i++) { + m_sol->thermo()->setState_TPX(T[i], P[i], X[i].data()); + m_sol->thermo()->saveState(nState, &m_data[i * m_stride]); + } + } else { + throw NotImplementedError("SolutionArray::restore", + "Import of '{}' data is not supported.", mode); + } + + // restore other data + for (const auto& name : names) { + if (!state.count(name)) { + vector_fp data = readH5FloatVector(sub.getDataSet(name), name, m_size); + m_other.emplace(name, std::make_shared(m_size)); + auto& extra = 
m_other[name]; + std::copy(data.begin(), data.end(), extra->begin()); + } + } } #endif From 8a01efe3d767316515389e9ddf737b96e373cb7a Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 6 Sep 2022 11:27:42 -0500 Subject: [PATCH 43/93] [base] Augment C++ SolutionArray API - Add setters/getters to SolutionArray - Also remove unnecessary code stubs --- include/cantera/base/SolutionArray.h | 127 ++++++++-------- src/base/SolutionArray.cpp | 210 ++++++++++++++++----------- 2 files changed, 195 insertions(+), 142 deletions(-) diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index c2a3aaa76c2..674fe7eedcd 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -20,12 +20,13 @@ namespace Cantera { class Solution; +class ThermoPhase; -/** - * A container class providing a convenient interface for representing many - * thermodynamic states using the same Solution object. C++ SolutionArray objects are - * one-dimensional by design; extensions to multi-dimensional arrays need to be - * implemented in high-level API's. +/*! + * A container class providing a convenient interface for representing many + * thermodynamic states using the same Solution object. C++ SolutionArray objects are + * one-dimensional by design; extensions to multi-dimensional arrays need to be + * implemented in high-level API's. */ class SolutionArray { @@ -45,46 +46,81 @@ class SolutionArray new SolutionArray(sol, size, meta)); } - /** - * Initialize SolutionArray with independent memory management + /*! 
+ * Initialize SolutionArray with independent memory management * - * @param extra Names of auxiliary data + * @param extra Names of auxiliary data */ void initialize(const std::vector& extra={}); - /** - * Initialize SolutionArray object with mapped memory - * - * @param data Pointer to mapped memory address - * @param size Number of entries in SolutionArray - * @param stride An integer indicating the stride between entries - * @param offsets A vector of pairs containing offsets within the mapped memory - */ - void initialize(double* data, - size_t size, - size_t stride, - const std::vector>& offsets); - - /** - * Size of SolutionArray (number of entries) + /*! + * Size of SolutionArray (number of entries) */ int size() const { return m_size; } - /** - * Save the current SolutionArray to a container file. + /*! + * SolutionArray meta data. + */ + AnyMap& meta() { + return m_meta; + } + + /*! + * Retrieve associated ThermoPhase object + */ + std::shared_ptr thermo(); + + /*! + * Check whether SolutionArray contains a component. + */ + bool hasComponent(const std::string& name) const; + + /*! + * Retrieve a component of the SolutionArray by name. + */ + vector_fp getComponent(const std::string& name) const; + + /*! + * Set a component of the SolutionArray by name. * - * @param fname Name of output container file - * @param id Identifier of SolutionArray within the container file + * @param name Component name + * @param data Component data + * @param force If true, add new component to SolutionArray + */ + void setComponent(const std::string& name, const vector_fp& data, bool force=false); + + /*! + * Update the buffered index used to access entries. + */ + void setIndex(size_t index); + + /*! + * Retrieve the state vector for a single entry. If index is valid, it is updated; + * otherwise, the last previously used index is referenced. + */ + vector_fp getState(size_t index=npos); + + /*! + * Retrieve auxiliary data for a single entry. 
If index is valid, it is updated; + * otherwise, the last previously used index is referenced. + */ + std::map getAuxiliary(size_t index=npos); + + /*! + * Save the current SolutionArray to a container file. + * + * @param fname Name of output container file + * @param id Identifier of SolutionArray within the container file */ void save(const std::string& fname, const std::string& id); - /** - * Restore SolutionArray from a container file. + /*! + * Restore SolutionArray from a container file. * - * @param fname Name of container file - * @param id Identifier of SolutionArray within the container file + * @param fname Name of container file + * @param id Identifier of SolutionArray within the container file */ void restore(const std::string& fname, const std::string& id); @@ -99,36 +135,13 @@ class SolutionArray size_t m_size; //!< Number of entries in SolutionArray size_t m_stride; //!< Stride between SolutionArray entries AnyMap m_meta; //!< Metadata - bool m_managed = false; //!< Flag indicating whether memory is externally managed + size_t m_index = npos; //!< Buffered index - shared_ptr m_work; //!< Work vector holding states (if not managed) - double* m_data; //!< Memory location holding state information (may be augmented) + shared_ptr m_work; //!< Work vector holding states + double* m_data; //!< Memory location holding state information std::map> m_other; //!< Auxiliary data - std::map m_offsets; //!< Map of offsets in state vector - std::map m_extra; //!< Map of offsets in auxiliary data }; - -// /** -// * Create a SolutionArray object with independent memory management -// * -// * @param sol The Solution object associated with state information -// * @param max_size Expected maximum number of entries in SolutionArray -// * @param extra A vector of additional entries -// * @param meta Metadata -// * @return shared_ptr -// */ -// shared_ptr newSolutionArray( -// const shared_ptr& sol, -// size_t max_size, -// const std::vector& extra={}, -// const AnyMap& 
meta={}) -// { -// shared_ptr arr = SolutionArray::create(sol, max_size, meta); -// arr->initialize(extra); -// return arr; -// } - } #endif diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index 640f021215f..276a2a4a915 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -43,70 +43,135 @@ SolutionArray::SolutionArray( void SolutionArray::initialize(const std::vector& extra) { - size_t count = 0; - for (auto& key : extra) { - m_extra.emplace(key, count); - count++; - } - - m_offsets = m_sol->thermo()->nativeState(); m_stride = m_sol->thermo()->stateSize(); m_work.reset(new vector_fp(m_size * m_stride, 0.)); m_data = m_work->data(); - m_managed = false; for (auto& key : extra) { m_other.emplace(key, std::make_shared(m_size)); } } -void SolutionArray::initialize( - double* data, - size_t size, - size_t stride, - const std::vector>& offsets) +std::shared_ptr SolutionArray::thermo() { - // check that offsets match order of native thermodynamic state properties - std::map flipped; - for (const auto& item : m_sol->thermo()->nativeState()) { - // flipped map will be sorted by native property offset within state - flipped.emplace(item.second, item.first); - } - std::map mapped; // searchable offset map - for (const auto& item : offsets) { - mapped.emplace(item.first, (int)(item.second)); - } - std::string key0 = flipped.at(0); - for (auto& prop : flipped) { - if (!mapped.count(prop.second)) { - throw CanteraError("SolutionArray::initialize", - "Native property '{}' not found in offset mapping.", prop.second); - } - int diffOffset = mapped.at(prop.second) - mapped.at(key0); - if (diffOffset != (int)(prop.first)) { - throw CanteraError("SolutionArray::initialize", - "Offset for property '{}' is incompatible with order of native state " - "properties", prop.second); + return m_sol->thermo(); +} + +bool SolutionArray::hasComponent(const std::string& name) const +{ + if (m_other.count(name)) { + // auxiliary data + return true; + } + 
if (m_sol->thermo()->speciesIndex(name) != npos) { + // species + return true; + } + if (name == "X" || name == "Y") { + // reserved names + return false; + } + // native state + return (m_sol->thermo()->nativeState().count(name)); +} + +vector_fp SolutionArray::getComponent(const std::string& name) const +{ + if (!hasComponent(name)) { + throw CanteraError("SolutionArray::getComponent", "no component named " + name); + } + + vector_fp out(m_size); + if (m_other.count(name)) { + // auxiliary data + auto other = m_other.at(name); + std::copy(other->begin(), other->end(), out.begin()); + return out; + } + + size_t ix = m_sol->thermo()->speciesIndex(name); + if (ix == npos) { + ix = m_sol->thermo()->nativeState()[name]; + } else { + ix += m_stride - m_sol->thermo()->nSpecies(); + } + for (size_t k = 0; k < m_size; ++k) { + out[k] = m_data[k * m_stride + ix]; + } + return out; +} + +void SolutionArray::setComponent( + const std::string& name, const vector_fp& data, bool force) +{ + if (!hasComponent(name)) { + if (force) { + m_other.emplace(name, std::make_shared(m_size)); + auto& extra = m_other[name]; + std::copy(data.begin(), data.end(), extra->begin()); + return; } + throw CanteraError("SolutionArray::setComponent", "no component named " + name); + } + if (data.size() != m_size) { + throw CanteraError("SolutionArray::setComponent", "incompatible sizes"); } - // assign managed memory - m_work.reset(); - m_data = data; - m_managed = true; - m_size = size; - m_stride = stride; + if (m_other.count(name)) { + // auxiliary data + auto other = m_other[name]; + std::copy(data.begin(), data.end(), other->begin()); + } - size_t count = 0; - for (auto& item : offsets) { - auto& key = item.first; - if (item.second != npos) { - m_offsets[key] = item.second; - } else { - m_other.emplace(key, std::make_shared(m_size)); - m_extra.emplace(key, count); - count++; + size_t ix = m_sol->thermo()->speciesIndex(name); + if (ix == npos) { + ix = m_sol->thermo()->nativeState()[name]; + } 
else { + ix += m_stride - m_sol->thermo()->nSpecies(); + } + for (size_t k = 0; k < m_size; ++k) { + m_data[k * m_stride + ix] = data[k]; + } +} + +void SolutionArray::setIndex(size_t index) +{ + if (m_size == 0) { + throw CanteraError("SolutionArray::setIndex", + "Unable to set index in empty SolutionArray."); + } else if (index == npos) { + if (m_index == npos) { + throw CanteraError("SolutionArray::setIndex", + "Both current and buffered indices are invalid."); } + return; + } else if (index == m_index) { + return; + } else if (index >= m_size) { + throw IndexError("SolutionArray::setIndex", "entries", index, m_size - 1); + } + m_index = index; + size_t nState = m_sol->thermo()->stateSize(); + m_sol->thermo()->restoreState(nState, &m_data[m_index * m_stride]); +} + +vector_fp SolutionArray::getState(size_t index) +{ + setIndex(index); + size_t nState = m_sol->thermo()->stateSize(); + vector_fp out(nState); + m_sol->thermo()->saveState(out); // thermo contains current state + return out; +} + +std::map SolutionArray::getAuxiliary(size_t index) +{ + setIndex(index); + std::map out; + for (auto& item : m_other) { + auto& extra = *item.second; + out[item.first] = extra[m_index]; } + return out; } void SolutionArray::save(const std::string& fname, const std::string& id) @@ -240,6 +305,10 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) } } + if (m_size == 0) { + return; + } + // identify storage mode of state data std::string mode = ""; const auto& nativeState = m_sol->thermo()->nativeState(); @@ -276,27 +345,22 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) // native state can be written directly into data storage for (const auto& name : state) { h5::DataSet data = sub.getDataSet(name); - size_t offset = nativeState.find(name)->second; if (name == "X" || name == "Y") { - std::vector prop; - prop = readH5FloatMatrix(data, name, m_size, nSpecies); + size_t offset = nativeState.find(name)->second; + auto prop = 
readH5FloatMatrix(data, name, m_size, nSpecies); for (size_t i = 0; i < m_size; i++) { std::copy(prop[i].begin(), prop[i].end(), &m_data[offset + i * m_stride]); } } else { - vector_fp prop = readH5FloatVector(data, name, m_size); - for (size_t i = 0; i < m_size; i++) { - m_data[offset + i * m_stride] = prop[i]; - } + setComponent(name, readH5FloatVector(data, name, m_size)); } } } else if (mode == "TPX") { // data format used by Python h5py export (Cantera 2.5) vector_fp T = readH5FloatVector(sub.getDataSet("T"), "T", m_size); vector_fp P = readH5FloatVector(sub.getDataSet("P"), "P", m_size); - std::vector X; - X = readH5FloatMatrix(sub.getDataSet("X"), "X", m_size, nSpecies); + auto X = readH5FloatMatrix(sub.getDataSet("X"), "X", m_size, nSpecies); for (size_t i = 0; i < m_size; i++) { m_sol->thermo()->setState_TPX(T[i], P[i], X[i].data()); m_sol->thermo()->saveState(nState, &m_data[i * m_stride]); @@ -394,36 +458,12 @@ void SolutionArray::restore(const AnyMap& root, const std::string& id) for (const auto& item : sub) { const std::string& name = item.first; const AnyValue& value = item.second; - size_t offset = npos; if (value.is>()) { const vector_fp& data = value.as>(); - size_t species = m_sol->thermo()->speciesIndex(name); - if (data.size() != m_size) { - // meta data - continue; - } else if (species != npos) { - // species - if (nativeState.count("X")) { - offset = nativeState.find("X")->second + species; - } else if (nativeState.count("Y")) { - offset = nativeState.find("Y")->second + species; - } - } else if (nativeState.count(name)) { - // property - offset = nativeState.find(name)->second; - } else { - // extra - m_other.emplace(name, std::make_shared(m_size)); - auto& extra = m_other[name]; - std::copy(data.begin(), data.end(), extra->begin()); - } - - if (offset != npos) { - for (size_t i = 0; i < m_size; i++) { - m_data[offset + i * m_stride] = data[i]; - } + if (data.size() == m_size) { + setComponent(name, data, true); + exclude.insert(item.first); 
} - exclude.insert(item.first); } } From b6950992c59b9db9bbdfce998341c895047077fa Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Sun, 4 Sep 2022 14:52:11 -0500 Subject: [PATCH 44/93] [OneD] integrate SolutionArray into Sim1D::restore --- include/cantera/oneD/Boundary1D.h | 7 +++ include/cantera/oneD/Domain1D.h | 10 ++++ include/cantera/oneD/StFlow.h | 3 ++ src/oneD/Boundary1D.cpp | 43 +++++++++++++++++ src/oneD/Domain1D.cpp | 5 ++ src/oneD/Sim1D.cpp | 77 +++++++++++++++++-------------- src/oneD/StFlow.cpp | 34 ++++++++++++++ 7 files changed, 145 insertions(+), 34 deletions(-) diff --git a/include/cantera/oneD/Boundary1D.h b/include/cantera/oneD/Boundary1D.h index 61cebf850a1..cba4b4705fe 100644 --- a/include/cantera/oneD/Boundary1D.h +++ b/include/cantera/oneD/Boundary1D.h @@ -137,6 +137,7 @@ class Inlet1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual void restore(SolutionArray& arr, double* soln, int loglevel); protected: int m_ilr; @@ -170,6 +171,7 @@ class Empty1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; /** @@ -194,6 +196,7 @@ class Symm1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -218,6 +221,7 @@ class Outlet1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -250,6 +254,7 @@ class OutletRes1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual void restore(SolutionArray& arr, double* soln, int 
loglevel); protected: size_t m_nsp; @@ -282,6 +287,7 @@ class Surf1D : public Boundary1D virtual AnyMap serialize(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual void restore(SolutionArray& arr, double* soln, int loglevel); virtual void showSolution_s(std::ostream& s, const double* x); @@ -318,6 +324,7 @@ class ReactingSurf1D : public Boundary1D virtual AnyMap serialize(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual void restore(SolutionArray& arr, double* soln, int loglevel); virtual void _getInitialSoln(double* x) { m_sphase->getCoverages(x); diff --git a/include/cantera/oneD/Domain1D.h b/include/cantera/oneD/Domain1D.h index 12a189b23cb..e5f737b042b 100644 --- a/include/cantera/oneD/Domain1D.h +++ b/include/cantera/oneD/Domain1D.h @@ -29,6 +29,7 @@ class OneDim; class Refiner; class AnyMap; class Solution; +class SolutionArray; /** * Base class for one-dimensional domains. @@ -324,6 +325,15 @@ class Domain1D */ virtual void restore(const AnyMap& state, double* soln, int loglevel); + //! Restore the solution for this domain from a SolutionArray + /*! + * @param[in] arr SolutionArray defining the state of this domain + * @param[out] soln Value of the solution vector, local to this domain + * @param[in] loglevel 0 to suppress all output; 1 to show warnings; 2 for + * verbose output + */ + virtual void restore(SolutionArray& arr, double* soln, int loglevel); + //! Return thermo/kinetics/transport manager used in the domain //! @since New in Cantera 3.0. 
shared_ptr solution() const { diff --git a/include/cantera/oneD/StFlow.h b/include/cantera/oneD/StFlow.h index 23826cb8df1..a2996ed6846 100644 --- a/include/cantera/oneD/StFlow.h +++ b/include/cantera/oneD/StFlow.h @@ -159,6 +159,7 @@ class StFlow : public Domain1D virtual AnyMap serialize(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual void restore(SolutionArray& arr, double* soln, int loglevel); //! Set flow configuration for freely-propagating flames, using an internal //! point with a fixed temperature as the condition to determine the inlet @@ -282,6 +283,8 @@ class StFlow : public Domain1D } protected: + void setMeta(const AnyMap& state); + doublereal wdot(size_t k, size_t j) const { return m_wdot(k,j); } diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index 9d0bcfa1d8b..f6159ea8ef9 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -3,6 +3,7 @@ // This file is part of Cantera. See License.txt in the top-level directory or // at https://cantera.org/license.txt for license and copyright information. 
+#include "cantera/base/SolutionArray.h" #include "cantera/oneD/Boundary1D.h" #include "cantera/oneD/OneDim.h" #include "cantera/oneD/StFlow.h" @@ -257,6 +258,18 @@ void Inlet1D::restore(const AnyMap& state, double* soln, int loglevel) } } +void Inlet1D::restore(SolutionArray& arr, double* soln, int loglevel) +{ + Boundary1D::restore(arr.meta(), soln, loglevel); + arr.setIndex(0); + auto phase = arr.thermo(); + auto aux = arr.getAuxiliary(0); + m_temp = phase->temperature(); + m_mdot = phase->density() * aux["velocity"]; + auto Y = phase->massFractions(); + std::copy(Y, Y + m_nsp, &m_yin[0]); +} + // ------------- Empty1D ------------- void Empty1D::init() @@ -543,6 +556,16 @@ void OutletRes1D::restore(const AnyMap& state, double* soln, int loglevel) } } +void OutletRes1D::restore(SolutionArray& arr, double* soln, int loglevel) +{ + Boundary1D::restore(arr.meta(), soln, loglevel); + arr.setIndex(0); + auto phase = arr.thermo(); + m_temp = phase->temperature(); + auto Y = phase->massFractions(); + std::copy(Y, Y + m_nsp, &m_yres[0]); +} + // -------- Surf1D -------- void Surf1D::init() @@ -589,6 +612,13 @@ void Surf1D::restore(const AnyMap& state, double* soln, int loglevel) m_temp = state["temperature"].asDouble(); } +void Surf1D::restore(SolutionArray& arr, double* soln, int loglevel) +{ + Boundary1D::restore(arr.meta(), soln, loglevel); + arr.setIndex(0); + m_temp = arr.thermo()->temperature(); +} + void Surf1D::showSolution_s(std::ostream& s, const double* x) { s << "------------------- Surface " << domainIndex() << " ------------------- " << std::endl; @@ -792,6 +822,19 @@ void ReactingSurf1D::restore(const AnyMap& state, double* soln, int loglevel) } } +void ReactingSurf1D::restore(SolutionArray& arr, double* soln, int loglevel) +{ + Boundary1D::restore(arr.meta(), soln, loglevel); + arr.setIndex(0); + auto surf = std::dynamic_pointer_cast(arr.thermo()); + if (!surf) { + throw CanteraError("ReactingSurf1D::restore", + "Restoring of coverages requires surface 
phase"); + } + m_temp = surf->temperature(); + surf->getCoverages(soln); +} + void ReactingSurf1D::showSolution(const double* x) { writelog(" Temperature: {:10.4g} K \n", m_temp); diff --git a/src/oneD/Domain1D.cpp b/src/oneD/Domain1D.cpp index e1ddcc672ee..d4bfae62eab 100644 --- a/src/oneD/Domain1D.cpp +++ b/src/oneD/Domain1D.cpp @@ -173,6 +173,11 @@ void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) } } +void Domain1D::restore(SolutionArray& arr, double* soln, int loglevel) +{ + throw CanteraError("Domain1D::restore", "Needs to be overloaded."); +} + void Domain1D::locate() { if (m_left) { diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index 756b2a3591f..fdfa90bb906 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -12,6 +12,7 @@ #include "cantera/oneD/refine.h" #include "cantera/numerics/funcs.h" #include "cantera/base/stringUtils.h" +#include "cantera/base/SolutionArray.h" #include "cantera/numerics/Func1.h" #include #include @@ -170,55 +171,63 @@ void Sim1D::restore(const std::string& fname, const std::string& id, } else if (extension == "h5" || extension == "hdf") { #if CT_USE_HIGHFIVE_HDF h5::File file(fname, h5::File::ReadOnly); - if (!file.exist(id) || file.getObjectType(id) != h5::ObjectType::Group) { - throw CanteraError("Sim1D::restore", - "No solution with id '{}'", id); - } - h5::Group grp = file.getGroup(id); + std::map> arrs; for (auto dom : m_dom) { - std::string dn = dom->id(); - if (!grp.exist(dn)|| grp.getObjectType(dn) != h5::ObjectType::Group) { - throw CanteraError("Sim1D::restore", - "Saved state '{}' does not contain a domain named '{}'.", id, dn); - } - size_t points; - try { - // determine size based on stored temperature - points = grp.getGroup(dn).getDataSet("T").getElementCount(); - } catch (exception& err) { - throw CanteraError("Sim1D::restore", - "Unable to determine domain size:\n{}", err.what()); - } - dom->resize(dom->nComponents(), points); + auto arr = SolutionArray::create(dom->solution()); 
+ arr->restore(fname, id + "/" + dom->id()); + dom->resize(dom->nComponents(), arr->size()); + arrs[dom->id()] = arr; } resize(); m_xlast_ts.clear(); - - throw CanteraError("Sim1D::restore", "Work in progress."); + for (auto dom : m_dom) { + auto arr = arrs[dom->id()]; + std::cout << dom->id() << ": size=" << arr->size() << std::endl; + if (arr->size()) { + arr->setIndex(arr->size() - 1); + auto state = arr->getState(); + std::cout << state[0] << " / " << state[1] << std::endl; + for (const auto& item : arr->getAuxiliary()) { + std::cout << "- " << item.first << "=" << item.second << std::endl; + } + } + } + for (auto dom : m_dom) { + dom->restore(*arrs[dom->id()], m_x.data() + dom->loc(), loglevel); + } + finalize(); #else throw CanteraError("Sim1D::restore", "Restoring from HDF requires HighFive installation."); #endif } else if (extension == "yaml" || extension == "yml") { AnyMap root = AnyMap::fromYamlFile(fname); - if (!root.hasKey(id)) { - throw InputFileError("Sim1D::restore", root, - "No solution with id '{}'", id); - } - const auto& state = root[id]; + std::map> arrs; + // const auto& state = root[id]; for (auto dom : m_dom) { - if (!state.hasKey(dom->id())) { - throw InputFileError("Sim1D::restore", state, - "Saved state '{}' does not contain a domain named '{}'.", - id, dom->id()); - } - dom->resize(dom->nComponents(), state[dom->id()]["points"].asInt()); + auto arr = SolutionArray::create(dom->solution()); + arr->restore(fname, id + "/" + dom->id()); + dom->resize(dom->nComponents(), arr->size()); + arrs[dom->id()] = arr; } resize(); m_xlast_ts.clear(); + // for (auto dom : m_dom) { + // auto arr = arrs[dom->id()]; + // std::cout << dom->id() << ": size=" << arr->size() << std::endl; + // if (arr->size()) { + // arr->setIndex(arr->size() - 1); + // auto state = arr->getState(); + // std::cout << state[0] << " / " << state[1] << std::endl; + // for (const auto& item : arr->getAuxiliary()) { + // std::cout << "- " << item.first << "=" << item.second << 
std::endl; + // } + // } + // } for (auto dom : m_dom) { - dom->restore(state[dom->id()].as(), m_x.data() + dom->loc(), - loglevel); + dom->restore(*arrs[dom->id()], m_x.data() + dom->loc(), loglevel); + // dom->restore(state[dom->id()].as(), m_x.data() + dom->loc(), + // loglevel); } finalize(); } else { diff --git a/src/oneD/StFlow.cpp b/src/oneD/StFlow.cpp index 5dc144614e1..20184806357 100644 --- a/src/oneD/StFlow.cpp +++ b/src/oneD/StFlow.cpp @@ -3,6 +3,7 @@ // This file is part of Cantera. See License.txt in the top-level directory or // at https://cantera.org/license.txt for license and copyright information. +#include "cantera/base/SolutionArray.h" #include "cantera/oneD/StFlow.h" #include "cantera/oneD/refine.h" #include "cantera/transport/Transport.h" @@ -763,7 +764,40 @@ void StFlow::restore(const AnyMap& state, double* soln, int loglevel) "component '{}' in domain '{}'.", name, id()); } } + setMeta(state); +} + +void StFlow::restore(SolutionArray& arr, double* soln, int loglevel) +{ + Domain1D::restore(arr.meta(), soln, loglevel); + arr.setIndex(0); + auto phase = arr.thermo(); + m_press = phase->pressure(); + const auto grid = arr.getComponent("grid"); + setupGrid(nPoints(), &grid[0]); + + for (size_t i = 0; i < nComponents(); i++) { + if (!componentActive(i)) { + continue; + } + std::string name = componentName(i); + if (arr.hasComponent(name)) { + const vector_fp data = arr.getComponent(name); + for (size_t j = 0; j < nPoints(); j++) { + soln[index(i,j)] = data[j]; + } + } else if (loglevel) { + warn_user("StFlow::restore", "Saved state does not contain values for " + "component '{}' in domain '{}'.", name, id()); + } + } + + setMeta(arr.meta()); +} + +void StFlow::setMeta(const AnyMap& state) +{ if (state.hasKey("energy-enabled")) { const AnyValue& ee = state["energy-enabled"]; if (ee.isScalar()) { From 30d396649a09dfafe370932bc6ec63abf67be761 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 29 Nov 2022 10:00:16 -0600 Subject: [PATCH 45/93] 
[base] Add SolutionArray::readHeader --- include/cantera/base/SolutionArray.h | 22 ++-- src/base/SolutionArray.cpp | 152 +++++++++++++++++++-------- 2 files changed, 127 insertions(+), 47 deletions(-) diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index 674fe7eedcd..478d243fb49 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -111,21 +111,31 @@ class SolutionArray /*! * Save the current SolutionArray to a container file. * - * @param fname Name of output container file - * @param id Identifier of SolutionArray within the container file + * @param fname Name of output container file + * @param id Identifier of SolutionArray within the container file */ void save(const std::string& fname, const std::string& id); + /*! + * Read header data from container file. + * + * @param fname Name of container file + * @param id Identifier of SolutionArray root within the container file + */ + static AnyMap readHeader(const std::string& fname, const std::string& id); + static AnyMap readHeader(const AnyMap& root, const std::string& id); +#if CT_USE_HIGHFIVE_HDF + static AnyMap readHeader(const HighFive::File& file, const std::string& id); +#endif + /*! * Restore SolutionArray from a container file. 
* - * @param fname Name of container file - * @param id Identifier of SolutionArray within the container file + * @param fname Name of container file + * @param id Identifier of SolutionArray within the container file */ void restore(const std::string& fname, const std::string& id); - void restore(const AnyMap& root, const std::string& id); - #if CT_USE_HIGHFIVE_HDF void restore(const HighFive::File& file, const std::string& id); #endif diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index 276a2a4a915..a3cfff13a9a 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -22,6 +22,18 @@ #include namespace h5 = HighFive; + +enum class H5Boolean { + FALSE = 0, + TRUE = 1, +}; + +h5::EnumType create_enum_boolean() { + return {{"FALSE", H5Boolean::FALSE}, + {"TRUE", H5Boolean::TRUE}}; +} + +HIGHFIVE_REGISTER_TYPE(H5Boolean, create_enum_boolean) #endif namespace Cantera @@ -179,6 +191,103 @@ void SolutionArray::save(const std::string& fname, const std::string& id) throw CanteraError("SolutionArray::save", "Not implemented."); } +AnyMap SolutionArray::readHeader(const std::string& fname, const std::string& id) +{ + size_t dot = fname.find_last_of("."); + std::string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot + 1)) : ""; + if (extension == "h5" || extension == "hdf") { +#if CT_USE_HIGHFIVE_HDF + return readHeader(h5::File(fname, h5::File::ReadOnly), id); +#else + throw CanteraError("SolutionArray::readHeader", + "Restoring from HDF requires HighFive installation."); +#endif + } + if (extension == "yaml" || extension == "yml") { + return readHeader(AnyMap::fromYamlFile(fname), id); + } + throw CanteraError("SolutionArray::readHeader", + "Unknown file extension '{}'", extension); +} + +#if CT_USE_HIGHFIVE_HDF +h5::Group locateH5Group(const h5::File& file, const std::string& id) +{ + std::vector tokens; + tokenizePath(id, tokens); + std::string grp = tokens[0]; + if (!file.exist(grp) || file.getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("locateH5Group", + "No group or solution with id '{}'", grp); + } + + std::string path = grp; + h5::Group sub = file.getGroup(grp); + tokens.erase(tokens.begin()); + for (auto& grp : tokens) { + path += "/" + grp; + if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("locateH5Group", + "No group or solution with id '{}'", path); + } + sub = sub.getGroup(grp); + } + return sub; +} + +AnyMap readH5Attributes(const h5::Group& sub, bool recursive) +{ + // restore meta data from attributes + AnyMap out; + for (auto& name : sub.listAttributeNames()) { + h5::Attribute attr = sub.getAttribute(name); + h5::DataType dtype = attr.getDataType(); + h5::DataTypeClass dclass = dtype.getClass(); + if (dclass == h5::DataTypeClass::Float) { + double value; + attr.read(value); + out[name] = value; + } else if (dclass == h5::DataTypeClass::Integer) { + int value; + attr.read(value); + out[name] = value; + } else if (dclass == h5::DataTypeClass::String) { + std::string value; + attr.read(value); + out[name] = value; + } else if (dclass == h5::DataTypeClass::Enum) { + // only booleans are supported + H5Boolean value; + attr.read(value); + out[name] = bool(value); + } 
else { + throw NotImplementedError("readH5Attributes", + "Unable to read attribute '{}' with type '{}'", name, dtype.string()); + } + } + + if (recursive) { + for (auto& name : sub.listObjectNames()) { + if (sub.getObjectType(name) == h5::ObjectType::Group) { + out[name] = readH5Attributes(sub.getGroup(name), recursive); + } + } + } + + return out; +} + +AnyMap SolutionArray::readHeader(const h5::File& file, const std::string& id) +{ + return readH5Attributes(locateH5Group(file, id), false); +} +#endif + +AnyMap SolutionArray::readHeader(const AnyMap& root, const std::string& id) +{ + throw CanteraError("SolutionArray::readHeader", "Not implemented."); +} + void SolutionArray::restore(const std::string& fname, const std::string& id) { size_t dot = fname.find_last_of("."); @@ -243,25 +352,7 @@ std::vector readH5FloatMatrix(h5::DataSet data, std::string id, void SolutionArray::restore(const h5::File& file, const std::string& id) { - std::vector tokens; - tokenizePath(id, tokens); - std::string grp = tokens[0]; - if (!file.exist(grp) || file.getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("SolutionArray::restore", - "No group or solution with id '{}'", grp); - } - - std::string path = grp; - h5::Group sub = file.getGroup(grp); - tokens.erase(tokens.begin()); - for (auto& grp : tokens) { - path += "/" + grp; - if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("SolutionArray::restore", - "No group or solution with id '{}'", path); - } - sub = sub.getGroup(grp); - } + auto sub = locateH5Group(file, id); std::set names; size_t nDims = npos; @@ -282,28 +373,7 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) initialize({}); - // restore meta data from attributes - for (auto& name : sub.listAttributeNames()) { - h5::Attribute attr = sub.getAttribute(name); - h5::DataType dtype = attr.getDataType(); - h5::DataTypeClass dclass = dtype.getClass(); - if (dclass == 
h5::DataTypeClass::Float) { - double value; - attr.read(value); - m_meta[name] = value; - } else if (dclass == h5::DataTypeClass::Integer) { - int value; - attr.read(value); - m_meta[name] = value; - } else if (dclass == h5::DataTypeClass::String) { - std::string value; - attr.read(value); - m_meta[name] = value; - } else { - throw NotImplementedError("SolutionArray::restore", - "Unable to read attribute '{}' with type '{}'", name, dtype.string()); - } - } + m_meta = readH5Attributes(sub, true); if (m_size == 0) { return; From 29fc674b6c8afaf1b4d9f310d7282a8e08072311 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 29 Nov 2022 11:40:31 -0600 Subject: [PATCH 46/93] [OneD] Convert legacy HDF header to meta data --- src/oneD/Sim1D.cpp | 108 +++++++++++++++++++++++++++++++++++---------- 1 file changed, 84 insertions(+), 24 deletions(-) diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index fdfa90bb906..ce89a757ecb 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -160,6 +160,86 @@ void Sim1D::saveResidual(const std::string& fname, const std::string& id, std::swap(res, m_x); } +AnyMap legacyH5(shared_ptr arr, const AnyMap& header={}) +{ + // convert data format used by Python h5py export (Cantera < 3.0) + auto meta = arr->meta(); + AnyMap out; + + std::map meta_pairs = { + {"type", "Domain1D_type"}, + {"name", "name"}, + {"emissivity-left", "emissivity_left"}, + {"emissivity-right", "emissivity_right"}, + }; + for (const auto& item : meta_pairs) { + if (meta.hasKey(item.second)) { + out[item.first] = meta[item.second]; + } + } + + std::map tol_pairs = { + {"transient-abstol", "transient_abstol"}, + {"steady-abstol", "steady_abstol"}, + {"transient-reltol", "transient_reltol"}, + {"steady-reltol", "steady_reltol"}, + }; + for (const auto& item : tol_pairs) { + if (meta.hasKey(item.second)) { + out["tolerances"][item.first] = meta[item.second]; + } + } + + if (meta.hasKey("phase")) { + out["phase"]["name"] = meta["phase"]["name"]; + 
out["phase"]["source"] = meta["phase"]["source"]; + } + + if (arr->size() <= 1) { + return out; + } + + std::map header_pairs = { + {"transport-model", "transport_model"}, + {"radiation-enabled", "radiation_enabled"}, + {"energy-enabled", "energy_enabled"}, + {"Soret-enabled", "soret_enabled"}, + {"species-enabled", "species_enabled"}, + }; + for (const auto& item : header_pairs) { + if (header.hasKey(item.second)) { + out[item.first] = header[item.second]; + } + } + + std::map refiner_pairs = { + {"ratio", "ratio"}, + {"slope", "slope"}, + {"curve", "curve"}, + {"prune", "prune"}, + // {"grid-min", "???"}, // missing + {"max-points", "max_grid_points"}, + }; + for (const auto& item : header_pairs) { + if (header.hasKey(item.second)) { + out["refine-criteria"][item.first] = header[item.second]; + } + } + + if (header.hasKey("fixed_temperature")) { + double temp = header.getDouble("fixed_temperature", -1.); + auto profile = arr->getComponent("T"); + size_t ix = 0; + while (profile[ix] <= temp && ix < arr->size()) { + ix++; + } + out["fixed-point"]["location"] = arr->getComponent("grid")[ix - 1]; + out["fixed-point"]["temperature"] = temp; + } + + return out; +} + void Sim1D::restore(const std::string& fname, const std::string& id, int loglevel) { @@ -172,26 +252,18 @@ void Sim1D::restore(const std::string& fname, const std::string& id, #if CT_USE_HIGHFIVE_HDF h5::File file(fname, h5::File::ReadOnly); std::map> arrs; + auto header = SolutionArray::readHeader(fname, id); for (auto dom : m_dom) { auto arr = SolutionArray::create(dom->solution()); arr->restore(fname, id + "/" + dom->id()); dom->resize(dom->nComponents(), arr->size()); + if (!header.hasKey("generator")) { + arr->meta() = legacyH5(arr, header); + } arrs[dom->id()] = arr; } resize(); m_xlast_ts.clear(); - for (auto dom : m_dom) { - auto arr = arrs[dom->id()]; - std::cout << dom->id() << ": size=" << arr->size() << std::endl; - if (arr->size()) { - arr->setIndex(arr->size() - 1); - auto state = 
arr->getState(); - std::cout << state[0] << " / " << state[1] << std::endl; - for (const auto& item : arr->getAuxiliary()) { - std::cout << "- " << item.first << "=" << item.second << std::endl; - } - } - } for (auto dom : m_dom) { dom->restore(*arrs[dom->id()], m_x.data() + dom->loc(), loglevel); } @@ -212,18 +284,6 @@ void Sim1D::restore(const std::string& fname, const std::string& id, } resize(); m_xlast_ts.clear(); - // for (auto dom : m_dom) { - // auto arr = arrs[dom->id()]; - // std::cout << dom->id() << ": size=" << arr->size() << std::endl; - // if (arr->size()) { - // arr->setIndex(arr->size() - 1); - // auto state = arr->getState(); - // std::cout << state[0] << " / " << state[1] << std::endl; - // for (const auto& item : arr->getAuxiliary()) { - // std::cout << "- " << item.first << "=" << item.second << std::endl; - // } - // } - // } for (auto dom : m_dom) { dom->restore(*arrs[dom->id()], m_x.data() + dom->loc(), loglevel); // dom->restore(state[dom->id()].as(), m_x.data() + dom->loc(), From 0d79adbfa6a54098b6a993371f5bae0917748f28 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 29 Nov 2022 14:58:15 -0600 Subject: [PATCH 47/93] [SCons] Add CPPDEFINE for HighFive on Windows --- SConstruct | 3 +++ 1 file changed, 3 insertions(+) diff --git a/SConstruct b/SConstruct index f28dc703240..01226dc56f7 100644 --- a/SConstruct +++ b/SConstruct @@ -2117,6 +2117,9 @@ env["external_libs"] = [] env["external_libs"].extend(env["sundials_libs"]) if env["has_highfive"]: + if env["OS"] == "Windows": + # see https://github.com/microsoft/vcpkg/issues/24293 + env.Append(CPPDEFINES=["H5_BUILT_AS_DYNAMIC_LIB"]) env["external_libs"].append("hdf5") if env["system_fmt"]: From 240f9bba196544ae86454ec894bab2de39801037 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 29 Nov 2022 15:14:16 -0600 Subject: [PATCH 48/93] [Python] Add utility to query HDF support --- interfaces/cython/cantera/_utils.pxd | 1 + interfaces/cython/cantera/_utils.pyx | 17 +++++++++++++++++ 
2 files changed, 18 insertions(+) diff --git a/interfaces/cython/cantera/_utils.pxd b/interfaces/cython/cantera/_utils.pxd index bb3abb8193a..c5f97894264 100644 --- a/interfaces/cython/cantera/_utils.pxd +++ b/interfaces/cython/cantera/_utils.pxd @@ -72,6 +72,7 @@ cdef extern from "cantera/base/global.h" namespace "Cantera": cdef void Cxx_suppress_thermo_warnings "Cantera::suppress_thermo_warnings" (cbool) cdef void Cxx_use_legacy_rate_constants "Cantera::use_legacy_rate_constants" (cbool) cdef string CxxGitCommit "Cantera::gitCommit" () + cdef cbool CxxUsesHighFive "Cantera::usesHighFive" () cdef cbool CxxDebugModeEnabled "Cantera::debugModeEnabled" () diff --git a/interfaces/cython/cantera/_utils.pyx b/interfaces/cython/cantera/_utils.pyx index 4a402d0159a..674a81271a7 100644 --- a/interfaces/cython/cantera/_utils.pyx +++ b/interfaces/cython/cantera/_utils.pyx @@ -93,6 +93,23 @@ def use_legacy_rate_constants(pybool legacy): """ Cxx_use_legacy_rate_constants(legacy) +def hdf_support(): + """ + Returns list of libraries that include HDF support: + - 'h5py': HDF support by Python package 'h5py'. + - 'HighFive': if Cantera was compiled with C++ HighFive HDF support. 
+ """ + out = [] + try: + pkg_resources.get_distribution("h5py") + except pkg_resources.DistributionNotFound: + pass + else: + out.append("h5py") + if CxxUsesHighFive(): + out.append("HighFive") + return set(out) + cdef Composition comp_map(X) except *: if isinstance(X, (str, bytes)): return parseCompString(stringify(X)) From d6ad4ec54fb66051f51da6de967bc41e8d6e554b Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 29 Nov 2022 20:26:37 -0600 Subject: [PATCH 49/93] [base] Implement SolutionArray::writeHeader --- include/cantera/base/SolutionArray.h | 16 ++++ src/base/SolutionArray.cpp | 107 ++++++++++++++++++++++++++- 2 files changed, 119 insertions(+), 4 deletions(-) diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index 478d243fb49..9df836ef751 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -108,6 +108,22 @@ class SolutionArray */ std::map getAuxiliary(size_t index=npos); + /*! + * Read header data from container file. + * + * @param fname Name of container file + * @param id Identifier of SolutionArray root within the container file + * @param desc Description + */ + static void writeHeader(const std::string& fname, const std::string& id, + const std::string& desc); + static void writeHeader(AnyMap& root, const std::string& id, + const std::string& desc); +#if CT_USE_HIGHFIVE_HDF + static void writeHeader(HighFive::File& file, const std::string& id, + const std::string& desc); +#endif + /*! * Save the current SolutionArray to a container file. 
* diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index a3cfff13a9a..c46d3de671d 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -12,6 +12,7 @@ #include "cantera/thermo/ThermoPhase.h" #include "cantera/thermo/SurfPhase.h" #include +#include #if CT_USE_HIGHFIVE_HDF #include @@ -186,6 +187,106 @@ std::map SolutionArray::getAuxiliary(size_t index) return out; } +AnyMap preamble(const std::string& desc) +{ + AnyMap data; + data["description"] = desc; + data["generator"] = "Cantera SolutionArray"; + data["cantera-version"] = CANTERA_VERSION; + data["git-commit"] = gitCommit(); + + // Add a timestamp indicating the current time + time_t aclock; + ::time(&aclock); // Get time in seconds + struct tm* newtime = localtime(&aclock); // Convert time to struct tm form + data["date"] = stripnonprint(asctime(newtime)); + + // Force metadata fields to the top of the file + data["description"].setLoc(-6, 0); + data["generator"].setLoc(-5, 0); + data["cantera-version"].setLoc(-4, 0); + data["git-commit"].setLoc(-3, 0); + data["date"].setLoc(-2, 0); + + + return data; +} + +void SolutionArray::writeHeader( + const std::string& fname, const std::string& id, const std::string& desc) +{ + size_t dot = fname.find_last_of("."); + std::string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot + 1)) : ""; + if (extension == "h5" || extension == "hdf") { +#if CT_USE_HIGHFIVE_HDF + h5::File out(fname, h5::File::OpenOrCreate); + writeHeader(out, id, desc); + return; +#else + throw CanteraError("SolutionArray::writeHeader", + "Saving to HDF requires HighFive installation."); +#endif + } + if (extension == "yaml" || extension == "yml") { + // Check for an existing file and load it if present + AnyMap data; + if (std::ifstream(fname).good()) { + data = AnyMap::fromYamlFile(fname); + } + writeHeader(data, id, desc); + + // Write the output file and remove the now-outdated cached file + std::ofstream out(fname); + out << data.toYamlString(); + AnyMap::clearCachedFile(fname); + return; + } + throw CanteraError("SolutionArray::writeHeader", + "Unknown file extension '{}'", extension); +} + +#if CT_USE_HIGHFIVE_HDF +h5::Group openH5Group(h5::File& file, const std::string& id) +{ + if (!file.exist(id)) { + return file.createGroup(id); + } + if (file.getObjectType(id) != h5::ObjectType::Group) { + throw CanteraError("openH5Group", "Invalid object with id '{}' exists", id); + } + return file.getGroup(id); +} + +void writeH5Attributes(h5::Group& sub, const AnyMap& meta) +{ + for (auto& item : meta) { + if (item.second.is()) { + std::string value = item.second.asString(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else { + throw NotImplementedError("writeH5Attributes", + "Unable to write attribute '{}' with type '{}'", + item.first, item.second.type_str()); + } + } +} + +void SolutionArray::writeHeader(h5::File& file, const std::string& id, + const std::string& desc) +{ + auto sub = openH5Group(file, id); + writeH5Attributes(sub, preamble(desc)); +} +#endif + +void SolutionArray::writeHeader(AnyMap& root, const std::string& id, + const std::string& desc) +{ + root[id] = preamble(desc); +} + void SolutionArray::save(const std::string& fname, const std::string& id) { throw 
CanteraError("SolutionArray::save", "Not implemented."); @@ -217,8 +318,7 @@ h5::Group locateH5Group(const h5::File& file, const std::string& id) tokenizePath(id, tokens); std::string grp = tokens[0]; if (!file.exist(grp) || file.getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("locateH5Group", - "No group or solution with id '{}'", grp); + throw CanteraError("locateH5Group", "No group with id '{}' found", grp); } std::string path = grp; @@ -227,8 +327,7 @@ h5::Group locateH5Group(const h5::File& file, const std::string& id) for (auto& grp : tokens) { path += "/" + grp; if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("locateH5Group", - "No group or solution with id '{}'", path); + throw CanteraError("locateH5Group", "No group with id '{}' found", path); } sub = sub.getGroup(grp); } From c64fd048e10b1f02684b5ad51f81372fd9b2d79b Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 29 Nov 2022 20:32:14 -0600 Subject: [PATCH 50/93] [OneD] Add alternative formats to Sim1D::save --- src/oneD/Sim1D.cpp | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index ce89a757ecb..bc06df1d695 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -105,6 +105,20 @@ void Sim1D::setProfile(size_t dom, size_t comp, void Sim1D::save(const std::string& fname, const std::string& id, const std::string& desc, int loglevel) { + size_t dot = fname.find_last_of("."); + string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot+1)) : ""; + if (extension == "h5" || extension == "hdf") { +#if CT_USE_HIGHFIVE_HDF + SolutionArray::writeHeader(fname, id, desc); + return; +#else + throw CanteraError("Sim1D::save", + "Saving to HDF requires HighFive installation."); +#endif + } else if (extension != "yaml" && extension != "yml") { + throw CanteraError("Sim1D::save", + "Unsupported file format '{}'", extension); + } // Check for an existing file and load it if present AnyMap data; if (ifstream(fname).good()) { From 7edf7c36f25aac2ce01896f13ea2a8e5f9a11fad Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 29 Nov 2022 20:42:23 -0600 Subject: [PATCH 51/93] [base] Create hdfUtils.h --- include/cantera/base/SolutionArray.h | 19 ++- src/base/SolutionArray.cpp | 220 +++++---------------------- src/base/hdfUtils.h | 208 +++++++++++++++++++++++++ 3 files changed, 264 insertions(+), 183 deletions(-) create mode 100644 src/base/hdfUtils.h diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index 9df836ef751..581b1dd511c 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -109,14 +109,12 @@ class SolutionArray std::map getAuxiliary(size_t index=npos); /*! - * Read header data from container file. + * Write header data to container file. * - * @param fname Name of container file + * @param root Root location * @param id Identifier of SolutionArray root within the container file * @param desc Description */ - static void writeHeader(const std::string& fname, const std::string& id, - const std::string& desc); static void writeHeader(AnyMap& root, const std::string& id, const std::string& desc); #if CT_USE_HIGHFIVE_HDF @@ -124,13 +122,24 @@ class SolutionArray const std::string& desc); #endif + /*! + * Write SolutionArray data to container file. 
+ * + * @param root Root location + * @param id Identifier of SolutionArray within the container file + */ + void writeEntry(AnyMap& root, const std::string& id); +#if CT_USE_HIGHFIVE_HDF + void writeEntry(HighFive::File& file, const std::string& id); +#endif + /*! * Save the current SolutionArray to a container file. * * @param fname Name of output container file * @param id Identifier of SolutionArray within the container file */ - void save(const std::string& fname, const std::string& id); + void save(const std::string& fname, const std::string& id, const std::string& desc); /*! * Read header data from container file. diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index c46d3de671d..92f91c5b399 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -15,26 +15,7 @@ #include #if CT_USE_HIGHFIVE_HDF -#include -#include -#include -#include -#include -#include - -namespace h5 = HighFive; - -enum class H5Boolean { - FALSE = 0, - TRUE = 1, -}; - -h5::EnumType create_enum_boolean() { - return {{"FALSE", H5Boolean::FALSE}, - {"TRUE", H5Boolean::TRUE}}; -} - -HIGHFIVE_REGISTER_TYPE(H5Boolean, create_enum_boolean) +#include "hdfUtils.h" #endif namespace Cantera @@ -208,19 +189,55 @@ AnyMap preamble(const std::string& desc) data["git-commit"].setLoc(-3, 0); data["date"].setLoc(-2, 0); - return data; } -void SolutionArray::writeHeader( +#if CT_USE_HIGHFIVE_HDF +void SolutionArray::writeHeader(h5::File& file, const std::string& id, + const std::string& desc) +{ + auto sub = openH5Group(file, id); + writeH5Attributes(sub, preamble(desc)); +} +#endif + +void SolutionArray::writeHeader(AnyMap& root, const std::string& id, + const std::string& desc) +{ + root[id] = preamble(desc); +} + +#if CT_USE_HIGHFIVE_HDF +void SolutionArray::writeEntry(h5::File& file, const std::string& id) +{ + auto sub = openH5Group(file, id); + writeH5Attributes(sub, m_meta); + // throw CanteraError("SolutionArray::save", "Not implemented."); +} +#endif + +void 
SolutionArray::writeEntry(AnyMap& root, const std::string& id) +{ + throw CanteraError("SolutionArray::save", "Not implemented."); + + // bool preexisting = data.hasKey(id); + + // // If this is not replacing an existing solution, put it at the end + // if (!preexisting) { + // data[id].setLoc(INT_MAX, 0); + // } +} + +void SolutionArray::save( const std::string& fname, const std::string& id, const std::string& desc) { size_t dot = fname.find_last_of("."); std::string extension = (dot != npos) ? toLowerCopy(fname.substr(dot + 1)) : ""; if (extension == "h5" || extension == "hdf") { #if CT_USE_HIGHFIVE_HDF - h5::File out(fname, h5::File::OpenOrCreate); - writeHeader(out, id, desc); + h5::File file(fname, h5::File::OpenOrCreate); + writeHeader(file, id, desc); + writeEntry(file, id); return; #else throw CanteraError("SolutionArray::writeHeader", @@ -233,7 +250,7 @@ void SolutionArray::writeHeader( if (std::ifstream(fname).good()) { data = AnyMap::fromYamlFile(fname); } - writeHeader(data, id, desc); + writeEntry(data, id); // Write the output file and remove the now-outdated cached file std::ofstream out(fname); @@ -245,53 +262,6 @@ void SolutionArray::writeHeader( "Unknown file extension '{}'", extension); } -#if CT_USE_HIGHFIVE_HDF -h5::Group openH5Group(h5::File& file, const std::string& id) -{ - if (!file.exist(id)) { - return file.createGroup(id); - } - if (file.getObjectType(id) != h5::ObjectType::Group) { - throw CanteraError("openH5Group", "Invalid object with id '{}' exists", id); - } - return file.getGroup(id); -} - -void writeH5Attributes(h5::Group& sub, const AnyMap& meta) -{ - for (auto& item : meta) { - if (item.second.is()) { - std::string value = item.second.asString(); - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(value)); - attr.write(value); - } else { - throw NotImplementedError("writeH5Attributes", - "Unable to write attribute '{}' with type '{}'", - item.first, item.second.type_str()); - } - } -} - -void 
SolutionArray::writeHeader(h5::File& file, const std::string& id, - const std::string& desc) -{ - auto sub = openH5Group(file, id); - writeH5Attributes(sub, preamble(desc)); -} -#endif - -void SolutionArray::writeHeader(AnyMap& root, const std::string& id, - const std::string& desc) -{ - root[id] = preamble(desc); -} - -void SolutionArray::save(const std::string& fname, const std::string& id) -{ - throw CanteraError("SolutionArray::save", "Not implemented."); -} - AnyMap SolutionArray::readHeader(const std::string& fname, const std::string& id) { size_t dot = fname.find_last_of("."); @@ -312,70 +282,6 @@ AnyMap SolutionArray::readHeader(const std::string& fname, const std::string& id } #if CT_USE_HIGHFIVE_HDF -h5::Group locateH5Group(const h5::File& file, const std::string& id) -{ - std::vector tokens; - tokenizePath(id, tokens); - std::string grp = tokens[0]; - if (!file.exist(grp) || file.getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("locateH5Group", "No group with id '{}' found", grp); - } - - std::string path = grp; - h5::Group sub = file.getGroup(grp); - tokens.erase(tokens.begin()); - for (auto& grp : tokens) { - path += "/" + grp; - if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("locateH5Group", "No group with id '{}' found", path); - } - sub = sub.getGroup(grp); - } - return sub; -} - -AnyMap readH5Attributes(const h5::Group& sub, bool recursive) -{ - // restore meta data from attributes - AnyMap out; - for (auto& name : sub.listAttributeNames()) { - h5::Attribute attr = sub.getAttribute(name); - h5::DataType dtype = attr.getDataType(); - h5::DataTypeClass dclass = dtype.getClass(); - if (dclass == h5::DataTypeClass::Float) { - double value; - attr.read(value); - out[name] = value; - } else if (dclass == h5::DataTypeClass::Integer) { - int value; - attr.read(value); - out[name] = value; - } else if (dclass == h5::DataTypeClass::String) { - std::string value; - attr.read(value); - 
out[name] = value; - } else if (dclass == h5::DataTypeClass::Enum) { - // only booleans are supported - H5Boolean value; - attr.read(value); - out[name] = bool(value); - } else { - throw NotImplementedError("readH5Attributes", - "Unable to read attribute '{}' with type '{}'", name, dtype.string()); - } - } - - if (recursive) { - for (auto& name : sub.listObjectNames()) { - if (sub.getObjectType(name) == h5::ObjectType::Group) { - out[name] = readH5Attributes(sub.getGroup(name), recursive); - } - } - } - - return out; -} - AnyMap SolutionArray::readHeader(const h5::File& file, const std::string& id) { return readH5Attributes(locateH5Group(file, id), false); @@ -407,48 +313,6 @@ void SolutionArray::restore(const std::string& fname, const std::string& id) } #if CT_USE_HIGHFIVE_HDF -vector_fp readH5FloatVector(h5::DataSet data, std::string id, size_t size) -{ - if (data.getDataType().getClass() != h5::DataTypeClass::Float) { - throw CanteraError("readH5FloatVector", - "Type of DataSet '{}' is inconsistent; expected HDF float.", id); - } - if (data.getElementCount() != size) { - throw CanteraError("readH5FloatVector", - "Size of DataSet '{}' is inconsistent; expected {} elements but " - "received {} elements.", id, size, data.getElementCount()); - } - vector_fp out; - data.read(out); - return out; -} - -std::vector readH5FloatMatrix(h5::DataSet data, std::string id, - size_t rows, size_t cols) -{ - if (data.getDataType().getClass() != h5::DataTypeClass::Float) { - throw CanteraError("readH5FloatMatrix", - "Type of DataSet '{}' is inconsistent; expected HDF float.", id); - } - h5::DataSpace space = data.getSpace(); - if (space.getNumberDimensions() != 2) { - throw CanteraError("readH5FloatMatrix", - "Shape of DataSet '{}' is inconsistent; expected two dimensions.", id); - } - const auto& shape = space.getDimensions(); - if (shape[0] != rows) { - throw CanteraError("readH5FloatMatrix", - "Shape of DataSet '{}' is inconsistent; expected {} rows.", id, rows); - } - if 
(shape[1] != cols) { - throw CanteraError("readH5FloatMatrix", - "Shape of DataSet '{}' is inconsistent; expected {} columns.", id, cols); - } - std::vector out; - data.read(out); - return out; -} - void SolutionArray::restore(const h5::File& file, const std::string& id) { auto sub = locateH5Group(file, id); diff --git a/src/base/hdfUtils.h b/src/base/hdfUtils.h new file mode 100644 index 00000000000..ef9b0ec450e --- /dev/null +++ b/src/base/hdfUtils.h @@ -0,0 +1,208 @@ +//! @file hdfUtils.h + +// This file is part of Cantera. See License.txt in the top-level directory or +// at https://cantera.org/license.txt for license and copyright information. + +#ifndef CT_HDF_UTILS_H +#define CT_HDF_UTILS_H + +#include +#include +#include +#include +#include +#include + +namespace h5 = HighFive; + +enum class H5Boolean { + FALSE = 0, + TRUE = 1, +}; + +h5::EnumType create_enum_boolean() { + return {{"FALSE", H5Boolean::FALSE}, + {"TRUE", H5Boolean::TRUE}}; +} + +HIGHFIVE_REGISTER_TYPE(H5Boolean, create_enum_boolean) + +namespace Cantera +{ + +h5::Group locateH5Group(const h5::File& file, const std::string& id) +{ + std::vector tokens; + tokenizePath(id, tokens); + std::string grp = tokens[0]; + if (!file.exist(grp) || file.getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("locateH5Group", "No group with id '{}' found", grp); + } + + std::string path = grp; + h5::Group sub = file.getGroup(grp); + tokens.erase(tokens.begin()); + for (auto& grp : tokens) { + path += "/" + grp; + if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("locateH5Group", "No group with id '{}' found", path); + } + sub = sub.getGroup(grp); + } + return sub; +} + +h5::Group openH5Group(h5::File& file, const std::string& id) +{ + if (!file.exist(id)) { + return file.createGroup(id); + } + if (file.getObjectType(id) != h5::ObjectType::Group) { + throw CanteraError("openH5Group", "Invalid object with id '{}' exists", id); + } + return 
file.getGroup(id); +} + +AnyMap readH5Attributes(const h5::Group& sub, bool recursive) +{ + // restore meta data from attributes + AnyMap out; + for (auto& name : sub.listAttributeNames()) { + h5::Attribute attr = sub.getAttribute(name); + h5::DataType dtype = attr.getDataType(); + h5::DataTypeClass dclass = dtype.getClass(); + if (dclass == h5::DataTypeClass::Float) { + double value; + attr.read(value); + out[name] = value; + } else if (dclass == h5::DataTypeClass::Integer) { + int value; + attr.read(value); + out[name] = value; + } else if (dclass == h5::DataTypeClass::String) { + std::string value; + attr.read(value); + out[name] = value; + } else if (dclass == h5::DataTypeClass::Enum) { + // only booleans are supported + if (attr.getStorageSize() > 1) { + std::vector values; + attr.read(values); + std::vector bValues; + for (auto v : values) { + bValues.push_back(bool(v)); + } + out[name] = bValues; + } else { + H5Boolean value; + attr.read(value); + out[name] = bool(value); + } + } else { + throw NotImplementedError("readH5Attributes", + "Unable to read attribute '{}' with type '{}'", name, dtype.string()); + } + } + + if (recursive) { + for (auto& name : sub.listObjectNames()) { + if (sub.getObjectType(name) == h5::ObjectType::Group) { + out[name] = readH5Attributes(sub.getGroup(name), recursive); + } + } + } + + return out; +} + +void writeH5Attributes(h5::Group& sub, const AnyMap& meta) +{ + for (auto& item : meta) { + if (item.second.is()) { + double value = item.second.asDouble(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is() || item.second.is()) { + int value = item.second.asInt(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is()) { + std::string value = item.second.asString(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + 
attr.write(value); + } else if (item.second.is()) { + bool bValue = item.second.asBool(); + H5Boolean value = bValue ? H5Boolean::TRUE : H5Boolean::FALSE; + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is>()) { + auto bValue = item.second.as>(); + std::vector value; + for (auto b : bValue) { + value.push_back(b ? H5Boolean::TRUE : H5Boolean::FALSE); + } + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is()) { + // step into recursion + auto value = item.second.as(); + auto grp = sub.createGroup(item.first); + writeH5Attributes(grp, value); + } else { + throw NotImplementedError("writeH5Attributes", + "Unable to write attribute '{}' with type '{}'", + item.first, item.second.type_str()); + } + } +} + +vector_fp readH5FloatVector(h5::DataSet data, std::string id, size_t size) +{ + if (data.getDataType().getClass() != h5::DataTypeClass::Float) { + throw CanteraError("readH5FloatVector", + "Type of DataSet '{}' is inconsistent; expected HDF float.", id); + } + if (data.getElementCount() != size) { + throw CanteraError("readH5FloatVector", + "Size of DataSet '{}' is inconsistent; expected {} elements but " + "received {} elements.", id, size, data.getElementCount()); + } + vector_fp out; + data.read(out); + return out; +} + +std::vector readH5FloatMatrix(h5::DataSet data, std::string id, + size_t rows, size_t cols) +{ + if (data.getDataType().getClass() != h5::DataTypeClass::Float) { + throw CanteraError("readH5FloatMatrix", + "Type of DataSet '{}' is inconsistent; expected HDF float.", id); + } + h5::DataSpace space = data.getSpace(); + if (space.getNumberDimensions() != 2) { + throw CanteraError("readH5FloatMatrix", + "Shape of DataSet '{}' is inconsistent; expected two dimensions.", id); + } + const auto& shape = space.getDimensions(); + if (shape[0] != rows) { + throw 
CanteraError("readH5FloatMatrix", + "Shape of DataSet '{}' is inconsistent; expected {} rows.", id, rows); + } + if (shape[1] != cols) { + throw CanteraError("readH5FloatMatrix", + "Shape of DataSet '{}' is inconsistent; expected {} columns.", id, cols); + } + std::vector out; + data.read(out); + return out; +} + +} + +#endif From 06a0436fa3d96cb39792ce8aaf75a24eeeeb5f35 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Wed, 30 Nov 2022 01:10:08 -0600 Subject: [PATCH 52/93] [base] Add SolutionArray::setState --- include/cantera/base/SolutionArray.h | 8 +++++++- src/base/SolutionArray.cpp | 23 ++++++++++++++++++++--- 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index 581b1dd511c..893b1df90ae 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -94,7 +94,7 @@ class SolutionArray /*! * Update the buffered index used to access entries. */ - void setIndex(size_t index); + void setIndex(size_t index, bool restore=true); /*! * Retrieve the state vector for a single entry. If index is valid, it is updated; @@ -102,6 +102,12 @@ class SolutionArray */ vector_fp getState(size_t index=npos); + /*! + * Set the state vector for a single entry. If index is valid, it is updated; + * otherwise, the last previously used index is referenced. + */ + void setState(const vector_fp& data, size_t index=npos); + /*! * Retrieve auxiliary data for a single entry. If index is valid, it is updated; * otherwise, the last previously used index is referenced. 
diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index 92f91c5b399..ab2814ed1d8 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -97,6 +97,10 @@ vector_fp SolutionArray::getComponent(const std::string& name) const void SolutionArray::setComponent( const std::string& name, const vector_fp& data, bool force) { + if (!m_work) { + initialize(); + } + if (!hasComponent(name)) { if (force) { m_other.emplace(name, std::make_shared(m_size)); @@ -127,8 +131,11 @@ void SolutionArray::setComponent( } } -void SolutionArray::setIndex(size_t index) +void SolutionArray::setIndex(size_t index, bool restore) { + if (!m_work) { + initialize(); + } if (m_size == 0) { throw CanteraError("SolutionArray::setIndex", "Unable to set index in empty SolutionArray."); @@ -144,8 +151,10 @@ void SolutionArray::setIndex(size_t index) throw IndexError("SolutionArray::setIndex", "entries", index, m_size - 1); } m_index = index; - size_t nState = m_sol->thermo()->stateSize(); - m_sol->thermo()->restoreState(nState, &m_data[m_index * m_stride]); + if (restore) { + size_t nState = m_sol->thermo()->stateSize(); + m_sol->thermo()->restoreState(nState, &m_data[m_index * m_stride]); + } } vector_fp SolutionArray::getState(size_t index) @@ -157,6 +166,14 @@ vector_fp SolutionArray::getState(size_t index) return out; } +void SolutionArray::setState(const vector_fp& data, size_t index) +{ + setIndex(index, false); + m_sol->thermo()->restoreState(data); + size_t nState = m_sol->thermo()->stateSize(); + m_sol->thermo()->saveState(nState, &m_data[m_index * m_stride]); +} + std::map SolutionArray::getAuxiliary(size_t index) { setIndex(index); From bdfcbcf07746b42ed09033d1278183da4d7bf6b4 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Wed, 30 Nov 2022 01:12:16 -0600 Subject: [PATCH 53/93] [OneD] Implement Domain::asArray converter --- include/cantera/oneD/Boundary1D.h | 2 ++ include/cantera/oneD/Domain1D.h | 9 +++++++++ include/cantera/oneD/StFlow.h | 2 ++ 
src/oneD/Boundary1D.cpp | 27 ++++++++++++++++++++++++++ src/oneD/Domain1D.cpp | 12 +++++++++++- src/oneD/Sim1D.cpp | 8 +++++++- src/oneD/StFlow.cpp | 32 ++++++++++++++++++++++++++++--- 7 files changed, 87 insertions(+), 5 deletions(-) diff --git a/include/cantera/oneD/Boundary1D.h b/include/cantera/oneD/Boundary1D.h index cba4b4705fe..e73986dae50 100644 --- a/include/cantera/oneD/Boundary1D.h +++ b/include/cantera/oneD/Boundary1D.h @@ -136,6 +136,7 @@ class Inlet1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; + virtual std::shared_ptr asArray(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); @@ -221,6 +222,7 @@ class Outlet1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; + virtual std::shared_ptr asArray(const double* soln) const; virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; diff --git a/include/cantera/oneD/Domain1D.h b/include/cantera/oneD/Domain1D.h index e5f737b042b..96aa3af7a89 100644 --- a/include/cantera/oneD/Domain1D.h +++ b/include/cantera/oneD/Domain1D.h @@ -316,6 +316,12 @@ class Domain1D */ virtual AnyMap serialize(const double* soln) const; + //! Save the state of this domain as a SolutionArray + /*! + * @param soln local solution vector for this domain + */ + virtual std::shared_ptr asArray(const double* soln) const; + //! Restore the solution for this domain from an AnyMap /*! * @param[in] state AnyMap defining the state of this domain @@ -485,6 +491,9 @@ class Domain1D } protected: + //! 
Retrieve meta data + virtual AnyMap getMeta() const; + doublereal m_rdt; size_t m_nv; size_t m_points; diff --git a/include/cantera/oneD/StFlow.h b/include/cantera/oneD/StFlow.h index a2996ed6846..594c13d4183 100644 --- a/include/cantera/oneD/StFlow.h +++ b/include/cantera/oneD/StFlow.h @@ -158,6 +158,7 @@ class StFlow : public Domain1D virtual void showSolution(const doublereal* x); virtual AnyMap serialize(const double* soln) const; + virtual std::shared_ptr asArray(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); @@ -284,6 +285,7 @@ class StFlow : public Domain1D protected: void setMeta(const AnyMap& state); + virtual AnyMap getMeta() const; doublereal wdot(size_t k, size_t j) const { return m_wdot(k,j); diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index f6159ea8ef9..75998a2b189 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -235,6 +235,25 @@ AnyMap Inlet1D::serialize(const double* soln) const return state; } +std::shared_ptr Inlet1D::asArray(const double* soln) const +{ + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "inlet"; + meta["mass-flux"] = m_mdot; + + // set gas state (using pressure from adjacent domain) + m_flow->setGas(soln, 0); + double pressure = m_flow->phase().pressure(); + auto phase = m_solution->thermo(); + phase->setState_TPY(m_temp, pressure, &m_yin[0]); + vector_fp data(phase->stateSize()); + phase->saveState(data); + + auto arr = SolutionArray::create(m_solution, 1, meta); + arr->setState(data, 0); + return arr; +} + void Inlet1D::restore(const AnyMap& state, double* soln, int loglevel) { Boundary1D::restore(state, soln, loglevel); @@ -420,6 +439,14 @@ AnyMap Outlet1D::serialize(const double* soln) const return state; } +std::shared_ptr Outlet1D::asArray(const double* soln) const +{ + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "outlet"; + auto arr = 
SolutionArray::create(m_solution, 0, meta); + return arr; +} + // -------- OutletRes1D -------- void OutletRes1D::setMoleFractions(const std::string& xres) diff --git a/src/oneD/Domain1D.cpp b/src/oneD/Domain1D.cpp index d4bfae62eab..7a28f40913c 100644 --- a/src/oneD/Domain1D.cpp +++ b/src/oneD/Domain1D.cpp @@ -114,7 +114,7 @@ void Domain1D::needJacUpdate() } } -AnyMap Domain1D::serialize(const double* soln) const +AnyMap Domain1D::getMeta() const { auto wrap_tols = [this](const vector_fp& tols) { // If all tolerances are the same, just store the scalar value. @@ -141,6 +141,16 @@ AnyMap Domain1D::serialize(const double* soln) const return state; } +AnyMap Domain1D::serialize(const double* soln) const +{ + return getMeta(); +} + +std::shared_ptr Domain1D::asArray(const double* soln) const +{ + throw CanteraError("Domain1D::asArray", "Needs to be overloaded."); +} + void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) { auto set_tols = [&](const AnyValue& tols, const string& which, vector_fp& out) diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index bc06df1d695..bb63073a25b 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -109,7 +109,13 @@ void Sim1D::save(const std::string& fname, const std::string& id, string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot+1)) : ""; if (extension == "h5" || extension == "hdf") { #if CT_USE_HIGHFIVE_HDF - SolutionArray::writeHeader(fname, id, desc); + h5::File file(fname, h5::File::OpenOrCreate); + SolutionArray::writeHeader(file, id, desc); + + for (auto dom : m_dom) { + auto arr = dom->asArray(m_x.data()); + arr->writeEntry(file, id + "/" + dom->id()); + } return; #else throw CanteraError("Sim1D::save", diff --git a/src/oneD/StFlow.cpp b/src/oneD/StFlow.cpp index 20184806357..5d1a9ac9cf0 100644 --- a/src/oneD/StFlow.cpp +++ b/src/oneD/StFlow.cpp @@ -680,11 +680,10 @@ bool StFlow::componentActive(size_t n) const } } -AnyMap StFlow::serialize(const double* soln) const +AnyMap StFlow::getMeta() const { - AnyMap state = Domain1D::serialize(soln); + AnyMap state = Domain1D::getMeta(); state["type"] = flowType(); - state["pressure"] = m_press; state["transport-model"] = m_trans->transportModel(); state["phase"]["name"] = m_thermo->name(); @@ -729,6 +728,16 @@ AnyMap StFlow::serialize(const double* soln) const state["fixed-point"]["temperature"] = m_tfixed; } + return state; +} + +AnyMap StFlow::serialize(const double* soln) const +{ + auto state = getMeta(); + + // m_rho + + state["pressure"] = m_press; state["grid"] = m_z; vector_fp data(nPoints()); for (size_t i = 0; i < nComponents(); i++) { @@ -743,6 +752,23 @@ AnyMap StFlow::serialize(const double* soln) const return state; } +std::shared_ptr StFlow::asArray(const double* soln) const +{ + auto arr = SolutionArray::create(m_solution, nPoints(), getMeta()); + arr->setComponent("grid", m_z, true); + vector_fp data(nPoints()); + for (size_t i = 0; i < nComponents(); i++) { + if (componentActive(i)) { + for (size_t j = 0; j < nPoints(); j++) { + data[j] = soln[index(i, j)]; + } + arr->setComponent(componentName(i), data, true); + } + } + arr->setComponent("D", m_rho); // use density rather than pressure + return arr; +} + void StFlow::restore(const AnyMap& state, double* soln, int loglevel) { 
Domain1D::restore(state, soln, loglevel); From 4c140a8625a5aeeb5fad322fb9eaed2360d14d57 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Wed, 30 Nov 2022 11:45:17 -0600 Subject: [PATCH 54/93] [Base] Implement SolutionArray::writeEntry --- src/base/SolutionArray.cpp | 42 ++++++++++++++++++++++++++++++-------- src/base/hdfUtils.h | 37 ++++++++++++++++++++++++--------- 2 files changed, 60 insertions(+), 19 deletions(-) diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index ab2814ed1d8..a18bbf682a1 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -229,7 +229,31 @@ void SolutionArray::writeEntry(h5::File& file, const std::string& id) { auto sub = openH5Group(file, id); writeH5Attributes(sub, m_meta); - // throw CanteraError("SolutionArray::save", "Not implemented."); + + if (!m_size) { + return; + } + + const auto& nativeState = m_sol->thermo()->nativeState(); + size_t nSpecies = m_sol->thermo()->nSpecies(); + for (auto& state : nativeState) { + std::string name = state.first; + if (name == "X" || name == "Y") { + size_t offset = state.second; + std::vector prop; + for (size_t i = 0; i < m_size; i++) { + size_t first = offset + i * m_stride; + prop.push_back(vector_fp(&m_data[first], &m_data[first + nSpecies])); + } + writeH5FloatMatrix(sub, name, prop); + } else { + writeH5FloatVector(sub, name, getComponent(name)); + } + } + + for (auto& other : m_other) { + writeH5FloatVector(sub, other.first, *(other.second)); + } } #endif @@ -346,7 +370,7 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) } } } - if (nDims != 1) { + if (nDims != 1 && nDims != npos) { throw NotImplementedError("SolutionArray::restore", "Unable to restore SolutionArray with {} dimensions.", nDims); } @@ -394,23 +418,23 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) if (usesNativeState) { // native state can be written directly into data storage for (const auto& name : state) { - h5::DataSet data = 
sub.getDataSet(name); + // h5::DataSet data = sub.getDataSet(name); if (name == "X" || name == "Y") { size_t offset = nativeState.find(name)->second; - auto prop = readH5FloatMatrix(data, name, m_size, nSpecies); + auto prop = readH5FloatMatrix(sub, name, m_size, nSpecies); for (size_t i = 0; i < m_size; i++) { std::copy(prop[i].begin(), prop[i].end(), &m_data[offset + i * m_stride]); } } else { - setComponent(name, readH5FloatVector(data, name, m_size)); + setComponent(name, readH5FloatVector(sub, name, m_size)); } } } else if (mode == "TPX") { // data format used by Python h5py export (Cantera 2.5) - vector_fp T = readH5FloatVector(sub.getDataSet("T"), "T", m_size); - vector_fp P = readH5FloatVector(sub.getDataSet("P"), "P", m_size); - auto X = readH5FloatMatrix(sub.getDataSet("X"), "X", m_size, nSpecies); + vector_fp T = readH5FloatVector(sub, "T", m_size); + vector_fp P = readH5FloatVector(sub, "P", m_size); + auto X = readH5FloatMatrix(sub, "X", m_size, nSpecies); for (size_t i = 0; i < m_size; i++) { m_sol->thermo()->setState_TPX(T[i], P[i], X[i].data()); m_sol->thermo()->saveState(nState, &m_data[i * m_stride]); @@ -423,7 +447,7 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) // restore other data for (const auto& name : names) { if (!state.count(name)) { - vector_fp data = readH5FloatVector(sub.getDataSet(name), name, m_size); + vector_fp data = readH5FloatVector(sub, name, m_size); m_other.emplace(name, std::make_shared(m_size)); auto& extra = m_other[name]; std::copy(data.begin(), data.end(), extra->begin()); diff --git a/src/base/hdfUtils.h b/src/base/hdfUtils.h index ef9b0ec450e..01396e96b78 100644 --- a/src/base/hdfUtils.h +++ b/src/base/hdfUtils.h @@ -85,7 +85,7 @@ AnyMap readH5Attributes(const h5::Group& sub, bool recursive) out[name] = value; } else if (dclass == h5::DataTypeClass::Enum) { // only booleans are supported - if (attr.getStorageSize() > 1) { + if (attr.getSpace().getElementCount() > 1) { std::vector values; 
attr.read(values); std::vector bValues; @@ -161,30 +161,47 @@ void writeH5Attributes(h5::Group& sub, const AnyMap& meta) } } -vector_fp readH5FloatVector(h5::DataSet data, std::string id, size_t size) +void writeH5FloatVector(h5::Group& sub, std::string id, vector_fp data) { - if (data.getDataType().getClass() != h5::DataTypeClass::Float) { + std::vector dims{data.size()}; + h5::DataSet dataset = sub.createDataSet(id, h5::DataSpace(dims)); + dataset.write(data); +} + +vector_fp readH5FloatVector(h5::Group& sub, std::string id, size_t size) +{ + h5::DataSet dataset = sub.getDataSet(id); + if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { throw CanteraError("readH5FloatVector", "Type of DataSet '{}' is inconsistent; expected HDF float.", id); } - if (data.getElementCount() != size) { + if (dataset.getElementCount() != size) { throw CanteraError("readH5FloatVector", "Size of DataSet '{}' is inconsistent; expected {} elements but " - "received {} elements.", id, size, data.getElementCount()); + "received {} elements.", id, size, dataset.getElementCount()); } vector_fp out; - data.read(out); + dataset.read(out); return out; } -std::vector readH5FloatMatrix(h5::DataSet data, std::string id, +void writeH5FloatMatrix(h5::Group& sub, std::string id, std::vector data) +{ + std::vector dims{data.size()}; + dims.push_back(data.size() ? 
data[0].size() : 0); + h5::DataSet dataset = sub.createDataSet(id, h5::DataSpace(dims)); + dataset.write(data); +} + +std::vector readH5FloatMatrix(h5::Group& sub, std::string id, size_t rows, size_t cols) { - if (data.getDataType().getClass() != h5::DataTypeClass::Float) { + h5::DataSet dataset = sub.getDataSet(id); + if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { throw CanteraError("readH5FloatMatrix", "Type of DataSet '{}' is inconsistent; expected HDF float.", id); } - h5::DataSpace space = data.getSpace(); + h5::DataSpace space = dataset.getSpace(); if (space.getNumberDimensions() != 2) { throw CanteraError("readH5FloatMatrix", "Shape of DataSet '{}' is inconsistent; expected two dimensions.", id); @@ -199,7 +216,7 @@ std::vector readH5FloatMatrix(h5::DataSet data, std::string id, "Shape of DataSet '{}' is inconsistent; expected {} columns.", id, cols); } std::vector out; - data.read(out); + dataset.read(out); return out; } From 8891d271cd43bcdc95ddbb45de86ee995a26c57e Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Wed, 30 Nov 2022 12:27:11 -0600 Subject: [PATCH 55/93] [oneD] Update properties for StFlow::restore --- src/oneD/StFlow.cpp | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/oneD/StFlow.cpp b/src/oneD/StFlow.cpp index 5d1a9ac9cf0..bddd44dfdd4 100644 --- a/src/oneD/StFlow.cpp +++ b/src/oneD/StFlow.cpp @@ -692,7 +692,6 @@ AnyMap StFlow::getMeta() const state["radiation-enabled"] = m_do_radiation; if (m_do_radiation) { - state["radiative-heat-loss"] = m_qdotRadiation; state["emissivity-left"] = m_epsilon_left; state["emissivity-right"] = m_epsilon_right; } @@ -749,6 +748,10 @@ AnyMap StFlow::serialize(const double* soln) const } } + if (m_do_radiation) { + state["radiative-heat-loss"] = m_qdotRadiation; + } + return state; } @@ -766,6 +769,11 @@ std::shared_ptr StFlow::asArray(const double* soln) const } } arr->setComponent("D", m_rho); // use density rather than pressure + + if 
(m_do_radiation) { + arr->setComponent("radiative-heat-loss", m_qdotRadiation, true); + } + return arr; } @@ -790,6 +798,8 @@ void StFlow::restore(const AnyMap& state, double* soln, int loglevel) "component '{}' in domain '{}'.", name, id()); } } + + updateProperties(npos, soln + loc(), 0, m_points - 1); setMeta(state); } @@ -819,6 +829,7 @@ void StFlow::restore(SolutionArray& arr, double* soln, int loglevel) } } + updateProperties(npos, soln + loc(), 0, m_points - 1); setMeta(arr.meta()); } From 0df1f97f6a1443e6f33a6fee906e80688b88e201 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Wed, 30 Nov 2022 21:07:35 -0600 Subject: [PATCH 56/93] [AnyMap] Add AnyMap::keys --- include/cantera/base/AnyMap.h | 4 ++++ src/base/AnyMap.cpp | 11 +++++++++++ 2 files changed, 15 insertions(+) diff --git a/include/cantera/base/AnyMap.h b/include/cantera/base/AnyMap.h index b20045fd894..54f51c852e2 100644 --- a/include/cantera/base/AnyMap.h +++ b/include/cantera/base/AnyMap.h @@ -12,6 +12,7 @@ #include #include +#include namespace boost { @@ -447,6 +448,9 @@ class AnyMap : public AnyBase //! messages, for example std::string keys_str() const; + //! Return an unordered set of keys + std::set keys() const; + //! Set a metadata value that applies to this AnyMap and its children. //! Mainly for internal use in reading or writing from files. 
void setMetadata(const std::string& key, const AnyValue& value); diff --git a/src/base/AnyMap.cpp b/src/base/AnyMap.cpp index 0da576385b7..47d557f7545 100644 --- a/src/base/AnyMap.cpp +++ b/src/base/AnyMap.cpp @@ -1442,6 +1442,17 @@ std::string AnyMap::keys_str() const return to_string(b); } +std::set AnyMap::keys() const +{ + std::set out; + auto iter = this->begin(); + while (iter != this->end()) { + out.insert(iter->first); + ++iter; + } + return out; +} + void AnyMap::propagateMetadata(shared_ptr& metadata) { m_metadata = metadata; From 57e114128f28631d4ebfed3c0287b7fa987dc23d Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Wed, 30 Nov 2022 18:16:56 -0600 Subject: [PATCH 57/93] [base] Streamline SolutionArray --- include/cantera/base/SolutionArray.h | 28 ++- include/cantera/oneD/Boundary1D.h | 5 + include/cantera/oneD/Sim1D.h | 12 ++ src/base/SolutionArray.cpp | 301 ++++++++++++++++++--------- src/oneD/Boundary1D.cpp | 64 ++++++ src/oneD/Sim1D.cpp | 75 +++++-- 6 files changed, 368 insertions(+), 117 deletions(-) diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index 893b1df90ae..d0dd79680f0 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -150,10 +150,9 @@ class SolutionArray /*! * Read header data from container file. * - * @param fname Name of container file - * @param id Identifier of SolutionArray root within the container file + * @param root Root location + * @param id Identifier of SolutionArray within the file structure */ - static AnyMap readHeader(const std::string& fname, const std::string& id); static AnyMap readHeader(const AnyMap& root, const std::string& id); #if CT_USE_HIGHFIVE_HDF static AnyMap readHeader(const HighFive::File& file, const std::string& id); @@ -162,16 +161,29 @@ class SolutionArray /*! * Restore SolutionArray from a container file. 
* - * @param fname Name of container file - * @param id Identifier of SolutionArray within the container file + * @param root Root location + * @param id Identifier of SolutionArray within the file structure */ - void restore(const std::string& fname, const std::string& id); - void restore(const AnyMap& root, const std::string& id); + void readEntry(const AnyMap& root, const std::string& id); #if CT_USE_HIGHFIVE_HDF - void restore(const HighFive::File& file, const std::string& id); + void readEntry(const HighFive::File& file, const std::string& id); #endif + /*! + * Restore SolutionArray from a container file. + * + * @param fname Name of container file + * @param id Identifier of SolutionArray within the container file + */ + AnyMap restore(const std::string& fname, const std::string& id); + protected: + //! Detect storage mode of state data + std::string detectMode(std::set names, bool native=true); + + //! Retrieve set containing list of properties defining state + std::set stateProperties(std::string mode, bool alias=false); + shared_ptr m_sol; //!< Solution object associated with state data size_t m_size; //!< Number of entries in SolutionArray size_t m_stride; //!< Stride between SolutionArray entries diff --git a/include/cantera/oneD/Boundary1D.h b/include/cantera/oneD/Boundary1D.h index e73986dae50..1944cdb6ccf 100644 --- a/include/cantera/oneD/Boundary1D.h +++ b/include/cantera/oneD/Boundary1D.h @@ -172,6 +172,7 @@ class Empty1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; + virtual std::shared_ptr asArray(const double* soln) const; virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -197,6 +198,7 @@ class Symm1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; + virtual std::shared_ptr asArray(const double* soln) const; virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -255,6 +257,7 @@ class 
OutletRes1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; + virtual std::shared_ptr asArray(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); @@ -288,6 +291,7 @@ class Surf1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; + virtual std::shared_ptr asArray(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); @@ -325,6 +329,7 @@ class ReactingSurf1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; + virtual std::shared_ptr asArray(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); diff --git a/include/cantera/oneD/Sim1D.h b/include/cantera/oneD/Sim1D.h index 06a52135c0c..e9c7c20ea01 100644 --- a/include/cantera/oneD/Sim1D.h +++ b/include/cantera/oneD/Sim1D.h @@ -121,6 +121,12 @@ class Sim1D : public OneDim void save(const std::string& fname, const std::string& id, const std::string& desc, int loglevel=1); + /** + * Save the current solution to YAML (legacy implementation). @see save + */ + void write_yaml(const std::string& fname, const std::string& id, + const std::string& desc, int loglevel=1); + /** * Save the residual of the current solution to a container file. * @param fname Name of output container file @@ -139,6 +145,12 @@ class Sim1D : public OneDim */ void restore(const std::string& fname, const std::string& id, int loglevel=2); + /** + * Initialize the solution with a previously-saved solution (legacy implementation). 
+ * @see restore + */ + void read_yaml(const std::string& fname, const std::string& id, int loglevel=2); + //! @} // @deprecated To be removed after Cantera 3.0 (unused) diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index a18bbf682a1..3d1ffe4fade 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -21,6 +21,20 @@ namespace Cantera { +const std::map aliasMap = { + {"T", "temperature"}, + {"P", "pressure"}, + {"D", "density"}, + {"Y", "mass-fractions"}, + {"X", "mole-fractions"}, + {"C", "coverages"}, + {"U", "specific-internal-energy"}, + {"V", "specific-volume"}, + {"H", "specific-enthalpy"}, + {"S", "specific-entropy"}, + {"Q", "vapor-fraction"}, +}; + SolutionArray::SolutionArray( const shared_ptr& sol, size_t size, @@ -257,16 +271,82 @@ void SolutionArray::writeEntry(h5::File& file, const std::string& id) } #endif +AnyMap& openField(AnyMap& root, const std::string& id) +{ + // locate field based on 'id' + std::vector tokens; + tokenizePath(id, tokens); + AnyMap* ptr = &root; // use raw pointer to avoid copying + std::string path = ""; + for (auto& field : tokens) { + path += "/" + field; + AnyMap& sub = *ptr; + if (sub.hasKey(field) && !sub[field].is()) { + throw CanteraError("openField", + "Encountered invalid existing field '{}'", path); + } else if (!sub.hasKey(field)) { + sub[field] = AnyMap(); + } + ptr = &sub[field].as(); // AnyMap lacks 'operator=' for const AnyMap + } + return *ptr; +} + void SolutionArray::writeEntry(AnyMap& root, const std::string& id) { - throw CanteraError("SolutionArray::save", "Not implemented."); + AnyMap& data = openField(root, id); + bool preexisting = !data.empty(); + data["points"] = int(m_size); + data.update(m_meta); - // bool preexisting = data.hasKey(id); + for (auto& other : m_other) { + data[other.first] = *(other.second); + } + + auto phase = m_sol->thermo(); + if (m_size == 1) { + setIndex(0); + data["temperature"] = phase->temperature(); + data["pressure"] = 
phase->pressure(); + auto surf = std::dynamic_pointer_cast(phase); + auto nSpecies = phase->nSpecies(); + vector_fp values(nSpecies); + if (surf) { + surf->invalidateCache(); + surf->getCoverages(&values[0]); + } else { + phase->getMassFractions(&values[0]); + } + AnyMap items; + for (size_t k = 0; k < nSpecies; k++) { + if (values[k] != 0.0) { + items[phase->speciesName(k)] = values[k]; + } + } + if (surf) { + data["coverages"] = std::move(items); + } else { + data["mass-fractions"] = std::move(items); + } + } else if (m_size > 1) { + const auto& nativeState = phase->nativeState(); + for (auto& state : nativeState) { + std::string name = state.first; + if (name == "X" || name == "Y") { + data["basis"] = name == "X" ? "mole" : "mass"; + for (auto& name : phase->speciesNames()) { + data[name] = getComponent(name); + } + } else { + data[name] = getComponent(name); + } + } + } - // // If this is not replacing an existing solution, put it at the end - // if (!preexisting) { - // data[id].setLoc(INT_MAX, 0); - // } + // If this is not replacing an existing solution, put it at the end + if (!preexisting) { + data.setLoc(INT_MAX, 0); + } } void SolutionArray::save( @@ -292,6 +372,7 @@ void SolutionArray::save( data = AnyMap::fromYamlFile(fname); } writeEntry(data, id); + writeHeader(data, id, desc); // Write the output file and remove the now-outdated cached file std::ofstream out(fname); @@ -303,25 +384,6 @@ void SolutionArray::save( "Unknown file extension '{}'", extension); } -AnyMap SolutionArray::readHeader(const std::string& fname, const std::string& id) -{ - size_t dot = fname.find_last_of("."); - std::string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot + 1)) : ""; - if (extension == "h5" || extension == "hdf") { -#if CT_USE_HIGHFIVE_HDF - return readHeader(h5::File(fname, h5::File::ReadOnly), id); -#else - throw CanteraError("SolutionArray::readHeader", - "Restoring from HDF requires HighFive installation."); -#endif - } - if (extension == "yaml" || extension == "yml") { - return readHeader(AnyMap::fromYamlFile(fname), id); - } - throw CanteraError("SolutionArray::readHeader", - "Unknown file extension '{}'", extension); -} - #if CT_USE_HIGHFIVE_HDF AnyMap SolutionArray::readHeader(const h5::File& file, const std::string& id) { @@ -334,29 +396,90 @@ AnyMap SolutionArray::readHeader(const AnyMap& root, const std::string& id) throw CanteraError("SolutionArray::readHeader", "Not implemented."); } -void SolutionArray::restore(const std::string& fname, const std::string& id) +AnyMap SolutionArray::restore(const std::string& fname, const std::string& id) { size_t dot = fname.find_last_of("."); std::string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot + 1)) : ""; if (extension == "h5" || extension == "hdf") { #if CT_USE_HIGHFIVE_HDF - restore(h5::File(fname, h5::File::ReadOnly), id); + h5::File file(fname, h5::File::ReadOnly); + readEntry(file, id); + return readHeader(file, id); #else throw CanteraError("SolutionArray::restore", "Restoring from HDF requires HighFive installation."); #endif - } else if (extension == "yaml" || extension == "yml") { - restore(AnyMap::fromYamlFile(fname), id); + } + if (extension == "yaml" || extension == "yml") { + const AnyMap& root = AnyMap::fromYamlFile(fname); + readEntry(root, id); + return readHeader(root, id); + } + throw CanteraError("SolutionArray::restore", + "Unknown file extension '{}'", extension); +} + +std::string SolutionArray::detectMode(std::set names, bool native) +{ + // identify storage mode of state data + std::string mode = ""; + const auto& nativeState = m_sol->thermo()->nativeState(); + bool usesNativeState; + auto surf = std::dynamic_pointer_cast(m_sol->thermo()); + for (const auto& item : m_sol->thermo()->fullStates()) { + bool found = true; + std::string name; + usesNativeState = true; + for (size_t i = 0; i < item.size(); i++) { + name = std::string(1, item[i]); + if (surf && (name == "X" || name == "Y")) { + // override native state + name = "C"; + usesNativeState = false; + break; + } + if (names.count(name)) { + usesNativeState &= nativeState.count(name); + } else if (aliasMap.count(name) && names.count(aliasMap.at(name))) { + usesNativeState &= nativeState.count(name); + } else { + found = false; + break; + } + } + if (found) { + mode = (name == "C") ? item.substr(0, 2) + "C" : item; + break; + } + } + if (usesNativeState && native) { + return "native"; + } + return mode; +} + +std::set SolutionArray::stateProperties(std::string mode, bool alias) +{ + std::set states; + if (mode == "native") { + for (const auto& item : m_sol->thermo()->nativeState()) { + states.insert(alias ? 
aliasMap.at(item.first) : item.first); + } } else { - throw CanteraError("SolutionArray::restore", - "Unknown file extension '{}'", extension); + for (const auto& m : mode) { + const std::string name = std::string(1, m); + states.insert(alias ? aliasMap.at(name) : name); + } } + + return states; } #if CT_USE_HIGHFIVE_HDF -void SolutionArray::restore(const h5::File& file, const std::string& id) +void SolutionArray::readEntry(const h5::File& file, const std::string& id) { auto sub = locateH5Group(file, id); + m_meta = readH5Attributes(sub, true); std::set names; size_t nDims = npos; @@ -377,50 +500,28 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) initialize({}); - m_meta = readH5Attributes(sub, true); - if (m_size == 0) { return; } - // identify storage mode of state data - std::string mode = ""; - const auto& nativeState = m_sol->thermo()->nativeState(); - bool usesNativeState; - std::set state; - for (const auto& item : m_sol->thermo()->fullStates()) { - bool found = true; - usesNativeState = true; - state.clear(); - for (size_t i = 0; i < item.size(); i++) { - std::string name(1, item[i]); - if (names.count(name)) { - state.insert(name); - usesNativeState &= nativeState.count(name); - } else { - found = false; - break; - } - } - if (found) { - mode = item; - break; - } - } - if (mode == "") { - throw CanteraError("SolutionArray::restore", - "Data are not consistent with full state modes."); + // determine storage mode of state data + std::string mode = detectMode(names); + std::set states = stateProperties(mode); + if (states.count("C")) { + states.erase("C"); + states.insert("X"); } // restore state data size_t nSpecies = m_sol->thermo()->nSpecies(); size_t nState = m_sol->thermo()->stateSize(); - if (usesNativeState) { + const auto& nativeStates = m_sol->thermo()->nativeState(); + if (mode == "native") { // native state can be written directly into data storage - for (const auto& name : state) { - // h5::DataSet data = 
sub.getDataSet(name); + for (const auto& item : nativeStates) { + std::string name = item.first; if (name == "X" || name == "Y") { - size_t offset = nativeState.find(name)->second; + size_t offset = item.second; auto prop = readH5FloatMatrix(sub, name, m_size, nSpecies); for (size_t i = 0; i < m_size; i++) { std::copy(prop[i].begin(), prop[i].end(), @@ -430,7 +531,7 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) setComponent(name, readH5FloatVector(sub, name, m_size)); } } - } else if (mode == "TPX") { + } else if (mode == "TPX" || mode == "TPC") { // data format used by Python h5py export (Cantera 2.5) vector_fp T = readH5FloatVector(sub, "T", m_size); vector_fp P = readH5FloatVector(sub, "P", m_size); @@ -439,6 +540,9 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) m_sol->thermo()->setState_TPX(T[i], P[i], X[i].data()); m_sol->thermo()->saveState(nState, &m_data[i * m_stride]); } + } else if (mode == "") { + throw CanteraError("SolutionArray::restore", + "Data are not consistent with full state modes."); } else { throw NotImplementedError("SolutionArray::restore", "Import of '{}' data is not supported.", mode); @@ -446,7 +550,7 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) // restore other data for (const auto& name : names) { - if (!state.count(name)) { + if (!states.count(name)) { vector_fp data = readH5FloatVector(sub, name, m_size); m_other.emplace(name, std::make_shared(m_size)); auto& extra = m_other[name]; @@ -456,9 +560,9 @@ void SolutionArray::restore(const h5::File& file, const std::string& id) } #endif -void SolutionArray::restore(const AnyMap& root, const std::string& id) +const AnyMap& locateField(const AnyMap& root, const std::string& id) { - // locate SolutionArray based on 'id' + // locate field based on 'id' std::vector tokens; tokenizePath(id, tokens); const AnyMap* ptr = &root; // use raw pointer to avoid copying @@ -472,7 +576,12 @@ void 
SolutionArray::restore(const AnyMap& root, const std::string& id) } ptr = &sub[field].as(); // AnyMap lacks 'operator=' for const AnyMap } - const AnyMap& sub = *ptr; + return *ptr; +} + +void SolutionArray::readEntry(const AnyMap& root, const std::string& id) +{ + auto sub = locateField(root, id); // set size and initialize m_size = sub.getInt("points", 0); @@ -484,48 +593,45 @@ void SolutionArray::restore(const AnyMap& root, const std::string& id) // restore data std::set exclude = {"points", "X", "Y"}; + std::set names = sub.keys(); + size_t nState = m_sol->thermo()->stateSize(); if (m_size == 0) { // no data points } else if (m_size == 1) { // single data point - double T = sub["temperature"].asDouble(); - double P = sub.getDouble("pressure", OneAtm); // missing - Sim1D (Cantera 2.6) - std::set props = {"temperature", "pressure"}; - exclude.insert(props.begin(), props.end()); - if (sub.hasKey("mass-fractions")) { + std::string mode = detectMode(names, false); + if (mode == "") { + // missing property - Sim1D (Cantera 2.6) + names.insert("pressure"); + mode = detectMode(names, false); + } + if (mode == "TPY") { + // single data point uses long names + double T = sub["temperature"].asDouble(); + double P = sub.getDouble("pressure", OneAtm); // missing - Sim1D (Cantera 2.6) auto Y = sub["mass-fractions"].asMap(); m_sol->thermo()->setState_TPY(T, P, Y); - exclude.insert("mass-fractions"); - } else if (sub.hasKey("coverages")) { - m_sol->thermo()->setState_TP(T, P); - auto cov = sub["coverages"].asMap(); - exclude.insert("coverages"); + } else if (mode == "TPC") { auto surf = std::dynamic_pointer_cast(m_sol->thermo()); if (!surf) { throw CanteraError("SolutionArray::restore", "Restoring of coverages requires surface phase"); } + double T = sub["temperature"].asDouble(); + double P = sub.getDouble("pressure", OneAtm); // missing - Sim1D (Cantera 2.6) + m_sol->thermo()->setState_TP(T, P); + auto cov = sub["coverages"].asMap(); surf->setCoveragesByName(cov); + } else 
if (mode == "") { + throw CanteraError("SolutionArray::restore", + "Data are not consistent with full state modes."); } else { throw NotImplementedError("SolutionArray::restore", - "Unknown YAML serialization format."); - } - for (const auto& prop : m_sol->thermo()->nativeState()) { - if (prop.first == "T") { - m_data[prop.second] = m_sol->thermo()->temperature(); - } else if (prop.first == "D") { - m_data[prop.second] = m_sol->thermo()->density(); - } else if (prop.first == "P") { - m_data[prop.second] = m_sol->thermo()->pressure(); - } else if (prop.first == "Y") { - m_sol->thermo()->getMassFractions(&m_data[prop.second]); - } else if (prop.first == "X") { - m_sol->thermo()->getMoleFractions(&m_data[prop.second]); - } else { - throw NotImplementedError("SolutionArray::restore", - "Unable to restore property '{}'.", prop.first); - } + "Import of '{}' data is not supported.", mode); } + m_sol->thermo()->saveState(nState, m_data); + auto props = stateProperties(mode, true); + exclude.insert(props.begin(), props.end()); } else { // multiple data points const auto& nativeState = m_sol->thermo()->nativeState(); @@ -576,6 +682,7 @@ void SolutionArray::restore(const AnyMap& root, const std::string& id) m_meta[item.first] = item.second; } } + m_meta.erase("points"); } } diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index 75998a2b189..f90e36b8fed 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -308,6 +308,14 @@ AnyMap Empty1D::serialize(const double* soln) const return state; } +std::shared_ptr Empty1D::asArray(const double* soln) const +{ + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "empty"; + auto arr = SolutionArray::create(m_solution, 0, meta); + return arr; +} + // -------------- Symm1D -------------- void Symm1D::init() @@ -361,6 +369,14 @@ AnyMap Symm1D::serialize(const double* soln) const return state; } +std::shared_ptr Symm1D::asArray(const double* soln) const +{ + AnyMap meta = Boundary1D::getMeta(); + 
meta["type"] = "symmetry"; + auto arr = SolutionArray::create(m_solution, 0, meta); + return arr; +} + // -------- Outlet1D -------- OutletRes1D::OutletRes1D() @@ -560,6 +576,25 @@ AnyMap OutletRes1D::serialize(const double* soln) const return state; } +std::shared_ptr OutletRes1D::asArray(const double* soln) const +{ + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "outlet-reservoir"; + meta["temperature"] = m_temp; + + // set gas state (using pressure from adjacent domain) + m_flow->setGas(soln, 0); + double pressure = m_flow->phase().pressure(); + auto phase = m_solution->thermo(); + phase->setState_TPY(m_temp, pressure, &m_yres[0]); + vector_fp data(phase->stateSize()); + phase->saveState(data); + + auto arr = SolutionArray::create(m_solution, 1, meta); + arr->setState(data, 0); + return arr; +} + void OutletRes1D::restore(const AnyMap& state, double* soln, int loglevel) { Boundary1D::restore(state, soln, loglevel); @@ -633,6 +668,15 @@ AnyMap Surf1D::serialize(const double* soln) const return state; } +std::shared_ptr Surf1D::asArray(const double* soln) const +{ + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "surface"; + meta["temperature"] = m_temp; + auto arr = SolutionArray::create(m_solution, 0, meta); + return arr; +} + void Surf1D::restore(const AnyMap& state, double* soln, int loglevel) { Boundary1D::restore(state, soln, loglevel); @@ -827,6 +871,26 @@ AnyMap ReactingSurf1D::serialize(const double* soln) const return state; } +std::shared_ptr ReactingSurf1D::asArray(const double* soln) const +{ + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "reacting-surface"; + meta["temperature"] = m_temp; + meta["phase"]["name"] = m_sphase->name(); + AnyValue source = m_sphase->input().getMetadata("filename"); + meta["phase"]["source"] = source.empty() ? 
"" : source.asString(); + + // set state of surface phase + m_sphase->setState_TP(m_temp, m_sphase->pressure()); + m_sphase->setCoverages(soln); + vector_fp data(m_sphase->stateSize()); + m_sphase->saveState(data.size(), &data[0]); + + auto arr = SolutionArray::create(m_solution, 1, meta); + arr->setState(data, 0); + return arr; +} + void ReactingSurf1D::restore(const AnyMap& state, double* soln, int loglevel) { Boundary1D::restore(state, soln, loglevel); diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index bb63073a25b..6af3af2f4dd 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -110,21 +110,43 @@ void Sim1D::save(const std::string& fname, const std::string& id, if (extension == "h5" || extension == "hdf") { #if CT_USE_HIGHFIVE_HDF h5::File file(fname, h5::File::OpenOrCreate); - SolutionArray::writeHeader(file, id, desc); - for (auto dom : m_dom) { auto arr = dom->asArray(m_x.data()); arr->writeEntry(file, id + "/" + dom->id()); } + SolutionArray::writeHeader(file, id, desc); return; #else throw CanteraError("Sim1D::save", "Saving to HDF requires HighFive installation."); #endif - } else if (extension != "yaml" && extension != "yml") { - throw CanteraError("Sim1D::save", - "Unsupported file format '{}'", extension); } + if (extension == "yaml" || extension == "yml") { + // Check for an existing file and load it if present + AnyMap data; + if (std::ifstream(fname).good()) { + data = AnyMap::fromYamlFile(fname); + } + SolutionArray::writeHeader(data, id, desc); + + for (auto dom : m_dom) { + auto arr = dom->asArray(m_x.data()); + arr->writeEntry(data, id + "/" + dom->id()); + } + + // Write the output file and remove the now-outdated cached file + std::ofstream out(fname); + out << data.toYamlString(); + AnyMap::clearCachedFile(fname); + return; + } + throw CanteraError("Sim1D::save", + "Unsupported file format '{}'", extension); +} + +void Sim1D::write_yaml(const std::string& fname, const std::string& id, + const std::string& desc, int loglevel) +{ 
// Check for an existing file and load it if present AnyMap data; if (ifstream(fname).good()) { @@ -240,7 +262,7 @@ AnyMap legacyH5(shared_ptr arr, const AnyMap& header={}) // {"grid-min", "???"}, // missing {"max-points", "max_grid_points"}, }; - for (const auto& item : header_pairs) { + for (const auto& item : refiner_pairs) { if (header.hasKey(item.second)) { out["refine-criteria"][item.first] = header[item.second]; } @@ -272,10 +294,10 @@ void Sim1D::restore(const std::string& fname, const std::string& id, #if CT_USE_HIGHFIVE_HDF h5::File file(fname, h5::File::ReadOnly); std::map> arrs; - auto header = SolutionArray::readHeader(fname, id); + auto header = SolutionArray::readHeader(file, id); for (auto dom : m_dom) { auto arr = SolutionArray::create(dom->solution()); - arr->restore(fname, id + "/" + dom->id()); + arr->readEntry(file, id + "/" + dom->id()); dom->resize(dom->nComponents(), arr->size()); if (!header.hasKey("generator")) { arr->meta() = legacyH5(arr, header); @@ -295,10 +317,9 @@ void Sim1D::restore(const std::string& fname, const std::string& id, } else if (extension == "yaml" || extension == "yml") { AnyMap root = AnyMap::fromYamlFile(fname); std::map> arrs; - // const auto& state = root[id]; for (auto dom : m_dom) { auto arr = SolutionArray::create(dom->solution()); - arr->restore(fname, id + "/" + dom->id()); + arr->readEntry(root, id + "/" + dom->id()); dom->resize(dom->nComponents(), arr->size()); arrs[dom->id()] = arr; } @@ -306,8 +327,6 @@ void Sim1D::restore(const std::string& fname, const std::string& id, m_xlast_ts.clear(); for (auto dom : m_dom) { dom->restore(*arrs[dom->id()], m_x.data() + dom->loc(), loglevel); - // dom->restore(state[dom->id()].as(), m_x.data() + dom->loc(), - // loglevel); } finalize(); } else { @@ -316,6 +335,38 @@ void Sim1D::restore(const std::string& fname, const std::string& id, } } +void Sim1D::read_yaml(const std::string& fname, const std::string& id, + int loglevel) +{ + size_t dot = fname.find_last_of("."); 
+ string extension = (dot != npos) ? toLowerCopy(fname.substr(dot+1)) : ""; + if (extension == "xml") { + throw CanteraError("Sim1D::restore", + "Restoring from XML is no longer supported."); + } + AnyMap root = AnyMap::fromYamlFile(fname); + if (!root.hasKey(id)) { + throw InputFileError("Sim1D::restore", root, + "No solution with id '{}'", id); + } + const auto& state = root[id]; + for (auto dom : m_dom) { + if (!state.hasKey(dom->id())) { + throw InputFileError("Sim1D::restore", state, + "Saved state '{}' does not contain a domain named '{}'.", + id, dom->id()); + } + dom->resize(dom->nComponents(), state[dom->id()]["points"].asInt()); + } + resize(); + m_xlast_ts.clear(); + for (auto dom : m_dom) { + dom->restore(state[dom->id()].as(), m_x.data() + dom->loc(), + loglevel); + } + finalize(); +} + void Sim1D::setFlatProfile(size_t dom, size_t comp, doublereal v) { size_t np = domain(dom).nPoints(); From d9c576016c03676b007967792027fb126690c6c8 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 1 Dec 2022 11:19:24 -0600 Subject: [PATCH 58/93] [Python] Add methods to test pre-existing YAML converters --- interfaces/cython/cantera/_onedim.pxd | 2 ++ interfaces/cython/cantera/_onedim.pyx | 17 +++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/interfaces/cython/cantera/_onedim.pxd b/interfaces/cython/cantera/_onedim.pxd index 7e8ed4d8eaf..12900b565fd 100644 --- a/interfaces/cython/cantera/_onedim.pxd +++ b/interfaces/cython/cantera/_onedim.pxd @@ -125,6 +125,8 @@ cdef extern from "cantera/oneD/Sim1D.h": vector[double] getRefineCriteria(int) except +translate_exception void save(string, string, string, int) except +translate_exception void restore(string, string, int) except +translate_exception + void write_yaml(string, string, string, int) except +translate_exception + void read_yaml(string, string, int) except +translate_exception void writeStats(int) except +translate_exception void clearStats() void resize() except +translate_exception diff 
--git a/interfaces/cython/cantera/_onedim.pyx b/interfaces/cython/cantera/_onedim.pyx index 9389b8887ac..433bb450f2a 100644 --- a/interfaces/cython/cantera/_onedim.pyx +++ b/interfaces/cython/cantera/_onedim.pyx @@ -1482,6 +1482,15 @@ cdef class Sim1D: self.sim.save(stringify(str(filename)), stringify(name), stringify(description), loglevel) + def write_yaml(self, filename, name='solution', description='none', + quiet=True): + """ + Save the solution in YAML format (legacy implementation) + """ + loglevel = 1 - quiet + self.sim.write_yaml(stringify(str(filename)), stringify(name), + stringify(description), loglevel) + def restore(self, filename='soln.yaml', name='solution', loglevel=2): """Set the solution vector to a previously-saved solution. @@ -1498,6 +1507,14 @@ cdef class Sim1D: self.sim.restore(stringify(str(filename)), stringify(name), loglevel) self._initialized = True + def read_yaml(self, filename, name='solution', description='none', quiet=True): + """ + Set the solution vector to a previously-saved solution (legacy implementation) + """ + loglevel = 2 * (1 - quiet) + self.sim.read_yaml(stringify(str(filename)), stringify(name), loglevel) + self._initialized = True + def restore_time_stepping_solution(self): """ Set the current solution vector to the last successful time-stepping From 169aa7e1a96c188cd9b18b72295b4ed67239979a Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 1 Dec 2022 14:01:31 -0600 Subject: [PATCH 59/93] [SCons] Install HighFive submodule on demand --- .gitmodules | 3 ++ SConstruct | 52 +++++++++++++++++++++++++++++--- ext/HighFive | 1 + ext/SConscript | 8 +++++ include/cantera/base/config.h.in | 1 + src/base/hdfUtils.h | 21 +++++++++---- src/oneD/Sim1D.cpp | 8 +++-- 7 files changed, 81 insertions(+), 13 deletions(-) create mode 160000 ext/HighFive diff --git a/.gitmodules b/.gitmodules index 61b5a9e6d22..1ae62107777 100644 --- a/.gitmodules +++ b/.gitmodules @@ -13,3 +13,6 @@ [submodule "ext/yaml-cpp"] path = ext/yaml-cpp url = 
https://github.com/jbeder/yaml-cpp.git +[submodule "ext/HighFive"] + path = ext/HighFive + url = https://github.com/BlueBrain/HighFive.git diff --git a/SConstruct b/SConstruct index 01226dc56f7..d014456e139 100644 --- a/SConstruct +++ b/SConstruct @@ -356,6 +356,15 @@ config_options = [ must include the shared version of the library, for example, 'libfmt.so'.""", "default", ("default", "y", "n")), + EnumOption( + "system_highfive", + """Select whether to use HighFive from a system installation ('y'), from a + Git submodule ('n'), or to decide automatically ('default'). If HighFive + is not installed directly into a system include directory, for example, it + is installed in '/opt/include/HighFive', then you will need to add + '/opt/include/HighFive' to 'extra_inc_dirs'. + """, + "default", ("default", "y", "n")), EnumOption( "system_yamlcpp", """Select whether to use the yaml-cpp library from a system installation @@ -1497,8 +1506,42 @@ else: # env['system_sundials'] == 'n' env['sundials_version'] = '5.3' env['has_sundials_lapack'] = int(env['use_lapack']) -env["has_highfive"] = conf.CheckLibWithHeader( - "hdf5", "highfive/H5File.hpp", language="C++", autoadd=False) +if not conf.CheckLib("hdf5", autoadd=False): + env["uses_highfive"] = False + +elif env["system_highfive"] in ("n", "default"): + env["system_highfive"] = False + if not os.path.exists("ext/eigen/HighFive/include"): + if not os.path.exists(".git"): + config_error("HighFive is missing. 
Install HighFive in ext/HighFive.") + + try: + code = subprocess.call(["git", "submodule", "update", "--init", + "--recursive", "ext/HighFive"]) + except Exception: + code = -1 + if code: + config_error("HighFive not found and submodule checkout failed.\n" + "Try manually checking out the submodule with:\n\n" + " git submodule update --init --recursive ext/HighFive\n") + + env["uses_highfive"] = conf.CheckLibWithHeader( + "hdf5", "../ext/HighFive/include/highfive/H5File.hpp", + language="C++", autoadd=False) + + if env["uses_highfive"]: + logger.info("Using private installation of HighFive.") + else: + logger.error("HighFive is not configured correctly.") + +elif env["system_highfive"] in ("y", "default"): + env["system_highfive"] = True + env["uses_highfive"] = conf.CheckLibWithHeader( + "hdf5", "highfive/H5File.hpp", language="C++", autoadd=False) + if env["uses_highfive"]: + logger.info("Using system installation of HighFive.") + else: + logger.warning("Unable to locate HighFive installation.") def set_fortran(pattern, value): # Set compiler / flags for all Fortran versions to be the same @@ -2026,13 +2069,14 @@ cdefine('LAPACK_FTN_TRAILING_UNDERSCORE', 'lapack_ftn_trailing_underscore') cdefine('FTN_TRAILING_UNDERSCORE', 'lapack_ftn_trailing_underscore') cdefine('LAPACK_NAMES_LOWERCASE', 'lapack_names', 'lower') cdefine('CT_USE_LAPACK', 'use_lapack') +cdefine("CT_USE_HIGHFIVE_HDF", "uses_highfive") +cdefine('CT_USE_SYSTEM_HIGHFIVE', 'system_highfive') cdefine("CT_USE_SYSTEM_EIGEN", "system_eigen") cdefine("CT_USE_SYSTEM_EIGEN_PREFIXED", "system_eigen_prefixed") cdefine('CT_USE_SYSTEM_FMT', 'system_fmt') cdefine('CT_USE_SYSTEM_YAMLCPP', 'system_yamlcpp') cdefine('CT_USE_DEMANGLE', 'has_demangle') cdefine('CT_HAS_PYTHON', 'python_package', 'full') -cdefine("CT_USE_HIGHFIVE_HDF", "has_highfive") config_h_build = env.Command('build/src/config.h.build', 'include/cantera/base/config.h.in', @@ -2116,7 +2160,7 @@ else: env["external_libs"] = [] 
env["external_libs"].extend(env["sundials_libs"]) -if env["has_highfive"]: +if env["uses_highfive"]: if env["OS"] == "Windows": # see https://github.com/microsoft/vcpkg/issues/24293 env.Append(CPPDEFINES=["H5_BUILT_AS_DYNAMIC_LIB"]) diff --git a/ext/HighFive b/ext/HighFive new file mode 160000 index 00000000000..5513f28dcce --- /dev/null +++ b/ext/HighFive @@ -0,0 +1 @@ +Subproject commit 5513f28dcced33872a3e40a63e28d49272da20fc diff --git a/ext/SConscript b/ext/SConscript index 664b036acb1..eb1073e9980 100644 --- a/ext/SConscript +++ b/ext/SConscript @@ -125,6 +125,14 @@ if not env['system_eigen']: copyenv.Depends(copyenv['config_h_target'], h) ext_copies.extend(h) +if not env["system_highfive"]: + localenv = prep_default(env) + license_files["HighFive"] = File("#ext/HighFive/LICENSE") + h = build(copyenv.Command('#include/cantera/ext/HighFive', '#ext/HighFive/include/highfive', + Copy('$TARGET', '$SOURCE'))) + copyenv.Depends(copyenv['config_h_target'], h) + ext_copies.extend(h) + # Google Test: Used internally for Cantera unit tests. 
if env['googletest'] == 'submodule': localenv = prep_gtest(env) diff --git a/include/cantera/base/config.h.in b/include/cantera/base/config.h.in index 2a28e45c9e0..70d88a1d868 100644 --- a/include/cantera/base/config.h.in +++ b/include/cantera/base/config.h.in @@ -68,5 +68,6 @@ typedef int ftnlen; // Fortran hidden string length type // Enable export/import of HDF data via C++ HighFive {CT_USE_HIGHFIVE_HDF!s} +{CT_USE_SYSTEM_HIGHFIVE!s} #endif diff --git a/src/base/hdfUtils.h b/src/base/hdfUtils.h index 01396e96b78..e85a569a5a9 100644 --- a/src/base/hdfUtils.h +++ b/src/base/hdfUtils.h @@ -6,12 +6,21 @@ #ifndef CT_HDF_UTILS_H #define CT_HDF_UTILS_H -#include -#include -#include -#include -#include -#include +#if CT_USE_SYSTEM_HIGHFIVE + #include + #include + #include + #include + #include + #include +#else + #include "cantera/ext/HighFive/H5Attribute.hpp" + #include "cantera/ext/HighFive/H5DataSet.hpp" + #include "cantera/ext/HighFive/H5DataSpace.hpp" + #include "cantera/ext/HighFive/H5DataType.hpp" + #include "cantera/ext/HighFive/H5File.hpp" + #include "cantera/ext/HighFive/H5Group.hpp" +#endif namespace h5 = HighFive; diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index 6af3af2f4dd..ab2cd8ef454 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -20,9 +20,11 @@ using namespace std; #if CT_USE_HIGHFIVE_HDF -#include -#include -#include +#if CT_USE_SYSTEM_HIGHFIVE + #include +#else + #include "cantera/ext/HighFive/H5File.hpp" +#endif namespace h5 = HighFive; #endif From 84de1cf1d1f1a12518cf97098453288d373bf72d Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 1 Dec 2022 20:22:50 -0600 Subject: [PATCH 60/93] [base] Create Storage class to handle HDF files --- include/cantera/base/SolutionArray.h | 43 +-- include/cantera/base/Storage.h | 465 +++++++++++++++++++++++++++ src/base/SolutionArray.cpp | 139 ++++---- src/base/hdfUtils.h | 234 -------------- src/oneD/Sim1D.cpp | 26 +- 5 files changed, 557 insertions(+), 350 deletions(-) create mode 
100644 include/cantera/base/Storage.h delete mode 100644 src/base/hdfUtils.h diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index d0dd79680f0..dcc5734bda8 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -9,13 +9,6 @@ #include "cantera/base/global.h" #include "cantera/base/AnyMap.h" -#if CT_USE_HIGHFIVE_HDF -namespace HighFive -{ - class File; -} -#endif - namespace Cantera { @@ -117,32 +110,28 @@ class SolutionArray /*! * Write header data to container file. * - * @param root Root location + * @param fname Name of HDF container file * @param id Identifier of SolutionArray root within the container file * @param desc Description */ - static void writeHeader(AnyMap& root, const std::string& id, + static void writeHeader(const std::string& fname, const std::string& id, const std::string& desc); -#if CT_USE_HIGHFIVE_HDF - static void writeHeader(HighFive::File& file, const std::string& id, + static void writeHeader(AnyMap& root, const std::string& id, const std::string& desc); -#endif /*! * Write SolutionArray data to container file. * - * @param root Root location + * @param fname Name of HDF container file * @param id Identifier of SolutionArray within the container file */ + void writeEntry(const std::string& fname, const std::string& id); void writeEntry(AnyMap& root, const std::string& id); -#if CT_USE_HIGHFIVE_HDF - void writeEntry(HighFive::File& file, const std::string& id); -#endif /*! - * Save the current SolutionArray to a container file. + * Save current SolutionArray and header to a container file. * - * @param fname Name of output container file + * @param fname Name of output container file (YAML or HDF) * @param id Identifier of SolutionArray within the container file */ void save(const std::string& fname, const std::string& id, const std::string& desc); @@ -150,29 +139,25 @@ class SolutionArray /*! * Read header data from container file. 
* - * @param root Root location + * @param fname Name of HDF container file * @param id Identifier of SolutionArray within the file structure */ + static AnyMap readHeader(const std::string& fname, const std::string& id); static AnyMap readHeader(const AnyMap& root, const std::string& id); -#if CT_USE_HIGHFIVE_HDF - static AnyMap readHeader(const HighFive::File& file, const std::string& id); -#endif /*! - * Restore SolutionArray from a container file. + * Restore SolutionArray entry from a container file. * - * @param root Root location + * @param fname Name of HDF container file * @param id Identifier of SolutionArray within the file structure */ + void readEntry(const std::string& fname, const std::string& id); void readEntry(const AnyMap& root, const std::string& id); -#if CT_USE_HIGHFIVE_HDF - void readEntry(const HighFive::File& file, const std::string& id); -#endif /*! - * Restore SolutionArray from a container file. + * Restore SolutionArray entry and header from a container file. * - * @param fname Name of container file + * @param fname Name of container file (YAML or HDF) * @param id Identifier of SolutionArray within the container file */ AnyMap restore(const std::string& fname, const std::string& id); diff --git a/include/cantera/base/Storage.h b/include/cantera/base/Storage.h new file mode 100644 index 00000000000..9a27eaabfb8 --- /dev/null +++ b/include/cantera/base/Storage.h @@ -0,0 +1,465 @@ +//! @file Storage.h + +// This file is part of Cantera. See License.txt in the top-level directory or +// at https://cantera.org/license.txt for license and copyright information. 
+ +#ifndef CT_STORAGE_H +#define CT_STORAGE_H + +#include "cantera/base/ct_defs.h" +#include "cantera/base/stringUtils.h" +#include + +#if CT_USE_HIGHFIVE_HDF +#if CT_USE_SYSTEM_HIGHFIVE + #include + #include + #include + #include + #include + #include +#else + #include "cantera/ext/HighFive/H5Attribute.hpp" + #include "cantera/ext/HighFive/H5DataSet.hpp" + #include "cantera/ext/HighFive/H5DataSpace.hpp" + #include "cantera/ext/HighFive/H5DataType.hpp" + #include "cantera/ext/HighFive/H5File.hpp" + #include "cantera/ext/HighFive/H5Group.hpp" +#endif + +namespace h5 = HighFive; + +enum class H5Boolean { + FALSE = 0, + TRUE = 1, +}; + +h5::EnumType create_enum_boolean() { + return {{"FALSE", H5Boolean::FALSE}, + {"TRUE", H5Boolean::TRUE}}; +} + +HIGHFIVE_REGISTER_TYPE(H5Boolean, create_enum_boolean) + +#endif + +namespace Cantera +{ + +/*! + * A wrapper class handling storage to HDF; acts as a thin wrapper for HighFive + */ +class Storage +{ +public: +#if CT_USE_HIGHFIVE_HDF + Storage(h5::File file, bool write) : m_file(file), m_write(write) {} +#else + Storage() { + throw CanteraError("Storage::Storage", + "Instantiation of Storage requires HighFive::File object."); + } +#endif + + //! Flush file contents + void flush(); + + //! Check whether path to location exists + //! If the file has write access, create location if necessary + bool checkGroup(const std::string& id); + + //! Retrieve contents of file from a specified location + std::pair> contents(const std::string& id) const; + + //! Read attributes from a specified location + AnyMap readAttributes(const std::string& id, bool recursive) const; + + //! Write attributes to a specified location + void writeAttributes(const std::string& id, const AnyMap& meta); + + //! Read data vector from a specified location + vector_fp readVector(const std::string& id, + const std::string& name, size_t size) const; + + //! 
Write data vector to a specified location + void writeVector(const std::string& id, + const std::string& name, const vector_fp& data); + + //! Read matrix from a specified location + std::vector readMatrix(const std::string& id, + const std::string& name, + size_t rows, size_t cols) const; + + //! Write matrix to a specified location + void writeMatrix(const std::string& id, + const std::string& name, const std::vector& data); + +private: +#if CT_USE_HIGHFIVE_HDF + bool checkGroupRead(const std::string& id) const; + bool checkGroupWrite(const std::string& id); + + h5::File m_file; +#endif + + bool m_write; +}; + +#if CT_USE_HIGHFIVE_HDF + +void Storage::flush() +{ + m_file.flush(); +} + +bool Storage::checkGroupRead(const std::string& id) const +{ + std::vector tokens; + tokenizePath(id, tokens); + std::string grp = tokens[0]; + if (!m_file.exist(grp) || m_file.getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("Storage::checkGroup", + "No group with id '{}' found", grp); + } + + std::string path = grp; + h5::Group sub = m_file.getGroup(grp); + tokens.erase(tokens.begin()); + for (auto& grp : tokens) { + path += "/" + grp; + if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("Storage::checkGroup", + "No group with id '{}' found", path); + } + sub = sub.getGroup(grp); + } + return true; +} + +bool Storage::checkGroupWrite(const std::string& id) +{ + if (!m_file.exist(id)) { + m_file.createGroup(id); + return true; + } + if (m_file.getObjectType(id) != h5::ObjectType::Group) { + throw CanteraError("Storage::checkGroup", + "Invalid object with id '{}' exists", id); + } + return true; +} + +bool Storage::checkGroup(const std::string& id) { + if (m_write) { + return checkGroupWrite(id); + } + return checkGroupRead(id); +} + +std::pair> Storage::contents(const std::string& id) const +{ + h5::Group sub = m_file.getGroup(id); + std::set names; + size_t nDims = npos; + size_t nElements = 0; + for (auto& name : 
sub.listObjectNames()) { + if (sub.getObjectType(name) == h5::ObjectType::Dataset) { + h5::DataSpace space = sub.getDataSet(name).getSpace(); + names.insert(name); + if (space.getNumberDimensions() < nDims) { + nDims = space.getNumberDimensions(); + nElements = space.getElementCount(); + } + } + } + if (nDims != 1 && nDims != npos) { + throw NotImplementedError("Storage::content", + "Unable to restore data with {} dimensions.", nDims); + } + return std::make_pair(nElements, names); +} + +AnyMap readH5Attributes(const h5::Group& sub, bool recursive) +{ + // restore meta data from attributes + AnyMap out; + for (auto& name : sub.listAttributeNames()) { + h5::Attribute attr = sub.getAttribute(name); + h5::DataType dtype = attr.getDataType(); + h5::DataTypeClass dclass = dtype.getClass(); + if (dclass == h5::DataTypeClass::Float) { + if (attr.getSpace().getElementCount() > 1) { + std::vector values; + attr.read(values); + out[name] = values; + } else { + double value; + attr.read(value); + out[name] = value; + } + } else if (dclass == h5::DataTypeClass::Integer) { + if (attr.getSpace().getElementCount() > 1) { + std::vector values; + attr.read(values); + out[name] = values; + } else { + int value; + attr.read(value); + out[name] = value; + } + } else if (dclass == h5::DataTypeClass::String) { + if (attr.getSpace().getElementCount() > 1) { + std::vector values; + attr.read(values); + out[name] = values; + } else { + std::string value; + attr.read(value); + out[name] = value; + } + } else if (dclass == h5::DataTypeClass::Enum) { + // only booleans are supported + if (attr.getSpace().getElementCount() > 1) { + std::vector values; + attr.read(values); + std::vector bValues; + for (auto v : values) { + bValues.push_back(bool(v)); + } + out[name] = bValues; + } else { + H5Boolean value; + attr.read(value); + out[name] = bool(value); + } + } else { + throw NotImplementedError("readH5Attributes", + "Unable to read attribute '{}' with type '{}'", name, dtype.string()); + } + } 
+ + if (recursive) { + for (auto& name : sub.listObjectNames()) { + if (sub.getObjectType(name) == h5::ObjectType::Group) { + out[name] = readH5Attributes(sub.getGroup(name), recursive); + } + } + } + + return out; +} + +AnyMap Storage::readAttributes(const std::string& id, bool recursive) const +{ + h5::Group sub = m_file.getGroup(id); + return readH5Attributes(sub, recursive); +} + +void writeH5Attributes(h5::Group sub, const AnyMap& meta) +{ + for (auto& item : meta) { + if (item.second.is()) { + double value = item.second.asDouble(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is() || item.second.is()) { + int value = item.second.asInt(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is()) { + std::string value = item.second.asString(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is()) { + bool bValue = item.second.asBool(); + H5Boolean value = bValue ? H5Boolean::TRUE : H5Boolean::FALSE; + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is>()) { + auto values = item.second.as>(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is>()) { + auto values = item.second.as>(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is>()) { + auto values = item.second.as>(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is>()) { + auto bValue = item.second.as>(); + std::vector values; + for (auto b : bValue) { + values.push_back(b ? 
H5Boolean::TRUE : H5Boolean::FALSE); + } + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is()) { + // step into recursion + auto value = item.second.as(); + auto grp = sub.createGroup(item.first); + writeH5Attributes(grp, value); + } else { + throw NotImplementedError("Storage::writeAttributes", + "Unable to write attribute '{}' with type '{}'", + item.first, item.second.type_str()); + } + } +} + +void Storage::writeAttributes(const std::string& id, const AnyMap& meta) +{ + h5::Group sub = m_file.getGroup(id); + writeH5Attributes(sub, meta); +} + +vector_fp Storage::readVector(const std::string& id, + const std::string& name, size_t size) const +{ + h5::Group sub = m_file.getGroup(id); + if (!sub.exist(name)) { + throw CanteraError("Storage::readVector", + "DataSet '{}' not found in path '{}'.", name, id); + } + h5::DataSet dataset = sub.getDataSet(name); + if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { + throw CanteraError("Storage::readVector", + "Type of DataSet '{}' is inconsistent; expected HDF float.", name); + } + if (dataset.getElementCount() != size) { + throw CanteraError("Storage::readVector", + "Size of DataSet '{}' is inconsistent; expected {} elements but " + "received {} elements.", name, size, dataset.getElementCount()); + } + vector_fp out; + dataset.read(out); + return out; +} + +void Storage::writeVector(const std::string& id, + const std::string& name, const vector_fp& data) +{ + h5::Group sub = m_file.getGroup(id); + std::vector dims{data.size()}; + h5::DataSet dataset = sub.createDataSet(name, h5::DataSpace(dims)); + dataset.write(data); +} + +std::vector Storage::readMatrix(const std::string& id, + const std::string& name, + size_t rows, size_t cols) const +{ + h5::Group sub = m_file.getGroup(id); + if (!sub.exist(name)) { + throw CanteraError("Storage::readVector", + "DataSet '{}' not found in path '{}'.", name, id); + } + h5::DataSet 
dataset = sub.getDataSet(name); + if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { + throw CanteraError("Storage::readMatrix", + "Type of DataSet '{}' is inconsistent; expected HDF float.", name); + } + h5::DataSpace space = dataset.getSpace(); + if (space.getNumberDimensions() != 2) { + throw CanteraError("Storage::readMatrix", + "Shape of DataSet '{}' is inconsistent; expected two dimensions.", name); + } + const auto& shape = space.getDimensions(); + if (shape[0] != rows) { + throw CanteraError("Storage::readMatrix", + "Shape of DataSet '{}' is inconsistent; expected {} rows.", name, rows); + } + if (shape[1] != cols) { + throw CanteraError("Storage::readMatrix", + "Shape of DataSet '{}' is inconsistent; expected {} columns.", name, cols); + } + std::vector out; + dataset.read(out); + return out; +} + +void Storage::writeMatrix(const std::string& id, + const std::string& name, const std::vector& data) +{ + h5::Group sub = m_file.getGroup(id); + std::vector dims{data.size()}; + dims.push_back(data.size() ? 
data[0].size() : 0); + h5::DataSet dataset = sub.createDataSet(name, h5::DataSpace(dims)); + dataset.write(data); +} + +#else + +void Storage::flush() +{ + throw CanteraError("Storage::flush", + "Saving to HDF requires HighFive installation."); +} + +bool Storage::checkGroup(const std::string& id) +{ + throw CanteraError("Storage::checkGroup", + "Saving to HDF requires HighFive installation."); +} + +std::pair> Storage::contents(const std::string& id) const +{ + throw CanteraError("Storage::contents", + "Saving to HDF requires HighFive installation."); +} + +AnyMap Storage::readAttributes(const std::string& id, bool recursive) const +{ + throw CanteraError("Storage::readAttributes", + "Saving to HDF requires HighFive installation."); +} + +void Storage::writeAttributes(const std::string& id, const AnyMap& meta) +{ + throw CanteraError("Storage::writeAttributes", + "Saving to HDF requires HighFive installation."); +} + +vector_fp Storage::readVector(const std::string& id, + const std::string& name, size_t size) const +{ + throw CanteraError("Storage::readVector", + "Saving to HDF requires HighFive installation."); +} + +void Storage::writeVector(const std::string& id, + const std::string& name, const vector_fp& data) +{ + throw CanteraError("Storage::writeVector", + "Saving to HDF requires HighFive installation."); +} + +std::vector Storage::readMatrix(const std::string& id, + const std::string& name, + size_t rows, size_t cols) const +{ + throw CanteraError("Storage::readMatrix", + "Saving to HDF requires HighFive installation."); +} + +void Storage::writeMatrix(const std::string& id, + const std::string& name, const std::vector& data) +{ + throw CanteraError("Storage::writeMatrix", + "Saving to HDF requires HighFive installation."); +} + +#endif + +} + +#endif diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index 3d1ffe4fade..256a110615d 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -8,15 +8,13 @@ #include 
"cantera/base/SolutionArray.h" #include "cantera/base/Solution.h" +#include "cantera/base/Storage.h" #include "cantera/base/stringUtils.h" #include "cantera/thermo/ThermoPhase.h" #include "cantera/thermo/SurfPhase.h" #include #include -#if CT_USE_HIGHFIVE_HDF -#include "hdfUtils.h" -#endif namespace Cantera { @@ -223,14 +221,21 @@ AnyMap preamble(const std::string& desc) return data; } -#if CT_USE_HIGHFIVE_HDF -void SolutionArray::writeHeader(h5::File& file, const std::string& id, +void SolutionArray::writeHeader(const std::string& fname, const std::string& id, const std::string& desc) { - auto sub = openH5Group(file, id); - writeH5Attributes(sub, preamble(desc)); -} +#if CT_USE_HIGHFIVE_HDF + h5::File hdf(fname, h5::File::OpenOrCreate); + Storage file(hdf, true); +#else + throw CanteraError("SolutionArray::writeHeader", + "Saving to HDF requires HighFive installation."); + Storage file; #endif + file.checkGroup(id); + file.writeAttributes(id, preamble(desc)); + file.flush(); +} void SolutionArray::writeHeader(AnyMap& root, const std::string& id, const std::string& desc) @@ -238,13 +243,20 @@ void SolutionArray::writeHeader(AnyMap& root, const std::string& id, root[id] = preamble(desc); } -#if CT_USE_HIGHFIVE_HDF -void SolutionArray::writeEntry(h5::File& file, const std::string& id) +void SolutionArray::writeEntry(const std::string& fname, const std::string& id) { - auto sub = openH5Group(file, id); - writeH5Attributes(sub, m_meta); - +#if CT_USE_HIGHFIVE_HDF + h5::File hdf(fname, h5::File::OpenOrCreate); + Storage file(hdf, true); +#else + throw CanteraError("SolutionArray::writeEntry", + "Saving to HDF requires HighFive installation."); + Storage file; +#endif + file.checkGroup(id); + file.writeAttributes(id, m_meta); if (!m_size) { + file.flush(); return; } @@ -259,17 +271,18 @@ void SolutionArray::writeEntry(h5::File& file, const std::string& id) size_t first = offset + i * m_stride; prop.push_back(vector_fp(&m_data[first], &m_data[first + nSpecies])); } - 
writeH5FloatMatrix(sub, name, prop); + file.writeMatrix(id, name, prop); } else { - writeH5FloatVector(sub, name, getComponent(name)); + auto data = getComponent(name); + file.writeVector(id, name, data); } } for (auto& other : m_other) { - writeH5FloatVector(sub, other.first, *(other.second)); + file.writeVector(id, other.first, *(other.second)); } + file.flush(); } -#endif AnyMap& openField(AnyMap& root, const std::string& id) { @@ -354,16 +367,10 @@ void SolutionArray::save( { size_t dot = fname.find_last_of("."); std::string extension = (dot != npos) ? toLowerCopy(fname.substr(dot + 1)) : ""; - if (extension == "h5" || extension == "hdf") { -#if CT_USE_HIGHFIVE_HDF - h5::File file(fname, h5::File::OpenOrCreate); - writeHeader(file, id, desc); - writeEntry(file, id); + if (extension == "h5" || extension == "hdf" || extension == "hdf5") { + writeHeader(fname, id, desc); + writeEntry(fname, id); return; -#else - throw CanteraError("SolutionArray::writeHeader", - "Saving to HDF requires HighFive installation."); -#endif } if (extension == "yaml" || extension == "yml") { // Check for an existing file and load it if present @@ -384,12 +391,19 @@ void SolutionArray::save( "Unknown file extension '{}'", extension); } -#if CT_USE_HIGHFIVE_HDF -AnyMap SolutionArray::readHeader(const h5::File& file, const std::string& id) +AnyMap SolutionArray::readHeader(const std::string& fname, const std::string& id) { - return readH5Attributes(locateH5Group(file, id), false); -} +#if CT_USE_HIGHFIVE_HDF + h5::File hdf(fname, h5::File::ReadOnly); + Storage file(hdf, false); +#else + throw CanteraError("SolutionArray::readHeader", + "Saving to HDF requires HighFive installation."); + Storage file; #endif + file.checkGroup(id); + return file.readAttributes(id, false); +} AnyMap SolutionArray::readHeader(const AnyMap& root, const std::string& id) { @@ -400,15 +414,9 @@ AnyMap SolutionArray::restore(const std::string& fname, const std::string& id) { size_t dot = fname.find_last_of("."); 
std::string extension = (dot != npos) ? toLowerCopy(fname.substr(dot + 1)) : ""; - if (extension == "h5" || extension == "hdf") { -#if CT_USE_HIGHFIVE_HDF - h5::File file(fname, h5::File::ReadOnly); - readEntry(file, id); - return readHeader(file, id); -#else - throw CanteraError("SolutionArray::restore", - "Restoring from HDF requires HighFive installation."); -#endif + if (extension == "h5" || extension == "hdf" || extension == "hdf5") { + readEntry(fname, id); + return readHeader(fname, id); } if (extension == "yaml" || extension == "yml") { const AnyMap& root = AnyMap::fromYamlFile(fname); @@ -439,9 +447,9 @@ std::string SolutionArray::detectMode(std::set names, bool native) break; } if (names.count(name)) { - usesNativeState &= nativeState.count(name); + usesNativeState &= nativeState.count(name) > 0; } else if (aliasMap.count(name) && names.count(aliasMap.at(name))) { - usesNativeState &= nativeState.count(name); + usesNativeState &= nativeState.count(name) > 0; } else { found = false; break; @@ -475,28 +483,22 @@ std::set SolutionArray::stateProperties(std::string mode, bool alia return states; } -#if CT_USE_HIGHFIVE_HDF -void SolutionArray::readEntry(const h5::File& file, const std::string& id) +void SolutionArray::readEntry(const std::string& fname, const std::string& id) { - auto sub = locateH5Group(file, id); - m_meta = readH5Attributes(sub, true); - - std::set names; - size_t nDims = npos; - for (auto& name : sub.listObjectNames()) { - if (sub.getObjectType(name) == h5::ObjectType::Dataset) { - h5::DataSpace space = sub.getDataSet(name).getSpace(); - names.insert(name); - if (space.getNumberDimensions() < nDims) { - nDims = space.getNumberDimensions(); - m_size = space.getElementCount(); - } - } - } - if (nDims != 1 && nDims != npos) { - throw NotImplementedError("SolutionArray::restore", - "Unable to restore SolutionArray with {} dimensions.", nDims); - } +#if CT_USE_HIGHFIVE_HDF + h5::File hdf(fname, h5::File::ReadOnly); + Storage file(hdf, false); 
+#else + throw CanteraError("SolutionArray::readEntry", + "Saving to HDF requires HighFive installation."); + Storage file; +#endif + file.checkGroup(id); + m_meta = file.readAttributes(id, true); + + auto contents = file.contents(id); + m_size = contents.first; + std::set names = contents.second; initialize({}); @@ -522,20 +524,20 @@ void SolutionArray::readEntry(const h5::File& file, const std::string& id) std::string name = item.first; if (name == "X" || name == "Y") { size_t offset = item.second; - auto prop = readH5FloatMatrix(sub, name, m_size, nSpecies); + auto prop = file.readMatrix(id, name, m_size, nSpecies); for (size_t i = 0; i < m_size; i++) { std::copy(prop[i].begin(), prop[i].end(), &m_data[offset + i * m_stride]); } } else { - setComponent(name, readH5FloatVector(sub, name, m_size)); + setComponent(name, file.readVector(id, name, m_size)); } } } else if (mode == "TPX" || mode == "TPC") { // data format used by Python h5py export (Cantera 2.5) - vector_fp T = readH5FloatVector(sub, "T", m_size); - vector_fp P = readH5FloatVector(sub, "P", m_size); - auto X = readH5FloatMatrix(sub, "X", m_size, nSpecies); + vector_fp T = file.readVector(id, "T", m_size); + vector_fp P = file.readVector(id, "P", m_size); + auto X = file.readMatrix(id, "X", m_size, nSpecies); for (size_t i = 0; i < m_size; i++) { m_sol->thermo()->setState_TPX(T[i], P[i], X[i].data()); m_sol->thermo()->saveState(nState, &m_data[i * m_stride]); @@ -551,14 +553,13 @@ void SolutionArray::readEntry(const h5::File& file, const std::string& id) // restore other data for (const auto& name : names) { if (!states.count(name)) { - vector_fp data = readH5FloatVector(sub, name, m_size); + vector_fp data = file.readVector(id, name, m_size); m_other.emplace(name, std::make_shared(m_size)); auto& extra = m_other[name]; std::copy(data.begin(), data.end(), extra->begin()); } } } -#endif const AnyMap& locateField(const AnyMap& root, const std::string& id) { diff --git a/src/base/hdfUtils.h 
b/src/base/hdfUtils.h deleted file mode 100644 index e85a569a5a9..00000000000 --- a/src/base/hdfUtils.h +++ /dev/null @@ -1,234 +0,0 @@ -//! @file hdfUtils.h - -// This file is part of Cantera. See License.txt in the top-level directory or -// at https://cantera.org/license.txt for license and copyright information. - -#ifndef CT_HDF_UTILS_H -#define CT_HDF_UTILS_H - -#if CT_USE_SYSTEM_HIGHFIVE - #include - #include - #include - #include - #include - #include -#else - #include "cantera/ext/HighFive/H5Attribute.hpp" - #include "cantera/ext/HighFive/H5DataSet.hpp" - #include "cantera/ext/HighFive/H5DataSpace.hpp" - #include "cantera/ext/HighFive/H5DataType.hpp" - #include "cantera/ext/HighFive/H5File.hpp" - #include "cantera/ext/HighFive/H5Group.hpp" -#endif - -namespace h5 = HighFive; - -enum class H5Boolean { - FALSE = 0, - TRUE = 1, -}; - -h5::EnumType create_enum_boolean() { - return {{"FALSE", H5Boolean::FALSE}, - {"TRUE", H5Boolean::TRUE}}; -} - -HIGHFIVE_REGISTER_TYPE(H5Boolean, create_enum_boolean) - -namespace Cantera -{ - -h5::Group locateH5Group(const h5::File& file, const std::string& id) -{ - std::vector tokens; - tokenizePath(id, tokens); - std::string grp = tokens[0]; - if (!file.exist(grp) || file.getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("locateH5Group", "No group with id '{}' found", grp); - } - - std::string path = grp; - h5::Group sub = file.getGroup(grp); - tokens.erase(tokens.begin()); - for (auto& grp : tokens) { - path += "/" + grp; - if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("locateH5Group", "No group with id '{}' found", path); - } - sub = sub.getGroup(grp); - } - return sub; -} - -h5::Group openH5Group(h5::File& file, const std::string& id) -{ - if (!file.exist(id)) { - return file.createGroup(id); - } - if (file.getObjectType(id) != h5::ObjectType::Group) { - throw CanteraError("openH5Group", "Invalid object with id '{}' exists", id); - } - return 
file.getGroup(id); -} - -AnyMap readH5Attributes(const h5::Group& sub, bool recursive) -{ - // restore meta data from attributes - AnyMap out; - for (auto& name : sub.listAttributeNames()) { - h5::Attribute attr = sub.getAttribute(name); - h5::DataType dtype = attr.getDataType(); - h5::DataTypeClass dclass = dtype.getClass(); - if (dclass == h5::DataTypeClass::Float) { - double value; - attr.read(value); - out[name] = value; - } else if (dclass == h5::DataTypeClass::Integer) { - int value; - attr.read(value); - out[name] = value; - } else if (dclass == h5::DataTypeClass::String) { - std::string value; - attr.read(value); - out[name] = value; - } else if (dclass == h5::DataTypeClass::Enum) { - // only booleans are supported - if (attr.getSpace().getElementCount() > 1) { - std::vector values; - attr.read(values); - std::vector bValues; - for (auto v : values) { - bValues.push_back(bool(v)); - } - out[name] = bValues; - } else { - H5Boolean value; - attr.read(value); - out[name] = bool(value); - } - } else { - throw NotImplementedError("readH5Attributes", - "Unable to read attribute '{}' with type '{}'", name, dtype.string()); - } - } - - if (recursive) { - for (auto& name : sub.listObjectNames()) { - if (sub.getObjectType(name) == h5::ObjectType::Group) { - out[name] = readH5Attributes(sub.getGroup(name), recursive); - } - } - } - - return out; -} - -void writeH5Attributes(h5::Group& sub, const AnyMap& meta) -{ - for (auto& item : meta) { - if (item.second.is()) { - double value = item.second.asDouble(); - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(value)); - attr.write(value); - } else if (item.second.is() || item.second.is()) { - int value = item.second.asInt(); - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(value)); - attr.write(value); - } else if (item.second.is()) { - std::string value = item.second.asString(); - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(value)); - 
attr.write(value); - } else if (item.second.is()) { - bool bValue = item.second.asBool(); - H5Boolean value = bValue ? H5Boolean::TRUE : H5Boolean::FALSE; - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(value)); - attr.write(value); - } else if (item.second.is>()) { - auto bValue = item.second.as>(); - std::vector value; - for (auto b : bValue) { - value.push_back(b ? H5Boolean::TRUE : H5Boolean::FALSE); - } - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(value)); - attr.write(value); - } else if (item.second.is()) { - // step into recursion - auto value = item.second.as(); - auto grp = sub.createGroup(item.first); - writeH5Attributes(grp, value); - } else { - throw NotImplementedError("writeH5Attributes", - "Unable to write attribute '{}' with type '{}'", - item.first, item.second.type_str()); - } - } -} - -void writeH5FloatVector(h5::Group& sub, std::string id, vector_fp data) -{ - std::vector dims{data.size()}; - h5::DataSet dataset = sub.createDataSet(id, h5::DataSpace(dims)); - dataset.write(data); -} - -vector_fp readH5FloatVector(h5::Group& sub, std::string id, size_t size) -{ - h5::DataSet dataset = sub.getDataSet(id); - if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { - throw CanteraError("readH5FloatVector", - "Type of DataSet '{}' is inconsistent; expected HDF float.", id); - } - if (dataset.getElementCount() != size) { - throw CanteraError("readH5FloatVector", - "Size of DataSet '{}' is inconsistent; expected {} elements but " - "received {} elements.", id, size, dataset.getElementCount()); - } - vector_fp out; - dataset.read(out); - return out; -} - -void writeH5FloatMatrix(h5::Group& sub, std::string id, std::vector data) -{ - std::vector dims{data.size()}; - dims.push_back(data.size() ? 
data[0].size() : 0); - h5::DataSet dataset = sub.createDataSet(id, h5::DataSpace(dims)); - dataset.write(data); -} - -std::vector readH5FloatMatrix(h5::Group& sub, std::string id, - size_t rows, size_t cols) -{ - h5::DataSet dataset = sub.getDataSet(id); - if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { - throw CanteraError("readH5FloatMatrix", - "Type of DataSet '{}' is inconsistent; expected HDF float.", id); - } - h5::DataSpace space = dataset.getSpace(); - if (space.getNumberDimensions() != 2) { - throw CanteraError("readH5FloatMatrix", - "Shape of DataSet '{}' is inconsistent; expected two dimensions.", id); - } - const auto& shape = space.getDimensions(); - if (shape[0] != rows) { - throw CanteraError("readH5FloatMatrix", - "Shape of DataSet '{}' is inconsistent; expected {} rows.", id, rows); - } - if (shape[1] != cols) { - throw CanteraError("readH5FloatMatrix", - "Shape of DataSet '{}' is inconsistent; expected {} columns.", id, cols); - } - std::vector out; - dataset.read(out); - return out; -} - -} - -#endif diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index ab2cd8ef454..a162f3fd18b 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -109,19 +109,13 @@ void Sim1D::save(const std::string& fname, const std::string& id, { size_t dot = fname.find_last_of("."); string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot+1)) : ""; - if (extension == "h5" || extension == "hdf") { -#if CT_USE_HIGHFIVE_HDF - h5::File file(fname, h5::File::OpenOrCreate); + if (extension == "h5" || extension == "hdf" || extension == "hdf5") { for (auto dom : m_dom) { auto arr = dom->asArray(m_x.data()); - arr->writeEntry(file, id + "/" + dom->id()); + arr->writeEntry(fname, id + "/" + dom->id()); } - SolutionArray::writeHeader(file, id, desc); + SolutionArray::writeHeader(fname, id, desc); return; -#else - throw CanteraError("Sim1D::save", - "Saving to HDF requires HighFive installation."); -#endif } if (extension == "yaml" || extension == "yml") { // Check for an existing file and load it if present @@ -292,14 +286,14 @@ void Sim1D::restore(const std::string& fname, const std::string& id, if (extension == "xml") { throw CanteraError("Sim1D::restore", "Restoring from XML is no longer supported."); - } else if (extension == "h5" || extension == "hdf") { -#if CT_USE_HIGHFIVE_HDF - h5::File file(fname, h5::File::ReadOnly); + } + if (extension == "h5" || extension == "hdf" || extension == "hdf5") { std::map> arrs; - auto header = SolutionArray::readHeader(file, id); + auto header = SolutionArray::readHeader(fname, id); + for (auto dom : m_dom) { auto arr = SolutionArray::create(dom->solution()); - arr->readEntry(file, id + "/" + dom->id()); + arr->readEntry(fname, id + "/" + dom->id()); dom->resize(dom->nComponents(), arr->size()); if (!header.hasKey("generator")) { arr->meta() = legacyH5(arr, header); @@ -312,10 +306,6 @@ void Sim1D::restore(const std::string& fname, const std::string& id, dom->restore(*arrs[dom->id()], m_x.data() + dom->loc(), loglevel); } finalize(); -#else - throw CanteraError("Sim1D::restore", - "Restoring from HDF requires HighFive installation."); -#endif } else if (extension == "yaml" || extension == "yml") { AnyMap root = AnyMap::fromYamlFile(fname); std::map> arrs; From 88c306b4729d4f0de3112272e2d744762ebb9478 Mon Sep 17 00:00:00 2001 From: Ingmar 
Schoegl Date: Sat, 3 Dec 2022 10:55:49 -0600 Subject: [PATCH 61/93] Fix SolutionArray serialization for Boundary1D --- src/base/SolutionArray.cpp | 21 ++++++++++++++++++--- src/oneD/Boundary1D.cpp | 12 ++++++++---- src/oneD/Sim1D.cpp | 10 ++++++++-- src/oneD/StFlow.cpp | 2 ++ 4 files changed, 36 insertions(+), 9 deletions(-) diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index 256a110615d..ee17c8e199b 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -510,8 +510,15 @@ void SolutionArray::readEntry(const std::string& fname, const std::string& id) std::string mode = detectMode(names); std::set states = stateProperties(mode); if (states.count("C")) { - states.erase("C"); - states.insert("X"); + if (names.count("X")) { + states.erase("C"); + states.insert("X"); + mode = "TPX"; + } else if (names.count("Y")) { + states.erase("C"); + states.insert("Y"); + mode = "TPY"; + } } // restore state data @@ -533,7 +540,7 @@ void SolutionArray::readEntry(const std::string& fname, const std::string& id) setComponent(name, file.readVector(id, name, m_size)); } } - } else if (mode == "TPX" || mode == "TPC") { + } else if (mode == "TPX") { // data format used by Python h5py export (Cantera 2.5) vector_fp T = file.readVector(id, "T", m_size); vector_fp P = file.readVector(id, "P", m_size); @@ -542,6 +549,14 @@ void SolutionArray::readEntry(const std::string& fname, const std::string& id) m_sol->thermo()->setState_TPX(T[i], P[i], X[i].data()); m_sol->thermo()->saveState(nState, &m_data[i * m_stride]); } + } else if (mode == "TPY") { + vector_fp T = file.readVector(id, "T", m_size); + vector_fp P = file.readVector(id, "P", m_size); + auto Y = file.readMatrix(id, "Y", m_size, nSpecies); + for (size_t i = 0; i < m_size; i++) { + m_sol->thermo()->setState_TPY(T[i], P[i], Y[i].data()); + m_sol->thermo()->saveState(nState, &m_data[i * m_stride]); + } } else if (mode == "") { throw CanteraError("SolutionArray::restore", "Data are not consistent 
with full state modes."); diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index f90e36b8fed..e8d70b498a2 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -242,7 +242,6 @@ std::shared_ptr Inlet1D::asArray(const double* soln) const meta["mass-flux"] = m_mdot; // set gas state (using pressure from adjacent domain) - m_flow->setGas(soln, 0); double pressure = m_flow->phase().pressure(); auto phase = m_solution->thermo(); phase->setState_TPY(m_temp, pressure, &m_yin[0]); @@ -282,9 +281,15 @@ void Inlet1D::restore(SolutionArray& arr, double* soln, int loglevel) Boundary1D::restore(arr.meta(), soln, loglevel); arr.setIndex(0); auto phase = arr.thermo(); - auto aux = arr.getAuxiliary(0); + auto meta = arr.meta(); m_temp = phase->temperature(); - m_mdot = phase->density() * aux["velocity"]; + if (meta.hasKey("mass-flux")) { + m_mdot = meta.at("mass-flux").asDouble(); + } else { + // convert data format used by Python h5py export (Cantera < 3.0) + auto aux = arr.getAuxiliary(0); + m_mdot = phase->density() * aux["velocity"]; + } auto Y = phase->massFractions(); std::copy(Y, Y + m_nsp, &m_yin[0]); } @@ -583,7 +588,6 @@ std::shared_ptr OutletRes1D::asArray(const double* soln) const meta["temperature"] = m_temp; // set gas state (using pressure from adjacent domain) - m_flow->setGas(soln, 0); double pressure = m_flow->phase().pressure(); auto phase = m_solution->thermo(); phase->setState_TPY(m_temp, pressure, &m_yres[0]); diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index a162f3fd18b..07fff6897e6 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -111,10 +111,13 @@ void Sim1D::save(const std::string& fname, const std::string& id, string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot+1)) : ""; if (extension == "h5" || extension == "hdf" || extension == "hdf5") { for (auto dom : m_dom) { - auto arr = dom->asArray(m_x.data()); + auto arr = dom->asArray(m_x.data() + dom->loc()); arr->writeEntry(fname, id + "/" + dom->id()); } SolutionArray::writeHeader(fname, id, desc); + if (loglevel > 0) { + writelog("Solution saved to file {} as group '{}'.\n", fname, id); + } return; } if (extension == "yaml" || extension == "yml") { @@ -126,7 +129,7 @@ void Sim1D::save(const std::string& fname, const std::string& id, SolutionArray::writeHeader(data, id, desc); for (auto dom : m_dom) { - auto arr = dom->asArray(m_x.data()); + auto arr = dom->asArray(m_x.data() + dom->loc()); arr->writeEntry(data, id + "/" + dom->id()); } @@ -134,6 +137,9 @@ void Sim1D::save(const std::string& fname, const std::string& id, std::ofstream out(fname); out << data.toYamlString(); AnyMap::clearCachedFile(fname); + if (loglevel > 0) { + writelog("Solution saved to file {} as entry '{}'.\n", fname, id); + } return; } throw CanteraError("Sim1D::save", diff --git a/src/oneD/StFlow.cpp b/src/oneD/StFlow.cpp index bddd44dfdd4..ee35f2ef539 100644 --- a/src/oneD/StFlow.cpp +++ b/src/oneD/StFlow.cpp @@ -727,6 +727,8 @@ AnyMap StFlow::getMeta() const state["fixed-point"]["temperature"] = m_tfixed; } + state["species-names"] = m_thermo->speciesNames(); + return state; } From 2d97dafbd91f941edc1253a4ab8cd6d59e75d5ae Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Sat, 3 Dec 2022 13:31:43 -0600 Subject: [PATCH 62/93] Mark new features --- include/cantera/base/AnyMap.h | 1 + include/cantera/base/SolutionArray.h | 4 ++++ include/cantera/base/Storage.h | 4 ++++ include/cantera/base/global.h | 1 + include/cantera/base/stringUtils.h | 2 ++ include/cantera/oneD/Domain1D.h | 4 ++++ include/cantera/oneD/Sim1D.h | 2 ++ interfaces/cython/cantera/_onedim.pyx | 4 ++++ interfaces/cython/cantera/_utils.pyx | 2 ++ 9 files changed, 24 insertions(+) diff --git 
a/include/cantera/base/AnyMap.h b/include/cantera/base/AnyMap.h index 54f51c852e2..7bb8cb7a381 100644 --- a/include/cantera/base/AnyMap.h +++ b/include/cantera/base/AnyMap.h @@ -449,6 +449,7 @@ class AnyMap : public AnyBase std::string keys_str() const; //! Return an unordered set of keys + //! @since New in Cantera 3.0. std::set keys() const; //! Set a metadata value that applies to this AnyMap and its children. diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index dcc5734bda8..7a839bce26c 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -20,6 +20,10 @@ class ThermoPhase; * thermodynamic states using the same Solution object. C++ SolutionArray objects are * one-dimensional by design; extensions to multi-dimensional arrays need to be * implemented in high-level API's. + * + * @since New in Cantera 3.0. + * @warning This function is an experimental part of the %Cantera API and may be + * changed or removed without notice. */ class SolutionArray { diff --git a/include/cantera/base/Storage.h b/include/cantera/base/Storage.h index 9a27eaabfb8..0b6d267396b 100644 --- a/include/cantera/base/Storage.h +++ b/include/cantera/base/Storage.h @@ -48,6 +48,10 @@ namespace Cantera /*! * A wrapper class handling storage to HDF; acts as a thin wrapper for HighFive + * + * @since New in Cantera 3.0. + * @warning This function is an experimental part of the %Cantera API and may be + * changed or removed without notice. */ class Storage { diff --git a/include/cantera/base/global.h b/include/cantera/base/global.h index f4459f848dd..96e5c5b3bc9 100644 --- a/include/cantera/base/global.h +++ b/include/cantera/base/global.h @@ -106,6 +106,7 @@ std::string gitCommit(); bool debugModeEnabled(); //! Returns true if Cantera was compiled with C++ HighFive HDF support. +//! @since New in Cantera 3.0. bool usesHighFive(); /*! 
diff --git a/include/cantera/base/stringUtils.h b/include/cantera/base/stringUtils.h index ced0f648b08..46591a06580 100644 --- a/include/cantera/base/stringUtils.h +++ b/include/cantera/base/stringUtils.h @@ -112,6 +112,8 @@ void tokenizeString(const std::string& oval, * * @param oval String to be broken up * @param v Output vector of tokens. + * + * @since New in Cantera 3.0. */ void tokenizePath(const std::string& oval, std::vector& v); diff --git a/include/cantera/oneD/Domain1D.h b/include/cantera/oneD/Domain1D.h index 96aa3af7a89..3dc96d11f24 100644 --- a/include/cantera/oneD/Domain1D.h +++ b/include/cantera/oneD/Domain1D.h @@ -319,6 +319,8 @@ class Domain1D //! Save the state of this domain as a SolutionArray /*! * @param soln local solution vector for this domain + * + * @since New in Cantera 3.0. */ virtual std::shared_ptr asArray(const double* soln) const; @@ -337,6 +339,8 @@ class Domain1D * @param[out] soln Value of the solution vector, local to this domain * @param[in] loglevel 0 to suppress all output; 1 to show warnings; 2 for * verbose output + * + * @since New in Cantera 3.0. */ virtual void restore(SolutionArray& arr, double* soln, int loglevel); diff --git a/include/cantera/oneD/Sim1D.h b/include/cantera/oneD/Sim1D.h index e9c7c20ea01..99cecd89142 100644 --- a/include/cantera/oneD/Sim1D.h +++ b/include/cantera/oneD/Sim1D.h @@ -123,6 +123,7 @@ class Sim1D : public OneDim /** * Save the current solution to YAML (legacy implementation). @see save + * @since New in Cantera 3.0. */ void write_yaml(const std::string& fname, const std::string& id, const std::string& desc, int loglevel=1); @@ -148,6 +149,7 @@ class Sim1D : public OneDim /** * Initialize the solution with a previously-saved solution (legacy implementation). * @see restore + * @since New in Cantera 3.0. 
*/ void read_yaml(const std::string& fname, const std::string& id, int loglevel=2); diff --git a/interfaces/cython/cantera/_onedim.pyx b/interfaces/cython/cantera/_onedim.pyx index 433bb450f2a..10591a16584 100644 --- a/interfaces/cython/cantera/_onedim.pyx +++ b/interfaces/cython/cantera/_onedim.pyx @@ -1486,6 +1486,8 @@ cdef class Sim1D: quiet=True): """ Save the solution in YAML format (legacy implementation) + + .. versionadded:: 3.0 """ loglevel = 1 - quiet self.sim.write_yaml(stringify(str(filename)), stringify(name), @@ -1510,6 +1512,8 @@ cdef class Sim1D: def read_yaml(self, filename, name='solution', description='none', quiet=True): """ Set the solution vector to a previously-saved solution (legacy implementation) + + .. versionadded:: 3.0 """ loglevel = 2 * (1 - quiet) self.sim.read_yaml(stringify(str(filename)), stringify(name), loglevel) diff --git a/interfaces/cython/cantera/_utils.pyx b/interfaces/cython/cantera/_utils.pyx index 674a81271a7..4f7a133de7f 100644 --- a/interfaces/cython/cantera/_utils.pyx +++ b/interfaces/cython/cantera/_utils.pyx @@ -98,6 +98,8 @@ def hdf_support(): Returns list of libraries that include HDF support: - 'h5py': HDF support by Python package 'h5py'. - 'HighFive': if Cantera was compiled with C++ HighFive HDF support. + + .. versionadded:: 3.0 """ out = [] try: From 3e48d4691295899515da361fd5f71c225a982c09 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 29 Nov 2022 15:58:09 -0600 Subject: [PATCH 63/93] [UnitTest] Add test for restoring from HDF Also test for backward compatibility for legacy implementations and formats. 
--- test/python/test_onedim.py | 186 ++++++++++++++++++++++++++++++------- 1 file changed, 153 insertions(+), 33 deletions(-) diff --git a/test/python/test_onedim.py b/test/python/test_onedim.py index e65488761e8..ed7317dd5dd 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -517,7 +517,20 @@ def test_prune(self): # TODO: check that the solution is actually correct (that is, that the # residual satisfies the error tolerances) on the new grid. - def test_save_restore_yaml(self): + def test_save_restore_yaml_legacy(self): + self.run_save_restore("legacy") + + def test_save_restore_yaml_transition(self): + self.run_save_restore("transition") + + def test_save_restore_yaml_array(self): + self.run_save_restore("array") + + @utilities.unittest.skipIf("HighFive" not in ct.hdf_support(), "HighFive not installed") + def test_save_restore_hdf_array(self): + self.run_save_restore("hdf") + + def run_save_restore(self, mode): reactants = "H2:1.1, O2:1, AR:5" p = 2 * ct.one_atm Tin = 400 @@ -528,10 +541,11 @@ def test_save_restore_yaml(self): self.sim.flame.set_steady_tolerances(T=(T_rtol, T_atol)) self.solve_fixed_T() - filename = self.test_work_path / "onedim-fixed-T.yaml" - # In Python >= 3.8, this can be replaced by the missing_ok argument - if filename.is_file(): - filename.unlink() + if mode == "hdf": + filename = self.test_work_path / f"onedim-fixed-T.h5" + else: + filename = self.test_work_path / f"onedim-fixed-T-{mode}.yaml" + filename.unlink(missing_ok=True) Y1 = self.sim.Y u1 = self.sim.velocity @@ -539,16 +553,25 @@ def test_save_restore_yaml(self): P1 = self.sim.P T1 = self.sim.T - self.sim.save(filename, "test", loglevel=0) + if mode in {"array", "hdf"}: + self.sim.save(filename, "test", loglevel=0) + else: + self.sim.write_yaml(filename, "test", quiet=True) # Save a second solution to the same file self.sim.radiation_enabled = True self.sim.boundary_emissivities = 0.3, 0.8 - self.sim.save(filename, "test2", loglevel=0) + if mode in 
{"array", "hdf"}: + self.sim.save(filename, "test2", loglevel=0) + else: + self.sim.write_yaml(filename, "test2", quiet=True) # Create flame object with dummy initial grid self.sim = ct.FreeFlame(self.gas) - self.sim.restore(filename, "test", loglevel=0) + if mode == "legacy": + self.sim.read_yaml(filename, "test", quiet=True) + else: + self.sim.restore(filename, "test", loglevel=0) # Sim is initially in "steady-state" mode, so this returns the # steady-state tolerances @@ -586,10 +609,15 @@ def test_save_restore_yaml(self): self.assertFalse(self.sim.radiation_enabled) self.assertFalse(self.sim.soret_enabled) - self.sim.restore(filename, "test2", loglevel=0) + if mode == "legacy": + self.sim.read_yaml(filename, "test2", quiet=True) + else: + self.sim.restore(filename, "test2", loglevel=0) self.assertTrue(self.sim.radiation_enabled) self.assertEqual(self.sim.boundary_emissivities, (0.3, 0.8)) + self.sim.solve(loglevel=0) + def test_array_properties(self): self.create_sim(ct.one_atm, 300, 'H2:1.1, O2:1, AR:5') grid_shape = self.sim.grid.shape @@ -702,27 +730,43 @@ def test_write_csv(self): k = self.gas.species_index('H2') self.assertArrayNear(data.X[:, k], self.sim.X[k, :]) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") - def test_write_hdf(self): - filename = self.test_work_path / "onedim-write_hdf.h5" - # In Python >= 3.8, this can be replaced by the missing_ok argument - if filename.is_file(): - filename.unlink() + @utilities.unittest.skipIf("h5py" not in ct.hdf_support(), "h5py not installed") + def test_write_hdf_legacy(self): + self.run_freeflame_write_hdf("legacy") + + @utilities.unittest.skipIf(ct.hdf_support() != {'h5py', 'HighFive'}, "h5py and/or HighFive not installed") + def test_write_hdf_transition(self): + self.run_freeflame_write_hdf("transition") + + @utilities.unittest.skipIf("HighFive" not in ct.hdf_support(), "HighFive not installed") + def test_write_hdf_native(self): + self.run_freeflame_write_hdf("native") 
+ + def run_freeflame_write_hdf(self, mode): + filename = self.test_work_path / f"onedim-write_hdf_{mode}.h5" + filename.unlink(missing_ok=True) self.run_mix(phi=1.1, T=350, width=2.0, p=2.0, refine=False) desc = 'mixture-averaged simulation' - self.sim.write_hdf(filename, description=desc) + if mode == "native": + self.sim.save(filename, "test", description=desc, loglevel=0) + else: + self.sim.write_hdf(filename, group="test", description=desc) f = ct.FreeFlame(self.gas) - meta = f.read_hdf(filename, normalize=False) + if mode == "legacy": + meta = f.read_hdf(filename, group="test", normalize=False) + self.assertEqual(meta['description'], desc) + self.assertEqual(meta['cantera_version'], ct.__version__) + self.assertEqual(meta['git_commit'], ct.__git_commit__) + else: + f.restore(filename, "test", loglevel=0) + self.assertArrayNear(f.grid, self.sim.grid) self.assertArrayNear(f.T, self.sim.T) - self.assertEqual(meta['description'], desc) k = self.gas.species_index('H2') self.assertArrayNear(f.X[k, :], self.sim.X[k, :]) self.assertArrayNear(f.inlet.X, self.sim.inlet.X) - self.assertEqual(meta['cantera_version'], ct.__version__) - self.assertEqual(meta['git_commit'], ct.__git_commit__) settings = self.sim.settings for k, v in f.settings.items(): @@ -734,6 +778,8 @@ def test_write_hdf(self): self.assertIn(k, settings) self.assertEqual(settings[k], v) + f.solve(loglevel=0) + def test_refine_criteria_boundscheck(self): self.create_sim(ct.one_atm, 300.0, 'H2:1.1, O2:1, AR:5') good = [3.0, 0.1, 0.2, 0.05] @@ -1267,26 +1313,46 @@ def test_reacting_surface_case2(self): def test_reacting_surface_case3(self): self.run_reacting_surface(xch4=0.2, tsurf=800.0, mdot=0.1, width=0.2) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") - def test_write_hdf(self): - filename = self.test_work_path / "impingingjet-write_hdf.h5" - # In Python >= 3.8, this can be replaced by the missing_ok argument - if filename.is_file(): - filename.unlink() + 
@utilities.unittest.skipIf("h5py" not in ct.hdf_support(), "h5py not installed") + def test_write_hdf_legacy(self): + self.run_impingingjet_write("legacy") + + @utilities.unittest.skipIf(ct.hdf_support() != {'h5py', 'HighFive'}, "h5py and/or HighFive not installed") + def test_write_hdf_transition(self): + self.run_impingingjet_write("transition") + + @utilities.unittest.skipIf("HighFive" not in ct.hdf_support(), "HighFive not installed") + def test_write_hdf_native(self): + self.run_impingingjet_write("native") + + def test_write_yaml_native(self): + self.run_impingingjet_write("yaml") + + def run_impingingjet_write(self, mode): + if mode == "yaml": + filename = self.test_work_path / f"impingingjet-write_yaml.yaml" + else: + filename = self.test_work_path / f"impingingjet-write_hdf_{mode}.h5" + filename.unlink(missing_ok=True) self.run_reacting_surface(xch4=0.095, tsurf=900.0, mdot=0.06, width=0.1) - self.sim.write_hdf(filename) + if mode in {"native", "yaml"}: + self.sim.save(filename, "test", loglevel=0) + else: + self.sim.write_hdf(filename, group="test") tinlet = 300.0 # inlet temperature comp = {'CH4': .1, 'O2':0.21, 'N2':0.79} jet = self.create_reacting_surface(comp, 700.0, 500., width=0.2) - jet.read_hdf(filename) + if mode == "legacy": + jet.read_hdf(filename, group="test") + else: + jet.restore(filename, "test", loglevel=0) self.assertArrayNear(jet.grid, self.sim.grid) self.assertArrayNear(jet.T, self.sim.T) k = self.sim.gas.species_index('H2') self.assertArrayNear(jet.X[k, :], self.sim.X[k, :]) - self.assertArrayNear(jet.surface.surface.X, self.sim.surface.surface.X) settings = self.sim.settings for k, v in jet.settings.items(): @@ -1294,7 +1360,28 @@ def test_write_hdf(self): if k != 'fixed_temperature': self.assertEqual(settings[k], v) - def test_save_restore(self): + if mode == "legacy": + # legacy HDF restore does not set state + return + self.assertArrayNear(jet.surface.surface.X, self.sim.surface.surface.X) + for i in 
range(self.sim.surface.n_components): + self.assertNear( + self.sim.value("surface", i, 0), + jet.value("surface", i, 0) + ) + + jet.solve(loglevel=0) + + def test_save_restore_yaml_legacy(self): + self.run_save_restore_yaml("legacy") + + def test_save_restore_yaml_transition(self): + self.run_save_restore_yaml("transition") + + def test_save_restore_yaml_array(self): + self.run_save_restore_yaml("array") + + def run_save_restore_yaml(self, mode): comp = {'CH4': 0.095, 'O2': 0.21, 'N2': 0.79} self.sim = self.create_reacting_surface(comp, tsurf=900, tinlet=300, width=0.1) @@ -1305,21 +1392,32 @@ def test_save_restore(self): self.sim.solve(loglevel=0, auto=False) - filename = self.test_work_path / "impingingjet1.yaml" - self.sim.save(filename) + filename = self.test_work_path / f"impingingjet-{mode}.yaml" + filename.unlink(missing_ok=True) + + if mode == "array": + self.sim.save(filename, "test", loglevel=0) + else: + self.sim.write_yaml(filename, "test", quiet=True) self.surf_phase.TPX = 300, ct.one_atm, "PT(S):1" sim2 = ct.ImpingingJet(gas=self.gas, width=0.12, surface=self.surf_phase) - sim2.restore(filename) + if mode == "legacy": + sim2.read_yaml(filename, "test", quiet=True) + else: + sim2.restore(filename, "test", loglevel=0) self.assertArrayNear(self.sim.grid, sim2.grid) self.assertArrayNear(self.sim.Y, sim2.Y) + self.assertArrayNear(self.sim.surface.surface.X, sim2.surface.surface.X) for i in range(self.sim.surface.n_components): self.assertNear( self.sim.value("surface", i, 0), sim2.value("surface", i, 0) ) + sim2.solve(loglevel=0) + class TestTwinFlame(utilities.CanteraTest): def solve(self, phi, T, width, P): @@ -1349,6 +1447,28 @@ def test_restart(self): self.assertNear(mdot[0], sim.reactants.mdot, 1e-4) self.assertNear(sim.T[0], sim.reactants.T, 1e-4) + def test_save_restore_yaml(self): + self.run_save_restore("yaml") + + def test_save_restore_hdf(self): + self.run_save_restore("hdf") + + def run_save_restore(self, mode): + filename = 
self.test_work_path / f"twinflame.{mode}" + filename.unlink(missing_ok=True) + + sim = self.solve(phi=0.4, T=300, width=0.05, P=0.1) + sim.save(filename, loglevel=0) + + gas = ct.Solution("h2o2.yaml") + sim2 = ct.CounterflowTwinPremixedFlame(gas=gas) + sim2.restore(filename) + + self.assertArrayNear(sim.grid, sim2.grid) + self.assertArrayNear(sim.Y, sim2.Y) + + sim2.solve(loglevel=0) + class TestIonFreeFlame(utilities.CanteraTest): @utilities.slow_test From 4962c924f46d606d691dee06e4c86ef378f83088 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Sat, 3 Dec 2022 16:20:01 -0600 Subject: [PATCH 64/93] [samples] Update diffusion_flame_batch.py --- .../python/onedim/diffusion_flame_batch.py | 109 ++++++------------ test/python/test_onedim.py | 1 + 2 files changed, 38 insertions(+), 72 deletions(-) diff --git a/samples/python/onedim/diffusion_flame_batch.py b/samples/python/onedim/diffusion_flame_batch.py index 6937ba700fc..ffa78cdd273 100644 --- a/samples/python/onedim/diffusion_flame_batch.py +++ b/samples/python/onedim/diffusion_flame_batch.py @@ -13,13 +13,12 @@ This example can, for example, be used to iterate to a counterflow diffusion flame to an awkward pressure and strain rate, or to create the basis for a flamelet table. 
-Requires: cantera >= 2.5.0, matplotlib >= 2.0 +Requires: cantera >= 3.0, matplotlib >= 2.0 Keywords: combustion, 1D flow, extinction, diffusion flame, strained flame, saving output, plotting """ -import os -import importlib +from pathlib import Path import numpy as np import matplotlib.pyplot as plt @@ -30,18 +29,21 @@ class FlameExtinguished(Exception): pass -hdf_output = importlib.util.find_spec('h5py') is not None +hdf_output = "HighFive" in ct.hdf_support() -if not hdf_output: - # Create directory for output data files - data_directory = 'diffusion_flame_batch_data' - if not os.path.exists(data_directory): - os.makedirs(data_directory) - fig_name = os.path.join(data_directory, 'figure_{0}.png') -else: - fig_name = 'diffusion_flame_batch_{0}.png' +output_path = Path() / "diffusion_flame_batch_data" +output_path.mkdir(parents=True, exist_ok=True) +def names(test): + if hdf_output: + # use internal container structure for HDF + file_name = output_path / "flame_data.h5" + return file_name, test + # use separate files for YAML + file_name = output_path / f"{test}.yaml".replace("-", "_").replace("/", "_") + return file_name, "solution" + # PART 1: INITIALIZATION # Set up an initial hydrogen-oxygen counterflow flame at 1 bar and low strain @@ -83,17 +85,9 @@ def interrupt_extinction(t): f.solve(loglevel=0, auto=True) # Save to data directory -if hdf_output: - # save to HDF container file if h5py is installed - file_name = 'diffusion_flame_batch.h5' - f.write_hdf(file_name, group='initial_solution', mode='w', quiet=False, - description=('Initial hydrogen-oxygen counterflow flame ' - 'at 1 bar and low strain rate')) -else: - file_name = 'initial_solution.yaml' - f.save(os.path.join(data_directory, file_name), name='solution', - description='Initial hydrogen-oxygen counterflow flame ' - 'at 1 bar and low strain rate') +file_name, entry = names("initial-solution") +desc = "Initial hydrogen-oxygen counterflow flame at 1 bar and low strain rate" +f.save(file_name, 
name=entry, description=desc) # PART 2: BATCH PRESSURE LOOP @@ -139,23 +133,13 @@ def interrupt_extinction(t): try: # Try solving the flame f.solve(loglevel=0) - if hdf_output: - group = 'pressure_loop/{:05.1f}'.format(p) - f.write_hdf(file_name, group=group, quiet=False, - description='pressure = {0} bar'.format(p)) - else: - file_name = 'pressure_loop_' + format(p, '05.1f') + '.yaml' - f.save(os.path.join(data_directory, file_name), name='solution', loglevel=1, - description='pressure = {0} bar'.format(p)) + file_name, entry = names(f"pressure-loop/{p:05.1f}") + f.save(file_name, name=entry, loglevel=1, description=f"pressure = {p} bar") p_previous = p except ct.CanteraError as e: print('Error occurred while solving:', e, 'Try next pressure level') # If solution failed: Restore the last successful solution and continue - if hdf_output: - f.read_hdf(file_name, group=group) - else: - f.restore(filename=os.path.join(data_directory, file_name), name='solution', - loglevel=0) + f.restore(file_name, name=entry, loglevel=0) # PART 3: STRAIN RATE LOOP @@ -174,11 +158,8 @@ def interrupt_extinction(t): exp_mdot_a = 1. / 2. 
# Restore initial solution -if hdf_output: - f.read_hdf(file_name, group='initial_solution') -else: - file_name = 'initial_solution.yaml' - f.restore(filename=os.path.join(data_directory, file_name), name='solution', loglevel=0) +file_name, entry = names("initial-solution") +f.restore(file_name, name=entry, loglevel=0) # Counter to identify the loop n = 0 @@ -203,14 +184,9 @@ def interrupt_extinction(t): try: # Try solving the flame f.solve(loglevel=0) - if hdf_output: - group = 'strain_loop/{:02d}'.format(n) - f.write_hdf(file_name, group=group, quiet=False, - description='strain rate iteration {}'.format(n)) - else: - file_name = 'strain_loop_' + format(n, '02d') + '.yaml' - f.save(os.path.join(data_directory, file_name), name='solution', loglevel=1, - description='strain rate iteration {}'.format(n)) + file_name, entry = names(f"strain-loop/{n:02d}") + f.save(file_name, name=entry, loglevel=1, + description=f"strain rate iteration {n}") except FlameExtinguished: print('Flame extinguished') break @@ -228,30 +204,25 @@ def interrupt_extinction(t): p_selected = p_range[::7] for p in p_selected: - if hdf_output: - group = 'pressure_loop/{0:05.1f}'.format(p) - f.read_hdf(file_name, group=group) - else: - file_name = 'pressure_loop_{0:05.1f}.yaml'.format(p) - f.restore(filename=os.path.join(data_directory, file_name), name='solution', loglevel=0) + file_name, entry = names(f"pressure-loop/{p:05.1f}") + f.restore(file_name, name=entry) # Plot the temperature profiles for selected pressures - ax1.plot(f.grid / f.grid[-1], f.T, label='{0:05.1f} bar'.format(p)) + ax1.plot(f.grid / f.grid[-1], f.T, label=f"{p:05.1f} bar") # Plot the axial velocity profiles (normalized by the fuel inlet velocity) # for selected pressures - ax2.plot(f.grid / f.grid[-1], f.velocity / f.velocity[0], - label='{0:05.1f} bar'.format(p)) + ax2.plot(f.grid / f.grid[-1], f.velocity / f.velocity[0], label=f"{p:05.1f} bar") ax1.legend(loc=0) ax1.set_xlabel(r'$z/z_{max}$') ax1.set_ylabel(r'$T$ [K]') 
-fig1.savefig(fig_name.format('T_p')) +fig1.savefig(output_path / "figure_T_p.png") ax2.legend(loc=0) ax2.set_xlabel(r'$z/z_{max}$') ax2.set_ylabel(r'$u/u_f$') -fig2.savefig(fig_name.format('u_p')) +fig1.savefig(output_path / "figure_u_p.png") fig3 = plt.figure() fig4 = plt.figure() @@ -259,29 +230,23 @@ def interrupt_extinction(t): ax4 = fig4.add_subplot(1, 1, 1) n_selected = range(1, n, 5) for n in n_selected: - if hdf_output: - group = 'strain_loop/{0:02d}'.format(n) - f.read_hdf(file_name, group=group) - else: - file_name = 'strain_loop_{0:02d}.yaml'.format(n) - f.restore(filename=os.path.join(data_directory, file_name), - name='solution', loglevel=0) + file_name, entry = names(f"strain-loop/{n:02d}") + f.restore(file_name, name=entry, loglevel=0) a_max = f.strain_rate('max') # the maximum axial strain rate # Plot the temperature profiles for the strain rate loop (selected) - ax3.plot(f.grid / f.grid[-1], f.T, label='{0:.2e} 1/s'.format(a_max)) + ax3.plot(f.grid / f.grid[-1], f.T, label=f"{a_max:.2e} 1/s") # Plot the axial velocity profiles (normalized by the fuel inlet velocity) # for the strain rate loop (selected) - ax4.plot(f.grid / f.grid[-1], f.velocity / f.velocity[0], - label=format(a_max, '.2e') + ' 1/s') + ax4.plot(f.grid / f.grid[-1], f.velocity / f.velocity[0], label=f"{a_max:.2e} 1/s") ax3.legend(loc=0) ax3.set_xlabel(r'$z/z_{max}$') ax3.set_ylabel(r'$T$ [K]') -fig3.savefig(fig_name.format('T_a')) +fig1.savefig(output_path / "figure_T_a.png") ax4.legend(loc=0) ax4.set_xlabel(r'$z/z_{max}$') ax4.set_ylabel(r'$u/u_f$') -fig4.savefig(fig_name.format('u_a')) +fig1.savefig(output_path / "figure_u_a.png") diff --git a/test/python/test_onedim.py b/test/python/test_onedim.py index ed7317dd5dd..ce4c6397be1 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -1450,6 +1450,7 @@ def test_restart(self): def test_save_restore_yaml(self): self.run_save_restore("yaml") + @utilities.unittest.skipIf("HighFive" not in ct.hdf_support(), 
"HighFive not installed") def test_save_restore_hdf(self): self.run_save_restore("hdf") From 54ee85742a46dcfda8658ce12336c4a95665c4f3 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Sun, 4 Dec 2022 23:41:09 -0600 Subject: [PATCH 65/93] Isolate HighFive to Storage.h --- include/cantera/base/Storage.h | 70 +++++++++++++++++++--------------- src/base/SolutionArray.cpp | 36 ++--------------- src/oneD/Sim1D.cpp | 10 ----- 3 files changed, 43 insertions(+), 73 deletions(-) diff --git a/include/cantera/base/Storage.h b/include/cantera/base/Storage.h index 0b6d267396b..0532467eda2 100644 --- a/include/cantera/base/Storage.h +++ b/include/cantera/base/Storage.h @@ -56,14 +56,7 @@ namespace Cantera class Storage { public: -#if CT_USE_HIGHFIVE_HDF - Storage(h5::File file, bool write) : m_file(file), m_write(write) {} -#else - Storage() { - throw CanteraError("Storage::Storage", - "Instantiation of Storage requires HighFive::File object."); - } -#endif + Storage(std::string fname, bool write); //! 
Flush file contents void flush(); @@ -103,7 +96,7 @@ class Storage bool checkGroupRead(const std::string& id) const; bool checkGroupWrite(const std::string& id); - h5::File m_file; + std::unique_ptr m_file; #endif bool m_write; @@ -111,9 +104,18 @@ class Storage #if CT_USE_HIGHFIVE_HDF +Storage::Storage(std::string fname, bool write) : m_write(write) +{ + if (m_write) { + m_file.reset(new h5::File(fname, h5::File::OpenOrCreate)); + } else { + m_file.reset(new h5::File(fname, h5::File::ReadOnly)); + } +} + void Storage::flush() { - m_file.flush(); + m_file->flush(); } bool Storage::checkGroupRead(const std::string& id) const @@ -121,13 +123,13 @@ bool Storage::checkGroupRead(const std::string& id) const std::vector tokens; tokenizePath(id, tokens); std::string grp = tokens[0]; - if (!m_file.exist(grp) || m_file.getObjectType(grp) != h5::ObjectType::Group) { + if (!m_file->exist(grp) || m_file->getObjectType(grp) != h5::ObjectType::Group) { throw CanteraError("Storage::checkGroup", "No group with id '{}' found", grp); } std::string path = grp; - h5::Group sub = m_file.getGroup(grp); + h5::Group sub = m_file->getGroup(grp); tokens.erase(tokens.begin()); for (auto& grp : tokens) { path += "/" + grp; @@ -142,11 +144,11 @@ bool Storage::checkGroupRead(const std::string& id) const bool Storage::checkGroupWrite(const std::string& id) { - if (!m_file.exist(id)) { - m_file.createGroup(id); + if (!m_file->exist(id)) { + m_file->createGroup(id); return true; } - if (m_file.getObjectType(id) != h5::ObjectType::Group) { + if (m_file->getObjectType(id) != h5::ObjectType::Group) { throw CanteraError("Storage::checkGroup", "Invalid object with id '{}' exists", id); } @@ -162,7 +164,7 @@ bool Storage::checkGroup(const std::string& id) { std::pair> Storage::contents(const std::string& id) const { - h5::Group sub = m_file.getGroup(id); + h5::Group sub = m_file->getGroup(id); std::set names; size_t nDims = npos; size_t nElements = 0; @@ -255,7 +257,7 @@ AnyMap readH5Attributes(const 
h5::Group& sub, bool recursive) AnyMap Storage::readAttributes(const std::string& id, bool recursive) const { - h5::Group sub = m_file.getGroup(id); + h5::Group sub = m_file->getGroup(id); return readH5Attributes(sub, recursive); } @@ -322,14 +324,14 @@ void writeH5Attributes(h5::Group sub, const AnyMap& meta) void Storage::writeAttributes(const std::string& id, const AnyMap& meta) { - h5::Group sub = m_file.getGroup(id); + h5::Group sub = m_file->getGroup(id); writeH5Attributes(sub, meta); } vector_fp Storage::readVector(const std::string& id, const std::string& name, size_t size) const { - h5::Group sub = m_file.getGroup(id); + h5::Group sub = m_file->getGroup(id); if (!sub.exist(name)) { throw CanteraError("Storage::readVector", "DataSet '{}' not found in path '{}'.", name, id); @@ -352,7 +354,7 @@ vector_fp Storage::readVector(const std::string& id, void Storage::writeVector(const std::string& id, const std::string& name, const vector_fp& data) { - h5::Group sub = m_file.getGroup(id); + h5::Group sub = m_file->getGroup(id); std::vector dims{data.size()}; h5::DataSet dataset = sub.createDataSet(name, h5::DataSpace(dims)); dataset.write(data); @@ -362,7 +364,7 @@ std::vector Storage::readMatrix(const std::string& id, const std::string& name, size_t rows, size_t cols) const { - h5::Group sub = m_file.getGroup(id); + h5::Group sub = m_file->getGroup(id); if (!sub.exist(name)) { throw CanteraError("Storage::readVector", "DataSet '{}' not found in path '{}'.", name, id); @@ -394,7 +396,7 @@ std::vector Storage::readMatrix(const std::string& id, void Storage::writeMatrix(const std::string& id, const std::string& name, const std::vector& data) { - h5::Group sub = m_file.getGroup(id); + h5::Group sub = m_file->getGroup(id); std::vector dims{data.size()}; dims.push_back(data.size() ? 
data[0].size() : 0); h5::DataSet dataset = sub.createDataSet(name, h5::DataSpace(dims)); @@ -403,48 +405,54 @@ void Storage::writeMatrix(const std::string& id, #else +Storage::Storage(std::string fname, bool write) +{ + throw CanteraError("Storage::Storage", + "Saving to HDF requires HighFive installation."); +} + void Storage::flush() { throw CanteraError("Storage::flush", - "Saving to HDF requires HighFive installation."); + "Saving to HDF requires HighFive installation."); } bool Storage::checkGroup(const std::string& id) { throw CanteraError("Storage::checkGroup", - "Saving to HDF requires HighFive installation."); + "Saving to HDF requires HighFive installation."); } std::pair> Storage::contents(const std::string& id) const { throw CanteraError("Storage::contents", - "Saving to HDF requires HighFive installation."); + "Saving to HDF requires HighFive installation."); } AnyMap Storage::readAttributes(const std::string& id, bool recursive) const { throw CanteraError("Storage::readAttributes", - "Saving to HDF requires HighFive installation."); + "Saving to HDF requires HighFive installation."); } void Storage::writeAttributes(const std::string& id, const AnyMap& meta) { throw CanteraError("Storage::writeAttributes", - "Saving to HDF requires HighFive installation."); + "Saving to HDF requires HighFive installation."); } vector_fp Storage::readVector(const std::string& id, const std::string& name, size_t size) const { throw CanteraError("Storage::readVector", - "Saving to HDF requires HighFive installation."); + "Saving to HDF requires HighFive installation."); } void Storage::writeVector(const std::string& id, const std::string& name, const vector_fp& data) { throw CanteraError("Storage::writeVector", - "Saving to HDF requires HighFive installation."); + "Saving to HDF requires HighFive installation."); } std::vector Storage::readMatrix(const std::string& id, @@ -452,14 +460,14 @@ std::vector Storage::readMatrix(const std::string& id, size_t rows, size_t cols) 
const { throw CanteraError("Storage::readMatrix", - "Saving to HDF requires HighFive installation."); + "Saving to HDF requires HighFive installation."); } void Storage::writeMatrix(const std::string& id, const std::string& name, const std::vector& data) { throw CanteraError("Storage::writeMatrix", - "Saving to HDF requires HighFive installation."); + "Saving to HDF requires HighFive installation."); } #endif diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index ee17c8e199b..7cd18763241 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -224,14 +224,7 @@ AnyMap preamble(const std::string& desc) void SolutionArray::writeHeader(const std::string& fname, const std::string& id, const std::string& desc) { -#if CT_USE_HIGHFIVE_HDF - h5::File hdf(fname, h5::File::OpenOrCreate); - Storage file(hdf, true); -#else - throw CanteraError("SolutionArray::writeHeader", - "Saving to HDF requires HighFive installation."); - Storage file; -#endif + Storage file(fname, true); file.checkGroup(id); file.writeAttributes(id, preamble(desc)); file.flush(); @@ -245,14 +238,7 @@ void SolutionArray::writeHeader(AnyMap& root, const std::string& id, void SolutionArray::writeEntry(const std::string& fname, const std::string& id) { -#if CT_USE_HIGHFIVE_HDF - h5::File hdf(fname, h5::File::OpenOrCreate); - Storage file(hdf, true); -#else - throw CanteraError("SolutionArray::writeEntry", - "Saving to HDF requires HighFive installation."); - Storage file; -#endif + Storage file(fname, true); file.checkGroup(id); file.writeAttributes(id, m_meta); if (!m_size) { @@ -393,14 +379,7 @@ void SolutionArray::save( AnyMap SolutionArray::readHeader(const std::string& fname, const std::string& id) { -#if CT_USE_HIGHFIVE_HDF - h5::File hdf(fname, h5::File::ReadOnly); - Storage file(hdf, false); -#else - throw CanteraError("SolutionArray::readHeader", - "Saving to HDF requires HighFive installation."); - Storage file; -#endif + Storage file(fname, false); 
file.checkGroup(id); return file.readAttributes(id, false); } @@ -485,14 +464,7 @@ std::set SolutionArray::stateProperties(std::string mode, bool alia void SolutionArray::readEntry(const std::string& fname, const std::string& id) { -#if CT_USE_HIGHFIVE_HDF - h5::File hdf(fname, h5::File::ReadOnly); - Storage file(hdf, false); -#else - throw CanteraError("SolutionArray::readEntry", - "Saving to HDF requires HighFive installation."); - Storage file; -#endif + Storage file(fname, false); file.checkGroup(id); m_meta = file.readAttributes(id, true); diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index 07fff6897e6..60f527d6c25 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -19,16 +19,6 @@ using namespace std; -#if CT_USE_HIGHFIVE_HDF -#if CT_USE_SYSTEM_HIGHFIVE - #include -#else - #include "cantera/ext/HighFive/H5File.hpp" -#endif - -namespace h5 = HighFive; -#endif - namespace Cantera { From c6aa334830b79b32d2253a4c3983f4dca519859e Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Mon, 5 Dec 2022 08:17:43 -0600 Subject: [PATCH 66/93] Refine HDF5 support infrastructure --- SConstruct | 87 ++++++++++++++++++++++------ include/cantera/base/Storage.h | 13 ++++- include/cantera/base/config.h.in | 2 +- include/cantera/base/global.h | 4 +- interfaces/cython/cantera/_utils.pxd | 2 +- interfaces/cython/cantera/_utils.pyx | 6 +- src/base/global.cpp | 4 +- test/python/test_onedim.py | 12 ++-- 8 files changed, 94 insertions(+), 36 deletions(-) diff --git a/SConstruct b/SConstruct index d014456e139..263a79147cc 100644 --- a/SConstruct +++ b/SConstruct @@ -356,14 +356,33 @@ config_options = [ must include the shared version of the library, for example, 'libfmt.so'.""", "default", ("default", "y", "n")), + EnumOption( + "hdf_support", + """Select whether to support HDF5 container files natively ('y'), disable HDF5 + support ('n'), or to decide automatically based on the system configuration + ('default'). 
Native HDF5 support uses the headers-only HDF5 wrapper HighFive + (see option 'system_highfive'). Specifying 'hdf_include' or 'hdf_libdir' + changes the default to 'y'.""", + "default", ("default", "y", "n")), + PathOption( + "hdf_include", + """The directory where the HDF5 header files are installed. This should be the + directory that contains files 'H5Version.h' and 'H5Public.h', amongst others. + Not needed if the headers are installed in a standard location, for example, + '/usr/include'.""", + "", PathVariable.PathAccept), + PathOption( + "hdf_libdir", + """The directory where the HDF5 libraries are installed. Not needed if the + libraries are installed in a standard location, for example, '/usr/lib'.""", + "", PathVariable.PathAccept), EnumOption( "system_highfive", """Select whether to use HighFive from a system installation ('y'), from a Git submodule ('n'), or to decide automatically ('default'). If HighFive is not installed directly into a system include directory, for example, it is installed in '/opt/include/HighFive', then you will need to add - '/opt/include/HighFive' to 'extra_inc_dirs'. 
-    """,
+    '/opt/include/HighFive' to 'extra_inc_dirs'.""",
         "default", ("default", "y", "n")),
     EnumOption(
         "system_yamlcpp",
@@ -1506,10 +1525,27 @@ else: # env['system_sundials'] == 'n'
     env['sundials_version'] = '5.3'
     env['has_sundials_lapack'] = int(env['use_lapack'])

-if not conf.CheckLib("hdf5", autoadd=False):
-    env["uses_highfive"] = False
+if env["hdf_include"]:
+    env["hdf_include"] = Path(env["hdf_include"]).as_posix()
+    env.Append(CPPPATH=[env["hdf_include"]])
+    env["hdf_support"] = "y"
+    env["extra_inc_dirs"].append(env["hdf_include"])
+if env["hdf_libdir"]:
+    env["hdf_libdir"] = Path(env["hdf_libdir"]).as_posix()
+    env.Append(LIBPATH=[env["hdf_libdir"]])
+    env["hdf_support"] = "y"
+    if env["use_rpath_linkage"]:
+        env.Append(RPATH=env["hdf_libdir"])
+    env["extra_lib_dirs"].append(env["hdf_libdir"])
+
+if env["hdf_support"] == "n":
+    env["use_hdf5"] = False
+else:
+    env["use_hdf5"] = conf.CheckLib("hdf5", autoadd=False)
+    if not env["use_hdf5"] and env["hdf_support"] == "y":
+        config_error("HDF5 support has been specified but libraries were not found.")

-elif env["system_highfive"] in ("n", "default"):
+if env["use_hdf5"] and env["system_highfive"] in ("n", "default"):
     env["system_highfive"] = False
     if not os.path.exists("ext/HighFive/include"):
         if not os.path.exists(".git"):
@@ -1525,23 +1561,41 @@ elif env["system_highfive"] in ("n", "default"):
                 "Try manually checking out the submodule with:\n\n"
                 "    git submodule update --init --recursive ext/HighFive\n")

-    env["uses_highfive"] = conf.CheckLibWithHeader(
+    env["use_hdf5"] = conf.CheckLibWithHeader(
         "hdf5", "../ext/HighFive/include/highfive/H5File.hpp", language="C++",
         autoadd=False)
-    if env["uses_highfive"]:
+    if env["use_hdf5"]:
         logger.info("Using private installation of HighFive.")
+    elif env["hdf_support"] == "y":
+        config_error("HDF5 support has been specified but HighFive configuration failed.")
     else:
-        logger.error("HighFive is not configured correctly.")
+        logger.warning("HighFive is not 
configured correctly; skipping.") + env["use_hdf5"] = False -elif env["system_highfive"] in ("y", "default"): +elif env["use_hdf5"]: env["system_highfive"] = True - env["uses_highfive"] = conf.CheckLibWithHeader( + env["use_hdf5"] = conf.CheckLibWithHeader( "hdf5", "highfive/H5File.hpp", language="C++", autoadd=False) - if env["uses_highfive"]: + if env["use_hdf5"]: logger.info("Using system installation of HighFive.") else: - logger.warning("Unable to locate HighFive installation.") + config_error("Unable to locate system HighFive installation.") + +if env["use_hdf5"]: + hdf_version = textwrap.dedent("""\ + #include + #include "H5public.h" + int main(int argc, char** argv) { + std::cout << H5_VERS_MAJOR << "." << H5_VERS_MINOR << "." << H5_VERS_RELEASE; + return 0; + } + """) + retcode, hdf_version = conf.TryRun(hdf_version, ".cpp") + if retcode: + logger.info(f"Compiling against HDF5 version {hdf_version}") + else: + logger.warning("Failed to determine HDF5 version.") def set_fortran(pattern, value): # Set compiler / flags for all Fortran versions to be the same @@ -2069,8 +2123,8 @@ cdefine('LAPACK_FTN_TRAILING_UNDERSCORE', 'lapack_ftn_trailing_underscore') cdefine('FTN_TRAILING_UNDERSCORE', 'lapack_ftn_trailing_underscore') cdefine('LAPACK_NAMES_LOWERCASE', 'lapack_names', 'lower') cdefine('CT_USE_LAPACK', 'use_lapack') -cdefine("CT_USE_HIGHFIVE_HDF", "uses_highfive") -cdefine('CT_USE_SYSTEM_HIGHFIVE', 'system_highfive') +cdefine("CT_USE_HDF5", "use_hdf5") +cdefine("CT_USE_SYSTEM_HIGHFIVE", "system_highfive") cdefine("CT_USE_SYSTEM_EIGEN", "system_eigen") cdefine("CT_USE_SYSTEM_EIGEN_PREFIXED", "system_eigen_prefixed") cdefine('CT_USE_SYSTEM_FMT', 'system_fmt') @@ -2160,10 +2214,7 @@ else: env["external_libs"] = [] env["external_libs"].extend(env["sundials_libs"]) -if env["uses_highfive"]: - if env["OS"] == "Windows": - # see https://github.com/microsoft/vcpkg/issues/24293 - env.Append(CPPDEFINES=["H5_BUILT_AS_DYNAMIC_LIB"]) +if env["use_hdf5"]: 
env["external_libs"].append("hdf5") if env["system_fmt"]: diff --git a/include/cantera/base/Storage.h b/include/cantera/base/Storage.h index 0532467eda2..25b20262291 100644 --- a/include/cantera/base/Storage.h +++ b/include/cantera/base/Storage.h @@ -10,7 +10,14 @@ #include "cantera/base/stringUtils.h" #include -#if CT_USE_HIGHFIVE_HDF +#if CT_USE_HDF5 +#ifdef _WIN32 + // see https://github.com/microsoft/vcpkg/issues/24293 + #define H5_BUILT_AS_DYNAMIC_LIB +#else + #define H5_BUILT_AS_STATIC_LIB +#endif + #if CT_USE_SYSTEM_HIGHFIVE #include #include @@ -92,7 +99,7 @@ class Storage const std::string& name, const std::vector& data); private: -#if CT_USE_HIGHFIVE_HDF +#if CT_USE_HDF5 bool checkGroupRead(const std::string& id) const; bool checkGroupWrite(const std::string& id); @@ -102,7 +109,7 @@ class Storage bool m_write; }; -#if CT_USE_HIGHFIVE_HDF +#if CT_USE_HDF5 Storage::Storage(std::string fname, bool write) : m_write(write) { diff --git a/include/cantera/base/config.h.in b/include/cantera/base/config.h.in index 70d88a1d868..b7e80a3ca4b 100644 --- a/include/cantera/base/config.h.in +++ b/include/cantera/base/config.h.in @@ -67,7 +67,7 @@ typedef int ftnlen; // Fortran hidden string length type {CT_SUNDIALS_USE_LAPACK!s} // Enable export/import of HDF data via C++ HighFive -{CT_USE_HIGHFIVE_HDF!s} +{CT_USE_HDF5!s} {CT_USE_SYSTEM_HIGHFIVE!s} #endif diff --git a/include/cantera/base/global.h b/include/cantera/base/global.h index 96e5c5b3bc9..4021d8513bd 100644 --- a/include/cantera/base/global.h +++ b/include/cantera/base/global.h @@ -105,9 +105,9 @@ std::string gitCommit(); //! preprocessor macro is defined. bool debugModeEnabled(); -//! Returns true if Cantera was compiled with C++ HighFive HDF support. +//! Returns true if Cantera was compiled with C++ HDF5 support. //! @since New in Cantera 3.0. -bool usesHighFive(); +bool usesHDF5(); /*! 
* @defgroup logs Diagnostic Output diff --git a/interfaces/cython/cantera/_utils.pxd b/interfaces/cython/cantera/_utils.pxd index c5f97894264..453d1c4ca9d 100644 --- a/interfaces/cython/cantera/_utils.pxd +++ b/interfaces/cython/cantera/_utils.pxd @@ -72,7 +72,7 @@ cdef extern from "cantera/base/global.h" namespace "Cantera": cdef void Cxx_suppress_thermo_warnings "Cantera::suppress_thermo_warnings" (cbool) cdef void Cxx_use_legacy_rate_constants "Cantera::use_legacy_rate_constants" (cbool) cdef string CxxGitCommit "Cantera::gitCommit" () - cdef cbool CxxUsesHighFive "Cantera::usesHighFive" () + cdef cbool CxxUsesHDF5 "Cantera::usesHDF5" () cdef cbool CxxDebugModeEnabled "Cantera::debugModeEnabled" () diff --git a/interfaces/cython/cantera/_utils.pyx b/interfaces/cython/cantera/_utils.pyx index 4f7a133de7f..2083a33036b 100644 --- a/interfaces/cython/cantera/_utils.pyx +++ b/interfaces/cython/cantera/_utils.pyx @@ -97,7 +97,7 @@ def hdf_support(): """ Returns list of libraries that include HDF support: - 'h5py': HDF support by Python package 'h5py'. - - 'HighFive': if Cantera was compiled with C++ HighFive HDF support. + - 'native': if Cantera was compiled with C++ HighFive HDF5 support. .. 
versionadded:: 3.0 """ @@ -108,8 +108,8 @@ def hdf_support(): pass else: out.append("h5py") - if CxxUsesHighFive(): - out.append("HighFive") + if CxxUsesHDF5(): + out.append("native") return set(out) cdef Composition comp_map(X) except *: diff --git a/src/base/global.cpp b/src/base/global.cpp index 0f64d08478c..563326cd77c 100644 --- a/src/base/global.cpp +++ b/src/base/global.cpp @@ -170,9 +170,9 @@ bool debugModeEnabled() #endif } -bool usesHighFive() +bool usesHDF5() { -#if CT_USE_HIGHFIVE_HDF +#if CT_USE_HDF5 return true; #else return false; diff --git a/test/python/test_onedim.py b/test/python/test_onedim.py index ce4c6397be1..abec738c981 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -526,7 +526,7 @@ def test_save_restore_yaml_transition(self): def test_save_restore_yaml_array(self): self.run_save_restore("array") - @utilities.unittest.skipIf("HighFive" not in ct.hdf_support(), "HighFive not installed") + @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") def test_save_restore_hdf_array(self): self.run_save_restore("hdf") @@ -734,11 +734,11 @@ def test_write_csv(self): def test_write_hdf_legacy(self): self.run_freeflame_write_hdf("legacy") - @utilities.unittest.skipIf(ct.hdf_support() != {'h5py', 'HighFive'}, "h5py and/or HighFive not installed") + @utilities.unittest.skipIf(ct.hdf_support() != {"h5py", "native"}, "h5py and/or HighFive not installed") def test_write_hdf_transition(self): self.run_freeflame_write_hdf("transition") - @utilities.unittest.skipIf("HighFive" not in ct.hdf_support(), "HighFive not installed") + @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") def test_write_hdf_native(self): self.run_freeflame_write_hdf("native") @@ -1317,11 +1317,11 @@ def test_reacting_surface_case3(self): def test_write_hdf_legacy(self): self.run_impingingjet_write("legacy") - @utilities.unittest.skipIf(ct.hdf_support() != {'h5py', 'HighFive'}, "h5py and/or 
HighFive not installed") + @utilities.unittest.skipIf(ct.hdf_support() != {"h5py", "native"}, "h5py and/or HighFive not installed") def test_write_hdf_transition(self): self.run_impingingjet_write("transition") - @utilities.unittest.skipIf("HighFive" not in ct.hdf_support(), "HighFive not installed") + @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") def test_write_hdf_native(self): self.run_impingingjet_write("native") @@ -1450,7 +1450,7 @@ def test_restart(self): def test_save_restore_yaml(self): self.run_save_restore("yaml") - @utilities.unittest.skipIf("HighFive" not in ct.hdf_support(), "HighFive not installed") + @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") def test_save_restore_hdf(self): self.run_save_restore("hdf") From f5dc973ee57186be9780fb4bdc29cca5f1f09ff8 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Mon, 5 Dec 2022 09:49:15 -0600 Subject: [PATCH 67/93] Implement HDF5 compression --- include/cantera/base/SolutionArray.h | 9 ++++-- include/cantera/base/Storage.h | 43 ++++++++++++++++++++++++--- include/cantera/oneD/Sim1D.h | 3 +- interfaces/cython/cantera/_onedim.pxd | 2 +- interfaces/cython/cantera/_onedim.pyx | 8 +++-- src/base/SolutionArray.cpp | 12 +++++--- src/oneD/Sim1D.cpp | 8 ++--- test/python/test_onedim.py | 2 +- 8 files changed, 67 insertions(+), 20 deletions(-) diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index 7a839bce26c..1c3d5d9203b 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -128,8 +128,10 @@ class SolutionArray * * @param fname Name of HDF container file * @param id Identifier of SolutionArray within the container file + * @param compression Compression level; optional (default=0; HDF only) */ - void writeEntry(const std::string& fname, const std::string& id); + void writeEntry(const std::string& fname, const std::string& id, + int compression=0); void 
writeEntry(AnyMap& root, const std::string& id); /*! @@ -137,8 +139,11 @@ class SolutionArray * * @param fname Name of output container file (YAML or HDF) * @param id Identifier of SolutionArray within the container file + * @param desc Description + * @param compression Compression level; optional (default=0; HDF only) */ - void save(const std::string& fname, const std::string& id, const std::string& desc); + void save(const std::string& fname, const std::string& id, + const std::string& desc, int compression=0); /*! * Read header data from container file. diff --git a/include/cantera/base/Storage.h b/include/cantera/base/Storage.h index 25b20262291..cc8866073be 100644 --- a/include/cantera/base/Storage.h +++ b/include/cantera/base/Storage.h @@ -65,6 +65,14 @@ class Storage public: Storage(std::string fname, bool write); + //! Set compression level (0..9) + /*! + * Compression is only applied to species data; note that compression may increase + * file size for small data sets (compression requires setting of chunk sizes, + * which involves considerable overhead for metadata). + */ + void setCompressionLevel(int level); + //! Flush file contents void flush(); @@ -104,9 +112,9 @@ class Storage bool checkGroupWrite(const std::string& id); std::unique_ptr m_file; -#endif - bool m_write; + int m_compressionLevel=0; +#endif }; #if CT_USE_HDF5 @@ -120,6 +128,15 @@ Storage::Storage(std::string fname, bool write) : m_write(write) } } +void Storage::setCompressionLevel(int level) +{ + if (level < 0 || level > 9) { + throw CanteraError("Storage::setCompressionLevel", + "Invalid compression level '{}' (needs to be 0..9).", level); + } + m_compressionLevel = level; +} + void Storage::flush() { m_file->flush(); @@ -406,8 +423,20 @@ void Storage::writeMatrix(const std::string& id, h5::Group sub = m_file->getGroup(id); std::vector dims{data.size()}; dims.push_back(data.size() ? 
data[0].size() : 0); - h5::DataSet dataset = sub.createDataSet(name, h5::DataSpace(dims)); - dataset.write(data); + if (m_compressionLevel) { + // Set chunk size to single chunk and apply compression level; for caveats, see + // https://stackoverflow.com/questions/32994766/compressed-files-bigger-in-h5py + h5::DataSpace space(dims, dims); //{h5::DataSpace::UNLIMITED, dims[1]}); + h5::DataSetCreateProps props; + props.add(h5::Chunking(std::vector{dims[0], dims[1]})); + props.add(h5::Deflate(m_compressionLevel)); + h5::DataSet dataset = sub.createDataSet(name, space, props); + dataset.write(data); + } else { + h5::DataSpace space(dims); + h5::DataSet dataset = sub.createDataSet(name, space); + dataset.write(data); + } } #else @@ -418,6 +447,12 @@ Storage::Storage(std::string fname, bool write) "Saving to HDF requires HighFive installation."); } +void Storage::setCompressionLevel(int level) +{ + throw CanteraError("Storage::setCompressionLevel", + "Saving to HDF requires HighFive installation."); +} + void Storage::flush() { throw CanteraError("Storage::flush", diff --git a/include/cantera/oneD/Sim1D.h b/include/cantera/oneD/Sim1D.h index 99cecd89142..005e3268d44 100644 --- a/include/cantera/oneD/Sim1D.h +++ b/include/cantera/oneD/Sim1D.h @@ -117,9 +117,10 @@ class Sim1D : public OneDim * @param id Identifier of solution within the container file * @param desc Description of the solution * @param loglevel Level of diagnostic output + * @param compression Compression level (optional; HDF only) */ void save(const std::string& fname, const std::string& id, - const std::string& desc, int loglevel=1); + const std::string& desc, int loglevel=1, int compression=0); /** * Save the current solution to YAML (legacy implementation). 
@see save diff --git a/interfaces/cython/cantera/_onedim.pxd b/interfaces/cython/cantera/_onedim.pxd index 12900b565fd..c59b5941ea9 100644 --- a/interfaces/cython/cantera/_onedim.pxd +++ b/interfaces/cython/cantera/_onedim.pxd @@ -123,7 +123,7 @@ cdef extern from "cantera/oneD/Sim1D.h": void refine(int) except +translate_exception void setRefineCriteria(size_t, double, double, double, double) except +translate_exception vector[double] getRefineCriteria(int) except +translate_exception - void save(string, string, string, int) except +translate_exception + void save(string, string, string, int, int) except +translate_exception void restore(string, string, int) except +translate_exception void write_yaml(string, string, string, int) except +translate_exception void read_yaml(string, string, int) except +translate_exception diff --git a/interfaces/cython/cantera/_onedim.pyx b/interfaces/cython/cantera/_onedim.pyx index 10591a16584..3dfb910a5de 100644 --- a/interfaces/cython/cantera/_onedim.pyx +++ b/interfaces/cython/cantera/_onedim.pyx @@ -1464,9 +1464,9 @@ cdef class Sim1D: return self.sim.fixedTemperatureLocation() def save(self, filename='soln.yaml', name='solution', description='none', - loglevel=1): + loglevel=1, compression=0): """ - Save the solution in YAML format. + Save the solution in YAML or HDF format. :param filename: solution file @@ -1474,13 +1474,15 @@ cdef class Sim1D: solution name within the file :param description: custom description text + :param compression: + compression level 0..9; optional (HDF only) >>> s.save(filename='save.yaml', name='energy_off', ... description='solution with energy eqn. 
disabled') """ self.sim.save(stringify(str(filename)), stringify(name), - stringify(description), loglevel) + stringify(description), loglevel, compression) def write_yaml(self, filename, name='solution', description='none', quiet=True): diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index 7cd18763241..c2cd62dd4fb 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -236,9 +236,13 @@ void SolutionArray::writeHeader(AnyMap& root, const std::string& id, root[id] = preamble(desc); } -void SolutionArray::writeEntry(const std::string& fname, const std::string& id) +void SolutionArray::writeEntry(const std::string& fname, const std::string& id, + int compression) { Storage file(fname, true); + if (compression) { + file.setCompressionLevel(compression); + } file.checkGroup(id); file.writeAttributes(id, m_meta); if (!m_size) { @@ -348,14 +352,14 @@ void SolutionArray::writeEntry(AnyMap& root, const std::string& id) } } -void SolutionArray::save( - const std::string& fname, const std::string& id, const std::string& desc) +void SolutionArray::save(const std::string& fname, const std::string& id, + const std::string& desc, int compression) { size_t dot = fname.find_last_of("."); std::string extension = (dot != npos) ? toLowerCopy(fname.substr(dot + 1)) : ""; if (extension == "h5" || extension == "hdf" || extension == "hdf5") { writeHeader(fname, id, desc); - writeEntry(fname, id); + writeEntry(fname, id, compression); return; } if (extension == "yaml" || extension == "yml") { diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index 60f527d6c25..8cb2cc0ff2d 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -95,18 +95,18 @@ void Sim1D::setProfile(size_t dom, size_t comp, } void Sim1D::save(const std::string& fname, const std::string& id, - const std::string& desc, int loglevel) + const std::string& desc, int loglevel, int compression) { size_t dot = fname.find_last_of("."); string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot+1)) : ""; if (extension == "h5" || extension == "hdf" || extension == "hdf5") { for (auto dom : m_dom) { auto arr = dom->asArray(m_x.data() + dom->loc()); - arr->writeEntry(fname, id + "/" + dom->id()); + arr->writeEntry(fname, id + "/" + dom->id(), compression); } SolutionArray::writeHeader(fname, id, desc); if (loglevel > 0) { - writelog("Solution saved to file {} as group '{}'.\n", fname, id); + writelog("Solution saved to file '{}' as group '{}'.\n", fname, id); } return; } @@ -128,7 +128,7 @@ void Sim1D::save(const std::string& fname, const std::string& id, out << data.toYamlString(); AnyMap::clearCachedFile(fname); if (loglevel > 0) { - writelog("Solution saved to file {} as entry '{}'.\n", fname, id); + writelog("Solution saved to file '{}' as entry '{}'.\n", fname, id); } return; } diff --git a/test/python/test_onedim.py b/test/python/test_onedim.py index abec738c981..eab41796b84 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -1459,7 +1459,7 @@ def run_save_restore(self, mode): filename.unlink(missing_ok=True) sim = self.solve(phi=0.4, T=300, width=0.05, P=0.1) - sim.save(filename, loglevel=0) + sim.save(filename, loglevel=0, compression=7) gas = ct.Solution("h2o2.yaml") sim2 = ct.CounterflowTwinPremixedFlame(gas=gas) From 85efbc1bddb95fe12b9e4a6d58b677722661ad50 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Mon, 5 Dec 2022 12:06:01 -0600 Subject: [PATCH 68/93] Ensure HDF5 can be read by SolutionArray.read_hdf --- interfaces/cython/cantera/composite.py | 45 +++++++++++++++----------- interfaces/cython/cantera/onedim.py | 4 ++- test/python/test_composite.py | 2 +- test/python/test_onedim.py | 14 ++++++++ 4 files changed, 44 insertions(+), 21 deletions(-) diff --git a/interfaces/cython/cantera/composite.py b/interfaces/cython/cantera/composite.py index 25eae072fc3..9a4a71f040f 100644 --- a/interfaces/cython/cantera/composite.py +++ b/interfaces/cython/cantera/composite.py @@ -953,12 +953,14 @@ 
def join(species): # determine suitable thermo properties for reconstruction basis = 'mass' if self.basis == 'mass' else 'mole' - prop = {'T': ('T'), 'P': ('P'), 'Q': ('Q'), - 'D': ('density', 'density_{}'.format(basis)), - 'U': ('u', 'int_energy_{}'.format(basis)), - 'V': ('v', 'volume_{}'.format(basis)), - 'H': ('h', 'enthalpy_{}'.format(basis)), - 'S': ('s', 'entropy_{}'.format(basis))} + prop = {"T": ("T", "temperature"), + "P": ("P", "pressure"), + "Q": ("Q", "quality"), + "D": ("D", "density", f"density_{basis}"), + "U": ("u", f"int_energy_{basis}"), + "V": ("v", f"volume_{basis}"), + "H": ("h", f"enthalpy_{basis}"), + "S": ("s", f"entropy_{basis}")} for st in states: # identify property specifiers state = [{st[i]: p for p in prop[st[i]] if p in labels} @@ -1413,13 +1415,13 @@ def read_hdf(self, filename, group=None, subgroup=None, force=False, normalize=T root = hdf[group] # identify subgroup - sub_names = [key for key, value in root.items() - if isinstance(value, _h5py.Group)] - if not len(sub_names): - msg = "HDF group '{}' does not contain valid data" - raise IOError(msg.format(group)) - if subgroup is not None: + sub_names = [key for key, value in root.items() + if isinstance(value, _h5py.Group)] + if not len(sub_names): + msg = "HDF group '{}' does not contain valid data" + raise IOError(msg.format(group)) + if subgroup not in sub_names: msg = ("HDF file does not contain data set '{}' within " "group '{}'; available data sets are: {}") @@ -1440,20 +1442,25 @@ def strip_ext(source): return out # ensure that mechanisms are matching - sol_source = strip_ext(dgroup['phase'].attrs['source']) - source = strip_ext(self.source) - if sol_source != source and not force: - msg = ("Sources of thermodynamic phases do not match: '{}' vs " - "'{}'; use option 'force' to override this error.") - raise IOError(msg.format(sol_source, source)) + if "phase" in dgroup: + sol_source = strip_ext(dgroup['phase'].attrs['source']).split("/")[-1] + source = strip_ext(self.source) 
+ if sol_source != source and not force: + msg = ("Sources of thermodynamic phases do not match: '{}' vs " + "'{}'; use option 'force' to override this error.") + raise IOError(msg.format(sol_source, source)) # load metadata self._meta = dict(dgroup.attrs.items()) + for name, value in dgroup.items(): + # support one level of recursion + if isinstance(value, _h5py.Group): + self._meta[name] = dict(value.attrs.items()) # load data data = OrderedDict() for name, value in dgroup.items(): - if name == 'phase': + if isinstance(value, _h5py.Group): continue elif value.dtype.type == np.bytes_: data[name] = np.array(value).astype('U') diff --git a/interfaces/cython/cantera/onedim.py b/interfaces/cython/cantera/onedim.py index 5749a2cc7c6..270b909b28f 100644 --- a/interfaces/cython/cantera/onedim.py +++ b/interfaces/cython/cantera/onedim.py @@ -2,6 +2,7 @@ # at https://cantera.org/license.txt for license and copyright information. from math import erf +from pathlib import Path from email.utils import formatdate import warnings import numpy as np @@ -124,7 +125,8 @@ def set_initial_guess(self, *args, data=None, group=None, **kwargs): # already a solution array arr = data - elif isinstance(data, str): + elif isinstance(data, (str, Path)): + data = str(data) if data.endswith('.hdf5') or data.endswith('.h5'): # data source identifies a HDF file arr = SolutionArray(self.gas, extra=self.other_components()) diff --git a/test/python/test_composite.py b/test/python/test_composite.py index eeaba699597..74f86841113 100644 --- a/test/python/test_composite.py +++ b/test/python/test_composite.py @@ -369,7 +369,7 @@ def test_write_hdf(self): hdf.create_group('spam') c = ct.SolutionArray(self.gas) - with self.assertRaisesRegex(IOError, 'does not contain valid data'): + with self.assertRaisesRegex(ValueError, 'requires a non-empty data dictionary'): c.read_hdf(outfile, group='spam') with self.assertRaisesRegex(IOError, 'does not contain group'): c.read_hdf(outfile, group='eggs') diff --git 
a/test/python/test_onedim.py b/test/python/test_onedim.py index eab41796b84..5212a551b00 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -1470,6 +1470,20 @@ def run_save_restore(self, mode): sim2.solve(loglevel=0) + @utilities.unittest.skipIf(ct.hdf_support() != {"h5py", "native"}, "h5py and/or HighFive not installed") + def test_backwards_compatibility(self): + filename = self.test_work_path / f"twinflame.h5" + filename.unlink(missing_ok=True) + + sim = self.solve(phi=0.4, T=300, width=0.05, P=0.1) + sim.save(filename, loglevel=0, compression=7) + + # load parts using h5py + for sub, points in {"flame": len(sim.grid), "reactants": 1}.items(): + arr = ct.SolutionArray(ct.Solution("h2o2.yaml")) + arr.read_hdf(filename, "solution", sub) + assert arr.size == points + class TestIonFreeFlame(utilities.CanteraTest): @utilities.slow_test From df3d6f96fc315516fd25575fbbdf9d3eb825da3c Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Mon, 5 Dec 2022 17:23:53 -0600 Subject: [PATCH 69/93] [Python] Deprecate Sim1D.write_hdf --- interfaces/cython/cantera/onedim.py | 7 +++++++ test/python/test_onedim.py | 4 ++++ 2 files changed, 11 insertions(+) diff --git a/interfaces/cython/cantera/onedim.py b/interfaces/cython/cantera/onedim.py index 270b909b28f..135ba2a3db6 100644 --- a/interfaces/cython/cantera/onedim.py +++ b/interfaces/cython/cantera/onedim.py @@ -563,7 +563,14 @@ def write_hdf(self, filename, *args, group=None, species='X', mode='a', `SolutionArray.collect_data`. The method exports data using `SolutionArray.write_hdf` via `to_solution_array` and requires a working installation of *h5py* (``h5py`` can be installed using pip or conda). + + .. deprecated:: 3.0 + + Method to be removed after Cantera 3.0; replaceable by 'Sim1D.save'. 
""" + warnings.warn( + "Method to be removed after Cantera 3.0; use 'Sim1D.save' instead.", + DeprecationWarning) cols = ('extra', 'T', 'P', species) meta = self.settings meta['date'] = formatdate(localtime=True) diff --git a/test/python/test_onedim.py b/test/python/test_onedim.py index 5212a551b00..952d3a17160 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -730,10 +730,12 @@ def test_write_csv(self): k = self.gas.species_index('H2') self.assertArrayNear(data.X[:, k], self.sim.X[k, :]) + @pytest.mark.usefixtures("allow_deprecated") @utilities.unittest.skipIf("h5py" not in ct.hdf_support(), "h5py not installed") def test_write_hdf_legacy(self): self.run_freeflame_write_hdf("legacy") + @pytest.mark.usefixtures("allow_deprecated") @utilities.unittest.skipIf(ct.hdf_support() != {"h5py", "native"}, "h5py and/or HighFive not installed") def test_write_hdf_transition(self): self.run_freeflame_write_hdf("transition") @@ -1313,10 +1315,12 @@ def test_reacting_surface_case2(self): def test_reacting_surface_case3(self): self.run_reacting_surface(xch4=0.2, tsurf=800.0, mdot=0.1, width=0.2) + @pytest.mark.usefixtures("allow_deprecated") @utilities.unittest.skipIf("h5py" not in ct.hdf_support(), "h5py not installed") def test_write_hdf_legacy(self): self.run_impingingjet_write("legacy") + @pytest.mark.usefixtures("allow_deprecated") @utilities.unittest.skipIf(ct.hdf_support() != {"h5py", "native"}, "h5py and/or HighFive not installed") def test_write_hdf_transition(self): self.run_impingingjet_write("transition") From 90cb10802b3488e813b6869005f2e2ace7bf42f3 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Mon, 5 Dec 2022 17:23:19 -0600 Subject: [PATCH 70/93] [samples] Remove Sim1D.write_hdf from Python samples --- samples/python/onedim/adiabatic_flame.py | 29 +++--- samples/python/onedim/burner_flame.py | 22 +++-- samples/python/onedim/diffusion_flame.py | 16 ++-- .../python/onedim/diffusion_flame_batch.py | 7 +- 
.../onedim/diffusion_flame_extinction.py | 88 +++++++------------ samples/python/onedim/flame_fixed_T.py | 28 +++--- samples/python/onedim/flame_initial_guess.py | 25 +++--- .../python/onedim/flamespeed_sensitivity.py | 5 +- samples/python/onedim/ion_burner_flame.py | 17 ++-- samples/python/onedim/ion_free_flame.py | 20 +++-- .../onedim/premixed_counterflow_flame.py | 13 ++- .../onedim/premixed_counterflow_twin_flame.py | 24 +++-- samples/python/onedim/stagnation_flame.py | 31 +++---- 13 files changed, 155 insertions(+), 170 deletions(-) diff --git a/samples/python/onedim/adiabatic_flame.py b/samples/python/onedim/adiabatic_flame.py index 7e7d176e21b..20414c29e76 100644 --- a/samples/python/onedim/adiabatic_flame.py +++ b/samples/python/onedim/adiabatic_flame.py @@ -2,13 +2,21 @@ A freely-propagating, premixed hydrogen flat flame with multicomponent transport properties. -Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, premixed flame, multicomponent transport, saving output """ +from pathlib import Path import cantera as ct + +if "native" in ct.hdf_support(): + output = Path() / "adiabatic_flame.h5" +else: + output = Path() / "adiabatic_flame.yaml" +output.unlink(missing_ok=True) + # Simulation parameters p = ct.one_atm # pressure [Pa] Tin = 300.0 # unburned gas temperature [K] @@ -31,28 +39,17 @@ f.solve(loglevel=loglevel, auto=True) # Solve with the energy equation enabled -try: - # save to HDF container file if h5py is installed - f.write_hdf('adiabatic_flame.h5', group='mix', mode='w', - description='solution with mixture-averaged transport') -except ImportError: - f.save('adiabatic_flame.yaml', 'mix', - 'solution with mixture-averaged transport') +f.save(output, name="mix", description="solution with mixture-averaged transport") f.show_solution() -print('mixture-averaged flamespeed = {0:7f} m/s'.format(f.velocity[0])) +print(f"mixture-averaged flamespeed = {f.velocity[0]:7f} m/s") # Solve with multi-component transport 
properties f.transport_model = 'Multi' f.solve(loglevel) # don't use 'auto' on subsequent solves f.show_solution() -print('multicomponent flamespeed = {0:7f} m/s'.format(f.velocity[0])) -try: - f.write_hdf('adiabatic_flame.h5', group='multi', - description='solution with multicomponent transport') -except ImportError: - f.save('adiabatic_flame.yaml', 'multi', - 'solution with multicomponent transport') +print(f"multicomponent flamespeed = {f.velocity[0]:7f} m/s") +f.save(output, name="multi", description="solution with multicomponent transport") # write the velocity, temperature, density, and mole fractions to a CSV file f.write_csv('adiabatic_flame.csv', quiet=False) diff --git a/samples/python/onedim/burner_flame.py b/samples/python/onedim/burner_flame.py index 6035bf4926e..776d11a821f 100644 --- a/samples/python/onedim/burner_flame.py +++ b/samples/python/onedim/burner_flame.py @@ -1,13 +1,20 @@ """ A burner-stabilized lean premixed hydrogen-oxygen flame at low pressure. -Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, premixed flame, saving output, multicomponent transport """ +from pathlib import Path import cantera as ct +if "native" in ct.hdf_support(): + output = Path() / "burner_flame.h5" +else: + output = Path() / "burner_flame.yaml" +output.unlink(missing_ok=True) + p = 0.05 * ct.one_atm tburner = 373.0 mdot = 0.06 @@ -25,20 +32,11 @@ f.transport_model = 'Mix' f.solve(loglevel, auto=True) -try: - # save to HDF container file if h5py is installed - f.write_hdf('burner_flame.h5', group='mix', mode='w', - description='solution with mixture-averaged transport') -except ImportError: - f.save('burner_flame.yaml', 'mix', 'solution with mixture-averaged transport') +f.save(output, name="mix", description="solution with mixture-averaged transport") f.transport_model = 'Multi' f.solve(loglevel) # don't use 'auto' on subsequent solves f.show_solution() -try: - f.write_hdf('burner_flame.h5', group='multi', - description='solution 
with multicomponent transport') -except ImportError: - f.save('burner_flame.yaml', 'multi', 'solution with multicomponent transport') +f.save(output, name="multi", description="solution with multicomponent transport") f.write_csv('burner_flame.csv', quiet=False) diff --git a/samples/python/onedim/diffusion_flame.py b/samples/python/onedim/diffusion_flame.py index b707300e1da..0960844089c 100644 --- a/samples/python/onedim/diffusion_flame.py +++ b/samples/python/onedim/diffusion_flame.py @@ -1,14 +1,22 @@ """ An opposed-flow ethane/air diffusion flame -Requires: cantera >= 2.5.0, matplotlib >= 2.0 +Requires: cantera >= 3.0, matplotlib >= 2.0 Keywords: combustion, 1D flow, diffusion flame, strained flame, plotting, saving output """ +from pathlib import Path import cantera as ct import matplotlib.pyplot as plt + +if "native" in ct.hdf_support(): + output = Path() / "diffusion_flame.h5" +else: + output = Path() / "diffusion_flame.yaml" +output.unlink(missing_ok=True) + # Input parameters p = ct.one_atm # pressure tin_f = 300.0 # fuel inlet temperature @@ -52,11 +60,7 @@ # Solve the problem f.solve(loglevel, auto=True) f.show_solution() -try: - # save to HDF container file if h5py is installed - f.write_hdf('diffusion_flame.h5', mode='w') -except ImportError: - f.save('diffusion_flame.yaml') +f.save(output) # write the velocity, temperature, and mole fractions to a CSV file f.write_csv('diffusion_flame.csv', quiet=False) diff --git a/samples/python/onedim/diffusion_flame_batch.py b/samples/python/onedim/diffusion_flame_batch.py index ffa78cdd273..248d8d6f6f3 100644 --- a/samples/python/onedim/diffusion_flame_batch.py +++ b/samples/python/onedim/diffusion_flame_batch.py @@ -29,11 +29,13 @@ class FlameExtinguished(Exception): pass -hdf_output = "HighFive" in ct.hdf_support() - output_path = Path() / "diffusion_flame_batch_data" output_path.mkdir(parents=True, exist_ok=True) +hdf_output = "native" in ct.hdf_support() +if hdf_output: + file_name = output_path / 
"flame_data.h5" + file_name.unlink(missing_ok=True) def names(test): if hdf_output: @@ -44,6 +46,7 @@ def names(test): file_name = output_path / f"{test}.yaml".replace("-", "_").replace("/", "_") return file_name, "solution" + # PART 1: INITIALIZATION # Set up an initial hydrogen-oxygen counterflow flame at 1 bar and low strain diff --git a/samples/python/onedim/diffusion_flame_extinction.py b/samples/python/onedim/diffusion_flame_extinction.py index b6fbcb5bacc..86a05725e26 100644 --- a/samples/python/onedim/diffusion_flame_extinction.py +++ b/samples/python/onedim/diffusion_flame_extinction.py @@ -9,26 +9,34 @@ (doi:10.1155/2014/484372). Please refer to this publication for a detailed explanation. Also, please don't forget to cite it if you make use of it. -Requires: cantera >= 2.5.0, matplotlib >= 2.0 +Requires: cantera >= 3.0, matplotlib >= 2.0 Keywords: combustion, 1D flow, diffusion flame, strained flame, extinction, saving output, plotting """ -import os -import importlib +from pathlib import Path import numpy as np import matplotlib.pyplot as plt import cantera as ct -hdf_output = importlib.util.find_spec('h5py') is not None +output_path = Path() / "diffusion_flame_extinction_data" +output_path.mkdir(parents=True, exist_ok=True) -if not hdf_output: - # Create directory for output data files - data_directory = 'diffusion_flame_extinction_data' - if not os.path.exists(data_directory): - os.makedirs(data_directory) +hdf_output = "native" in ct.hdf_support() +if hdf_output: + file_name = output_path / "flame_data.h5" + file_name.unlink(missing_ok=True) + +def names(test): + if hdf_output: + # use internal container structure for HDF + file_name = output_path / "flame_data.h5" + return file_name, test + # use separate files for YAML + file_name = output_path / f"{test}.yaml".replace("-", "_").replace("/", "_") + return file_name, "solution" # PART 1: INITIALIZATION @@ -61,15 +69,8 @@ print('Creating the initial solution') f.solve(loglevel=0, auto=True) -if 
hdf_output: - file_name = 'diffusion_flame_extinction.h5' - f.write_hdf(file_name, group='initial_solution', mode='w', quiet=False, - description=('Initial solution')) -else: - # Save to data directory - file_name = 'initial_solution.yaml' - f.save(os.path.join(data_directory, file_name), name='solution', - description="Initial solution") +file_name, entry = names("initial-solution") +f.save(file_name, name=entry, description="Initial solution") # PART 2: COMPUTE EXTINCTION STRAIN @@ -132,33 +133,24 @@ f.solve(loglevel=0) except ct.CanteraError as e: print('Error: Did not converge at n =', n, e) + + T_max.append(np.max(f.T)) + a_max.append(np.max(np.abs(np.gradient(f.velocity) / np.gradient(f.grid)))) if not np.isclose(np.max(f.T), temperature_limit_extinction): # Flame is still burning, so proceed to next strain rate n_last_burning = n - if hdf_output: - group = 'extinction/{0:04d}'.format(n) - f.write_hdf(file_name, group=group, quiet=True) - else: - file_name = 'extinction_{0:04d}.yaml'.format(n) - f.save(os.path.join(data_directory, file_name), - name='solution', loglevel=0, - description=f"Solution at alpha = {alpha[-1]}") - T_max.append(np.max(f.T)) - a_max.append(np.max(np.abs(np.gradient(f.velocity) / np.gradient(f.grid)))) + file_name, entry = names(f"extinction/{n:04d}") + f.save(file_name, name=entry, description=f"Solution at alpha = {alpha[-1]}") + print('Flame burning at alpha = {:8.4F}. 
Proceeding to the next iteration, ' 'with delta_alpha = {}'.format(alpha[-1], delta_alpha)) elif ((T_max[-2] - T_max[-1] < delta_T_min) and (delta_alpha < delta_alpha_min)): # If the temperature difference is too small and the minimum relative # strain rate increase is reached, save the last, non-burning, solution # to the output file and break the loop - T_max.append(np.max(f.T)) - a_max.append(np.max(np.abs(np.gradient(f.velocity) / np.gradient(f.grid)))) - if hdf_output: - group = 'extinction/{0:04d}'.format(n) - f.write_hdf(file_name, group=group, quiet=True) - else: - file_name = 'extinction_{0:04d}.yaml'.format(n) - f.save(os.path.join(data_directory, file_name), name='solution', loglevel=0) + file_name, entry = names(f"extinction/{n:04d}") + f.save(file_name, name=entry, description=f"Flame extinguished at alpha={alpha[-1]}") + print('Flame extinguished at alpha = {0:8.4F}.'.format(alpha[-1]), 'Abortion criterion satisfied.') break @@ -172,24 +164,15 @@ alpha[-1], alpha[n_last_burning], delta_alpha)) # Restore last burning solution - if hdf_output: - group = 'extinction/{0:04d}'.format(n_last_burning) - f.read_hdf(file_name, group=group) - else: - file_name = 'extinction_{0:04d}.yaml'.format(n_last_burning) - f.restore(os.path.join(data_directory, file_name), - name='solution', loglevel=0) + file_name, entry = names(f"extinction/{n_last_burning:04d}") + f.restore(file_name, entry, loglevel=0) # Print some parameters at the extinction point, after restoring the last burning # solution -if hdf_output: - group = 'extinction/{0:04d}'.format(n_last_burning) - f.read_hdf(file_name, group=group) -else: - file_name = 'extinction_{0:04d}.yaml'.format(n_last_burning) - f.restore(os.path.join(data_directory, file_name), - name='solution', loglevel=0) +file_name, entry = names(f"extinction/{n_last_burning:04d}") +f.restore(file_name, entry, loglevel=0) + print('----------------------------------------------------------------------') print('Parameters at the extinction 
point:') print('Pressure p={0} bar'.format(f.P / 1e5)) @@ -208,7 +191,4 @@ plt.semilogx(a_max, T_max) plt.xlabel(r'$a_{max}$ [1/s]') plt.ylabel(r'$T_{max}$ [K]') -if hdf_output: - plt.savefig('diffusion_flame_extinction_T_max_a_max.png') -else: - plt.savefig(os.path.join(data_directory, 'figure_T_max_a_max.png')) +plt.savefig(output_path / "figure_T_max_a_max.png") diff --git a/samples/python/onedim/flame_fixed_T.py b/samples/python/onedim/flame_fixed_T.py index a9abda6701e..bc2ca95491f 100644 --- a/samples/python/onedim/flame_fixed_T.py +++ b/samples/python/onedim/flame_fixed_T.py @@ -2,14 +2,21 @@ A burner-stabilized, premixed methane/air flat flame with multicomponent transport properties and a specified temperature profile. -Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, burner-stabilized flame, premixed flame, plotting, saving output """ -import cantera as ct -import numpy as np from pathlib import Path +import numpy as np +import cantera as ct + + +if "native" in ct.hdf_support(): + output = Path() / "flame_fixed_T.h5" +else: + output = Path() / "flame_fixed_T.yaml" +output.unlink(missing_ok=True) ################################################################ # parameter values @@ -60,25 +67,14 @@ f.set_refine_criteria(ratio=3.0, slope=0.3, curve=1) f.solve(loglevel, refine_grid) -try: - # save to HDF container file if h5py is installed - f.write_hdf('flame_fixed_T.h5', group='mix', mode='w', - description='solution with mixture-averaged transport') -except ImportError: - f.save('flame_fixed_T.yaml','mixav', - 'solution with mixture-averaged transport') +f.save(output, name="mix", description="solution with mixture-averaged transport") print('\n\n switching to multicomponent transport...\n\n') f.transport_model = 'Multi' f.set_refine_criteria(ratio=3.0, slope=0.1, curve=0.2) f.solve(loglevel, refine_grid) -try: - f.write_hdf('flame_fixed_T.h5', group='multi', - description='solution with multicomponent transport') 
-except ImportError: - f.save('flame_fixed_T.yaml','multi', - 'solution with multicomponent transport') +f.save(output, name="multi", description="solution with multicomponent transport") # write the velocity, temperature, density, and mole fractions to a CSV file f.write_csv('flame_fixed_T.csv', quiet=False) diff --git a/samples/python/onedim/flame_initial_guess.py b/samples/python/onedim/flame_initial_guess.py index 704da5d9a47..a26256c88cc 100644 --- a/samples/python/onedim/flame_initial_guess.py +++ b/samples/python/onedim/flame_initial_guess.py @@ -6,16 +6,16 @@ Requires: cantera >= 3.0 Keywords: combustion, 1D flow, flame speed, premixed flame, saving output """ -import os import sys +from pathlib import Path import cantera as ct try: import pandas as pd except ImportError: pd = None -data_directory = "flame_initial_guess_data" -os.makedirs(data_directory, exist_ok=True) +output_path = Path() / "flame_initial_guess_data" +output_path.mkdir(parents=True, exist_ok=True) # Simulation parameters p = ct.one_atm # pressure [Pa] @@ -53,25 +53,20 @@ def describe(flame): # Save the flame in a few different formats print("Save YAML") -yaml_filepath = os.path.join(data_directory, "flame.yaml") +yaml_filepath = output_path / "flame.yaml" f.save(yaml_filepath, name="solution", description="Initial methane flame") print("Save CSV") -csv_filepath = os.path.join(data_directory, "flame.csv") +csv_filepath = output_path / "flame.csv" f.write_csv(csv_filepath) try: # HDF is not a required dependency - hdf_filepath = os.path.join(data_directory, "flame.h5") - f.write_hdf( - hdf_filepath, - group="freeflame", - mode="w", - quiet=False, - description=("Initial methane flame"), - ) + hdf_filepath = output_path / "flame.h5" + hdf_filepath.unlink(missing_ok=True) + f.save(hdf_filepath, name="freeflame", description=("Initial methane flame")) print("Save HDF\n") -except ImportError as err: +except ct.CanteraError as err: print(f"Skipping HDF: {err}\n") hdf_filepath = None @@ -87,7 
+82,7 @@ def describe(flame): print("Restore solution from HDF") gas.TPX = Tin, p, reactants f2 = ct.FreeFlame(gas, width=width) - f2.read_hdf(hdf_filepath, group="freeflame") + f2.restore(hdf_filepath, name="freeflame", loglevel=0) describe(f2) # Restore the flame via initial guess diff --git a/samples/python/onedim/flamespeed_sensitivity.py b/samples/python/onedim/flamespeed_sensitivity.py index d8e4de40793..be76729a1c8 100644 --- a/samples/python/onedim/flamespeed_sensitivity.py +++ b/samples/python/onedim/flamespeed_sensitivity.py @@ -25,7 +25,7 @@ f.set_refine_criteria(ratio=3, slope=0.07, curve=0.14) f.solve(loglevel=1, auto=True) -print('\nmixture-averaged flamespeed = {:7f} m/s\n'.format(f.velocity[0])) +print(f"\nmixture-averaged flamespeed = {f.velocity[0]:7f} m/s\n") # Use the adjoint method to calculate sensitivities sens = f.get_flame_speed_reaction_sensitivities() @@ -34,5 +34,4 @@ print('Rxn # k/S*dS/dk Reaction Equation') print('----- ---------- ----------------------------------') for m in range(gas.n_reactions): - print('{: 5d} {: 10.3e} {}'.format( - m, sens[m], gas.reaction(m).equation)) + print(f"{m: 5d} {sens[m]: 10.3e} {gas.reaction(m).equation}") diff --git a/samples/python/onedim/ion_burner_flame.py b/samples/python/onedim/ion_burner_flame.py index 2713445f03c..67ec54e2eb4 100644 --- a/samples/python/onedim/ion_burner_flame.py +++ b/samples/python/onedim/ion_burner_flame.py @@ -1,12 +1,20 @@ """ A burner-stabilized premixed methane-air flame with charged species. 
-Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, burner-stabilized flame, plasma, premixed flame """ +from pathlib import Path import cantera as ct + +if "native" in ct.hdf_support(): + output = Path() / "ion_burner_flame.h5" +else: + output = Path() / "ion_burner_flame.yaml" +output.unlink(missing_ok=True) + p = ct.one_atm tburner = 600.0 reactants = 'CH4:1.0, O2:2.0, N2:7.52' # premixed gas composition @@ -25,11 +33,6 @@ f.transport_model = 'Ion' f.solve(loglevel, auto=True) f.solve(loglevel=loglevel, stage=2, enable_energy=True) -try: - # save to HDF container file if h5py is installed - f.write_hdf('ion_burner_flame.h5', group='ion', mode='w', - description='solution with ionized gas transport') -except ImportError: - f.save('ion_burner_flame.yaml', 'mix', 'solution with mixture-averaged transport') +f.save(output, name="mix", description="solution with mixture-averaged transport") f.write_csv('ion_burner_flame.csv', quiet=False) diff --git a/samples/python/onedim/ion_free_flame.py b/samples/python/onedim/ion_free_flame.py index df53cadfe8e..15677da9919 100644 --- a/samples/python/onedim/ion_free_flame.py +++ b/samples/python/onedim/ion_free_flame.py @@ -1,12 +1,20 @@ """ A freely-propagating, premixed methane-air flat flame with charged species. 
-Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, burner-stabilized flame, plasma, premixed flame """ +from pathlib import Path import cantera as ct + +if "native" in ct.hdf_support(): + output = Path() / "ion_free_flame.h5" +else: + output = Path() / "ion_free_flame.yaml" +output.unlink(missing_ok=True) + # Simulation parameters p = ct.one_atm # pressure [Pa] Tin = 300.0 # unburned gas temperature [K] @@ -29,16 +37,10 @@ # stage two f.solve(loglevel=loglevel, stage=2, enable_energy=True) - -try: - # save to HDF container file if h5py is installed - f.write_hdf('ion_free_flame.h5', group='ion', mode='w', - description='solution with ionized gas transport') -except ImportError: - f.save('ion_free_flame.yaml', 'ion', 'solution with ionized gas transport') +f.save(output, name="ion", description="solution with ionized gas transport") f.show_solution() -print('mixture-averaged flamespeed = {0:7f} m/s'.format(f.velocity[0])) +print(f"mixture-averaged flamespeed = {f.velocity[0]:7f} m/s") # write the velocity, temperature, density, and mole fractions to a CSV file f.write_csv('ion_free_flame.csv', quiet=False) diff --git a/samples/python/onedim/premixed_counterflow_flame.py b/samples/python/onedim/premixed_counterflow_flame.py index 0e4e48dbc79..cb4023a0d5e 100644 --- a/samples/python/onedim/premixed_counterflow_flame.py +++ b/samples/python/onedim/premixed_counterflow_flame.py @@ -4,12 +4,20 @@ This script simulates a lean hydrogen-oxygen flame stabilized in a strained flowfield, with an opposed flow consisting of equilibrium products. 
-Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, premixed flame, strained flame """ +from pathlib import Path import cantera as ct + +if "native" in ct.hdf_support(): + output = Path() / "premixed_counterflow_flame.h5" +else: + output = Path() / "premixed_counterflow_flame.yaml" +output.unlink(missing_ok=True) + # parameter values p = 0.05 * ct.one_atm # pressure T_in = 373.0 # inlet temperature @@ -41,8 +49,9 @@ sim.show_solution() sim.solve(loglevel, auto=True) +sim.save(output, name="mix", description="solution with mixture-averaged transport") # write the velocity, temperature, and mole fractions to a CSV file -sim.write_csv('premixed_counterflow.csv', quiet=False) +sim.write_csv("premixed_counterflow_flame.csv", quiet=False) sim.show_stats() sim.show_solution() diff --git a/samples/python/onedim/premixed_counterflow_twin_flame.py b/samples/python/onedim/premixed_counterflow_twin_flame.py index e04386b005d..cb3d95bceab 100644 --- a/samples/python/onedim/premixed_counterflow_twin_flame.py +++ b/samples/python/onedim/premixed_counterflow_twin_flame.py @@ -1,17 +1,16 @@ -# coding: utf-8 - """ Simulate two counter-flow jets of reactants shooting into each other. This simulation differs from the similar premixed_counterflow_flame.py example as the latter simulates a jet of reactants shooting into products. 
-Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, premixed flame, strained flame, plotting """ -import cantera as ct -import numpy as np import sys +from pathlib import Path +import numpy as np +import cantera as ct # Differentiation function for data that has variable grid spacing Used here to @@ -81,6 +80,12 @@ def solveOpposedFlame(oppFlame, massFlux=0.12, loglevel=1, return np.max(oppFlame.T), K, strainRatePoint +if "native" in ct.hdf_support(): + output = Path() / "premixed_counterflow_twin_flame.h5" +else: + output = Path() / "premixed_counterflow_twin_flame.yaml" +output.unlink(missing_ok=True) + # Select the reaction mechanism gas = ct.Solution('gri30.yaml') @@ -116,11 +121,12 @@ def solveOpposedFlame(oppFlame, massFlux=0.12, loglevel=1, # Thus to plot temperature vs distance, use oppFlame.grid and oppFlame.T Sc = computeConsumptionSpeed(oppFlame) +oppFlame.save(output, name="mix") -print("Peak temperature: {0:.1f} K".format(T)) -print("Strain Rate: {0:.1f} 1/s".format(K)) -print("Consumption Speed: {0:.2f} cm/s".format(Sc*100)) -oppFlame.write_csv("premixed_twin_flame.csv", quiet=False) +print(f"Peak temperature: {T:.1f} K") +print(f"Strain Rate: {K:.1f} 1/s") +print(f"Consumption Speed: {Sc * 100:.2f} cm/s") +oppFlame.write_csv("premixed_counterflow_twin_flame.csv", quiet=False) # Generate plots to see results, if user desires if '--plot' in sys.argv: diff --git a/samples/python/onedim/stagnation_flame.py b/samples/python/onedim/stagnation_flame.py index 028fd9b7f3b..b05e4f208d6 100644 --- a/samples/python/onedim/stagnation_flame.py +++ b/samples/python/onedim/stagnation_flame.py @@ -14,17 +14,22 @@ points would be concentrated upsteam of the flame, where the flamefront had been previously. (To see this, try setting prune to zero.) 
-Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, premixed flame, strained flame """ -import os -import importlib - +from pathlib import Path import cantera as ct -hdf_output = importlib.util.find_spec('h5py') is not None +output_path = Path() / "stagnation_flame_data" +output_path.mkdir(parents=True, exist_ok=True) + +if "native" in ct.hdf_support(): + output = output_path / "stagnation_flame.h5" +else: + output = output_path / "stagnation_flame.yaml" +output.unlink(missing_ok=True) # parameter values p = 0.05 * ct.one_atm # pressure @@ -74,24 +79,12 @@ sim.solve(loglevel, auto=True) -if hdf_output: - outfile = 'stagnation_flame.h5' -else: - outfile = 'stagnation_flame.yaml' -if os.path.exists(outfile): - os.remove(outfile) - for m, md in enumerate(mdot): sim.inlet.mdot = md sim.solve(loglevel) - if hdf_output: - sim.write_hdf(outfile, group='mdot{0}'.format(m), - description='mdot = {0} kg/m2/s'.format(md)) - else: - sim.save(outfile, 'mdot{0}'.format(m), - 'mdot = {0} kg/m2/s'.format(md)) + sim.save(output, name=f"mdot-{m}", description=f"mdot = {md} kg/m2/s") # write the velocity, temperature, and mole fractions to a CSV file - sim.write_csv('stagnation_flame_{0}.csv'.format(m), quiet=False) + sim.write_csv(output_path / f"stagnation_flame_{m}.csv", quiet=False) sim.show_stats() From 9ac0f056958fea8f1848023ab2e5449850a9a897 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 6 Dec 2022 08:02:40 -0600 Subject: [PATCH 71/93] [UnitTest] Skip PythonExtensibleRate if HDF5 is used --- test/kinetics/kineticsFromYaml.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test/kinetics/kineticsFromYaml.cpp b/test/kinetics/kineticsFromYaml.cpp index f5e2970f869..078ee1606b8 100644 --- a/test/kinetics/kineticsFromYaml.cpp +++ b/test/kinetics/kineticsFromYaml.cpp @@ -518,6 +518,10 @@ TEST(Reaction, PythonExtensibleRate) #ifndef CT_HAS_PYTHON GTEST_SKIP(); #endif + #ifdef CT_USE_HDF5 + // potential mismatch of HDF libraries between 
h5py and HighFive + GTEST_SKIP(); + #endif auto sol = newSolution("extensible-reactions.yaml"); auto R = sol->kinetics()->reaction(0); EXPECT_EQ(R->type(), "square-rate"); From 0aca5f7004ab2d2ce679a316638729b5c25a5f1e Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 1 Dec 2022 15:03:38 -0600 Subject: [PATCH 72/93] [CI] Test HighFive installation routes --- .github/workflows/main.yml | 78 ++++++++++++++++++++++++++++---------- 1 file changed, 59 insertions(+), 19 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 531e08d8906..b3b1b9ea5bc 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -28,6 +28,9 @@ jobs: python-version: ['3.8', '3.10', '3.11'] os: ['ubuntu-20.04', 'ubuntu-22.04'] fail-fast: false + env: + HDF5_LIBDIR: /usr/lib/x86_64-linux-gnu/hdf5/serial + HDF5_INCLUDEDIR: /usr/include/hdf5/serial steps: - uses: actions/checkout@v2 name: Checkout the repository @@ -42,15 +45,19 @@ jobs: run: | sudo apt update sudo apt install libboost-dev gfortran libopenmpi-dev libpython3-dev \ - libblas-dev liblapack-dev + libblas-dev liblapack-dev libhdf5-dev - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies + # h5py is optional; some versions don't have binaries (yet) run: | - python3 -m pip install ruamel.yaml scons==3.1.2 numpy cython pandas pytest pytest-github-actions-annotate-failures - python3 -m pip install h5py || true + python3 -m pip install ruamel.yaml scons==3.1.2 numpy cython pandas pytest \ + pytest-github-actions-annotate-failures + python3 -m pip install h5py || python3 -m pip install --no-binary=h5py h5py - name: Build Cantera - run: python3 `which scons` build env_vars=all -j2 debug=n --debug=time + run: | + python3 `which scons` build env_vars=all -j2 debug=n --debug=time \ + hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR - name: Upload shared library uses: actions/upload-artifact@v3 if: matrix.python-version == '3.10' && 
matrix.os == 'ubuntu-22.04' @@ -66,6 +73,9 @@ jobs: name: LLVM/Clang with Python 3.8 runs-on: ubuntu-22.04 timeout-minutes: 60 + env: + HDF5_LIBDIR: /usr/lib/x86_64-linux-gnu/hdf5/serial + HDF5_INCLUDEDIR: /usr/include/hdf5/serial steps: - uses: actions/checkout@v2 name: Checkout the repository @@ -79,16 +89,17 @@ jobs: - name: Install Apt dependencies run: | sudo apt update - sudo apt install libboost-dev gfortran libomp-dev libomp5 libopenblas-dev + sudo apt install libboost-dev gfortran libomp-dev libomp5 libopenblas-dev libhdf5-dev - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies - run: python3 -m pip install ruamel.yaml scons numpy cython h5py pandas pytest - pytest-github-actions-annotate-failures + run: | + python3 -m pip install ruamel.yaml scons numpy cython pandas pytest pytest-github-actions-annotate-failures + python3 -m pip install --no-binary=h5py h5py - name: Build Cantera run: python3 `which scons` build env_vars=all CXX=clang++-12 CC=clang-12 f90_interface=n extra_lib_dirs=/usr/lib/llvm/lib - -j2 debug=n --debug=time + -j2 debug=n --debug=time hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR - name: Test Cantera run: python3 `which scons` test show_long_tests=yes verbose_tests=yes --debug=time @@ -122,7 +133,7 @@ jobs: python-version: 3.11 if: matrix.python-version == '3.11' - name: Install Brew dependencies - run: brew install boost libomp + run: brew install boost libomp hdf5 - name: Setup Homebrew Python # This path should work for future Python versions as well if: matrix.python-version != '3.11' @@ -162,6 +173,9 @@ jobs: name: Coverage runs-on: ubuntu-latest timeout-minutes: 90 + env: + HDF5_LIBDIR: /usr/lib/x86_64-linux-gnu/hdf5/serial + HDF5_INCLUDEDIR: /usr/include/hdf5/serial steps: - uses: actions/checkout@v2 name: Checkout the repository @@ -175,12 +189,14 @@ jobs: - name: Install Apt dependencies run: | sudo apt update - sudo apt install libboost-dev gfortran liblapack-dev 
libblas-dev libsundials-dev + sudo apt install libboost-dev gfortran liblapack-dev libblas-dev libsundials-dev libhdf5-dev - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies - run: python3 -m pip install ruamel.yaml scons numpy cython h5py pandas scipy pytest + run: | + python3 -m pip install ruamel.yaml scons numpy cython pandas scipy pytest \ pytest-github-actions-annotate-failures pytest-cov gcovr + python3 -m pip install --no-binary=h5py h5py - name: Setup .NET Core SDK uses: actions/setup-dotnet@v2 with: @@ -189,7 +205,8 @@ jobs: run: | python3 `which scons` build blas_lapack_libs=lapack,blas coverage=y \ optimize=n skip_slow_tests=y no_optimize_flags='-DNDEBUG -O0' \ - FORTRANFLAGS='-O0' env_vars=all -j2 --debug=time + FORTRANFLAGS='-O0' env_vars=all -j2 --debug=time \ + hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR - name: Test Cantera run: python3 `which scons` test show_long_tests=yes verbose_tests=yes --debug=time @@ -327,6 +344,9 @@ jobs: matrix: python-version: ['3.8', '3.10', '3.11'] fail-fast: false + env: + HDF5_LIBDIR: /usr/lib/x86_64-linux-gnu/hdf5/serial + HDF5_INCLUDEDIR: /usr/include/hdf5/serial steps: - uses: actions/checkout@v2 name: Checkout the repository @@ -340,17 +360,26 @@ jobs: - name: Install Apt dependencies run: | sudo apt update - sudo apt install libboost-dev gfortran graphviz liblapack-dev libblas-dev gcc-9 g++-9 + sudo apt install libboost-dev gfortran graphviz liblapack-dev libblas-dev \ + gcc-9 g++-9 libhdf5-dev - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies run: | python3 -m pip install ruamel.yaml scons numpy cython pandas matplotlib scipy - python3 -m pip install h5py || true + python3 -m pip install --no-binary=h5py h5py - name: Build Cantera # compile with GCC 9.4.0 on ubuntu-20.04 as an alternative to the default # (GCC 7.5.0 is both default and oldest supported version) run: python3 `which scons` 
build -j2 debug=n CC=gcc-9 CXX=g++-9 + if: matrix.python-version != '3.10' + - name: Build Cantera (Python 3.10 with HDF) + # compile with GCC 9.4.0 on ubuntu-20.04 as an alternative to the default + # (GCC 7.5.0 is both default and oldest supported version) + run: | + python3 `which scons` build -j2 debug=n CC=gcc-9 CXX=g++-9 \ + hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR + if: matrix.python-version == '3.10' - name: Run the examples # See https://unix.stackexchange.com/a/392973 for an explanation of the -exec part run: | @@ -406,7 +435,7 @@ jobs: - name: Build Cantera run: | scons build system_fmt=y system_eigen=y system_yamlcpp=y system_sundials=y \ - blas_lapack_libs='lapack,blas' -j2 logging=debug debug=n \ + system_highfive=y blas_lapack_libs='lapack,blas' -j2 logging=debug debug=n \ optimize_flags='-O3 -ffast-math -fno-finite-math-only' - name: Test Cantera run: scons test show_long_tests=yes verbose_tests=yes @@ -561,6 +590,8 @@ jobs: env: INTEL_REPO: https://apt.repos.intel.com INTEL_KEY: GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB + HDF5_LIBDIR: /usr/lib/x86_64-linux-gnu/hdf5/serial + HDF5_INCLUDEDIR: /usr/include/hdf5/serial steps: - name: Intel Apt repository timeout-minutes: 1 @@ -574,7 +605,7 @@ jobs: timeout-minutes: 5 run: | sudo apt-get install intel-oneapi-compiler-fortran intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic \ - intel-oneapi-mpi intel-oneapi-mpi-devel intel-oneapi-mkl ninja-build libboost-dev + intel-oneapi-mpi intel-oneapi-mpi-devel intel-oneapi-mkl ninja-build libboost-dev libhdf5-dev - uses: actions/checkout@v2 name: Checkout the repository with: @@ -587,17 +618,20 @@ jobs: - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies - run: python3 -m pip install ruamel.yaml scons numpy cython h5py pandas pytest + run: | + python3 -m pip install ruamel.yaml scons numpy cython pandas pytest \ pytest-github-actions-annotate-failures + python3 -m pip install --no-binary=h5py h5py - 
name: Setup Intel oneAPI environment run: | source /opt/intel/oneapi/setvars.sh printenv >> $GITHUB_ENV - name: Build Cantera run: python3 `which scons` build env_vars=all CC=icx CXX=icpx -j2 debug=n + hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR --debug=time f90_interface=n # FORTRAN=ifx - name: Test Cantera - run: + run: | python3 `which scons` test show_long_tests=yes verbose_tests=yes --debug=time linux-intel-oneapi-classic: @@ -719,9 +753,15 @@ jobs: - name: Install Python dependencies run: python3 -m pip install ruamel.yaml - name: Install library dependencies with Conda (Windows) - run: mamba install -q yaml-cpp mkl + run: mamba install -q yaml-cpp mkl highfive shell: pwsh if: matrix.os == 'windows-2022' + - name: Install Brew dependencies (macOS) + run: brew install hdf5 + if: matrix.os == 'macos-11' + - name: Install Apt dependencies (Ubuntu) + run: sudo apt install libhdf5-dev + if: matrix.os == 'ubuntu-22.04' - name: Setup .NET Core SDK uses: actions/setup-dotnet@v2 with: From f550faa477ec79f1e02117f1afa774f2a79175b7 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Tue, 20 Dec 2022 16:10:51 -0500 Subject: [PATCH 73/93] [Python] Defer import of h5py until time of use This avoids potential conflicts between the versions of libhdf5 linked to the Cantera library and h5py, which could occur when a C++ main application made use of the Python ExtensibleRate class. 
--- .github/workflows/main.yml | 14 ++++------ interfaces/cython/cantera/composite.py | 37 +++++++++++++------------- test/kinetics/kineticsFromYaml.cpp | 4 --- test/python/test_composite.py | 21 ++++++++++----- test/python/test_onedim.py | 2 -- 5 files changed, 38 insertions(+), 40 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b3b1b9ea5bc..5b54b98e7df 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -53,7 +53,7 @@ jobs: run: | python3 -m pip install ruamel.yaml scons==3.1.2 numpy cython pandas pytest \ pytest-github-actions-annotate-failures - python3 -m pip install h5py || python3 -m pip install --no-binary=h5py h5py + python3 -m pip install h5py - name: Build Cantera run: | python3 `which scons` build env_vars=all -j2 debug=n --debug=time \ @@ -94,8 +94,7 @@ jobs: run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies run: | - python3 -m pip install ruamel.yaml scons numpy cython pandas pytest pytest-github-actions-annotate-failures - python3 -m pip install --no-binary=h5py h5py + python3 -m pip install ruamel.yaml scons numpy cython pandas h5py pytest pytest-github-actions-annotate-failures - name: Build Cantera run: python3 `which scons` build env_vars=all CXX=clang++-12 CC=clang-12 f90_interface=n extra_lib_dirs=/usr/lib/llvm/lib @@ -194,9 +193,8 @@ jobs: run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies run: | - python3 -m pip install ruamel.yaml scons numpy cython pandas scipy pytest \ + python3 -m pip install ruamel.yaml scons numpy cython pandas scipy pytest h5py \ pytest-github-actions-annotate-failures pytest-cov gcovr - python3 -m pip install --no-binary=h5py h5py - name: Setup .NET Core SDK uses: actions/setup-dotnet@v2 with: @@ -366,8 +364,7 @@ jobs: run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies run: | - python3 -m pip install ruamel.yaml scons numpy cython pandas 
matplotlib scipy - python3 -m pip install --no-binary=h5py h5py + python3 -m pip install ruamel.yaml scons numpy cython pandas matplotlib scipy h5py - name: Build Cantera # compile with GCC 9.4.0 on ubuntu-20.04 as an alternative to the default # (GCC 7.5.0 is both default and oldest supported version) @@ -619,9 +616,8 @@ jobs: run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies run: | - python3 -m pip install ruamel.yaml scons numpy cython pandas pytest \ + python3 -m pip install ruamel.yaml scons numpy cython pandas h5py pytest \ pytest-github-actions-annotate-failures - python3 -m pip install --no-binary=h5py h5py - name: Setup Intel oneAPI environment run: | source /opt/intel/oneapi/setvars.sh diff --git a/interfaces/cython/cantera/composite.py b/interfaces/cython/cantera/composite.py index 9a4a71f040f..2b26c3c4bf9 100644 --- a/interfaces/cython/cantera/composite.py +++ b/interfaces/cython/cantera/composite.py @@ -7,17 +7,20 @@ from collections import OrderedDict import csv as _csv -import pkg_resources - -# avoid explicit dependence of cantera on h5py -try: - pkg_resources.get_distribution('h5py') -except pkg_resources.DistributionNotFound: - _h5py = ImportError('Method requires a working h5py installation.') -else: - import h5py as _h5py +def _import_h5py(): + # avoid explicit dependence of cantera on h5py + import pkg_resources # local import to reduce overall import time + try: + pkg_resources.get_distribution('h5py') + except pkg_resources.DistributionNotFound: + raise ImportError('Method requires a working h5py installation.') + else: + import h5py + return h5py # avoid explicit dependence of cantera on pandas +import pkg_resources + try: pkg_resources.get_distribution('pandas') except pkg_resources.DistributionNotFound: @@ -1311,8 +1314,7 @@ def write_hdf(self, filename, *args, cols=None, group=None, subgroup=None, requires a working installation of *h5py* (``h5py`` can be installed using pip or conda). 
""" - if isinstance(_h5py, ImportError): - raise _h5py + h5py = _import_h5py() # collect data data = self.collect_data(*args, cols=cols, **kwargs) @@ -1322,7 +1324,7 @@ def write_hdf(self, filename, *args, cols=None, group=None, subgroup=None, hdf_kwargs = {k: v for k, v in hdf_kwargs.items() if v is not None} # save to container file - with _h5py.File(filename, mode) as hdf: + with h5py.File(filename, mode) as hdf: # check existence of tagged item if not group: @@ -1395,10 +1397,9 @@ def read_hdf(self, filename, group=None, subgroup=None, force=False, normalize=T The method imports data using `restore_data` and requires a working installation of *h5py* (``h5py`` can be installed using pip or conda). """ - if isinstance(_h5py, ImportError): - raise _h5py + h5py = _import_h5py() - with _h5py.File(filename, 'r') as hdf: + with h5py.File(filename, 'r') as hdf: groups = list(hdf.keys()) if not len(groups): @@ -1417,7 +1418,7 @@ def read_hdf(self, filename, group=None, subgroup=None, force=False, normalize=T # identify subgroup if subgroup is not None: sub_names = [key for key, value in root.items() - if isinstance(value, _h5py.Group)] + if isinstance(value, h5py.Group)] if not len(sub_names): msg = "HDF group '{}' does not contain valid data" raise IOError(msg.format(group)) @@ -1454,13 +1455,13 @@ def strip_ext(source): self._meta = dict(dgroup.attrs.items()) for name, value in dgroup.items(): # support one level of recursion - if isinstance(value, _h5py.Group): + if isinstance(value, h5py.Group): self._meta[name] = dict(value.attrs.items()) # load data data = OrderedDict() for name, value in dgroup.items(): - if isinstance(value, _h5py.Group): + if isinstance(value, h5py.Group): continue elif value.dtype.type == np.bytes_: data[name] = np.array(value).astype('U') diff --git a/test/kinetics/kineticsFromYaml.cpp b/test/kinetics/kineticsFromYaml.cpp index 078ee1606b8..f5e2970f869 100644 --- a/test/kinetics/kineticsFromYaml.cpp +++ b/test/kinetics/kineticsFromYaml.cpp @@ 
-518,10 +518,6 @@ TEST(Reaction, PythonExtensibleRate) #ifndef CT_HAS_PYTHON GTEST_SKIP(); #endif - #ifdef CT_USE_HDF5 - // potential mismatch of HDF libraries between h5py and HighFive - GTEST_SKIP(); - #endif auto sol = newSolution("extensible-reactions.yaml"); auto R = sol->kinetics()->reaction(0); EXPECT_EQ(R->type(), "square-rate"); diff --git a/test/python/test_composite.py b/test/python/test_composite.py index 74f86841113..4c980d5018d 100644 --- a/test/python/test_composite.py +++ b/test/python/test_composite.py @@ -5,7 +5,14 @@ import pickle import cantera as ct -from cantera.composite import _h5py, _pandas + +try: + h5py = ct.composite._import_h5py() + have_h5py = True +except ImportError: + have_h5py = False + +from cantera.composite import _pandas from . import utilities @@ -255,7 +262,7 @@ def test_append_no_norm_data(self): self.assertEqual(states[0].P, gas.P) self.assertArrayNear(states[0].Y, gas.Y) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") + @utilities.unittest.skipIf(not have_h5py, "h5py is not installed") def test_import_no_norm_data(self): outfile = self.test_work_path / "solutionarray.h5" # In Python >= 3.8, this can be replaced by the missing_ok argument @@ -336,7 +343,7 @@ def test_to_pandas(self): with self.assertRaisesRegex(NotImplementedError, 'not supported'): states.to_pandas() - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") + @utilities.unittest.skipIf(not have_h5py, "h5py is not installed") def test_write_hdf(self): outfile = self.test_work_path / "solutionarray.h5" # In Python >= 3.8, this can be replaced by the missing_ok argument @@ -365,7 +372,7 @@ def test_write_hdf(self): gas = ct.Solution('gri30.yaml', transport_model=None) ct.SolutionArray(gas, 10).write_hdf(outfile) - with _h5py.File(outfile, 'a') as hdf: + with h5py.File(outfile, 'a') as hdf: hdf.create_group('spam') c = ct.SolutionArray(self.gas) @@ -382,7 +389,7 @@ def test_write_hdf(self): 
c.read_hdf(outfile, group='foo/bar/baz') self.assertArrayNear(states.T, c.T) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") + @utilities.unittest.skipIf(not have_h5py, "h5py is not installed") def test_write_hdf_str_column(self): outfile = self.test_work_path / "solutionarray.h5" # In Python >= 3.8, this can be replaced by the missing_ok argument @@ -396,7 +403,7 @@ def test_write_hdf_str_column(self): b.read_hdf(outfile) self.assertEqual(list(states.spam), list(b.spam)) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") + @utilities.unittest.skipIf(not have_h5py, "h5py is not installed") def test_write_hdf_multidim_column(self): outfile = self.test_work_path / "solutionarray.h5" # In Python >= 3.8, this can be replaced by the missing_ok argument @@ -571,7 +578,7 @@ def check(a, b): b.restore_data(data) check(a, b) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") + @utilities.unittest.skipIf(not have_h5py, "h5py is not installed") def test_import_no_norm_water(self): outfile = self.test_work_path / "solutionarray.h5" # In Python >= 3.8, this can be replaced by the missing_ok argument diff --git a/test/python/test_onedim.py b/test/python/test_onedim.py index 952d3a17160..70f539da4ff 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -4,8 +4,6 @@ from .utilities import allow_deprecated import pytest -from cantera.composite import _h5py - class TestOnedim(utilities.CanteraTest): def test_instantiate(self): From f6cdcbc3dc6f7dae6ccba829ac5cb1037fb6a810 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 22 Dec 2022 20:03:40 +0100 Subject: [PATCH 74/93] Address review comments --- .github/workflows/main.yml | 2 + SConstruct | 8 +- include/cantera/base/SolutionArray.h | 78 +++++++++++++------ include/cantera/base/Storage.h | 25 +++++- include/cantera/base/stringUtils.h | 3 - include/cantera/oneD/Boundary1D.h | 14 ++-- 
include/cantera/oneD/Domain1D.h | 10 ++- include/cantera/oneD/StFlow.h | 4 +- samples/cxx/SConscript | 2 + samples/f77/SConscript | 4 +- samples/f90/SConscript | 4 +- samples/python/onedim/adiabatic_flame.py | 12 +-- samples/python/onedim/burner_flame.py | 13 ++-- samples/python/onedim/diffusion_flame.py | 13 ++-- .../python/onedim/diffusion_flame_batch.py | 36 ++++----- .../onedim/diffusion_flame_extinction.py | 36 ++++----- samples/python/onedim/flame_fixed_T.py | 13 ++-- samples/python/onedim/flame_initial_guess.py | 7 +- samples/python/onedim/ion_burner_flame.py | 13 ++-- samples/python/onedim/ion_free_flame.py | 13 ++-- .../onedim/premixed_counterflow_flame.py | 13 ++-- .../onedim/premixed_counterflow_twin_flame.py | 13 ++-- samples/python/onedim/stagnation_flame.py | 18 ++--- src/base/SolutionArray.cpp | 61 ++++++++------- src/base/stringUtils.cpp | 2 +- src/oneD/Boundary1D.cpp | 31 ++++---- src/oneD/Domain1D.cpp | 10 --- src/oneD/Sim1D.cpp | 9 ++- src/oneD/StFlow.cpp | 4 +- 29 files changed, 267 insertions(+), 204 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5b54b98e7df..3b82af1433d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -368,11 +368,13 @@ jobs: - name: Build Cantera # compile with GCC 9.4.0 on ubuntu-20.04 as an alternative to the default # (GCC 7.5.0 is both default and oldest supported version) + # compile without native HDF5 support run: python3 `which scons` build -j2 debug=n CC=gcc-9 CXX=g++-9 if: matrix.python-version != '3.10' - name: Build Cantera (Python 3.10 with HDF) # compile with GCC 9.4.0 on ubuntu-20.04 as an alternative to the default # (GCC 7.5.0 is both default and oldest supported version) + # compile with native HDF5 support run: | python3 `which scons` build -j2 debug=n CC=gcc-9 CXX=g++-9 \ hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR diff --git a/SConstruct b/SConstruct index 263a79147cc..70a8e738955 100644 --- a/SConstruct +++ b/SConstruct @@ 
-360,9 +360,9 @@ config_options = [ "hdf_support", """Select whether to support HDF5 container files natively ('y'), disable HDF5 support ('n'), or to decide automatically based on the system configuration - ('default'). Native HDF5 support uses the headers-only HDF5 wrapper HighFive - (see option 'system_highfive'). Specifying 'hdf_include' or 'hdf_libdir' - changes the default to 'y'.""", + ('default'). Native HDF5 support uses the HDF5 library as well as the + header-only HighFive C++ wrapper (see option 'system_highfive'). Specifying + 'hdf_include' or 'hdf_libdir' changes the default to 'y'.""", "default", ("default", "y", "n")), PathOption( "hdf_include", @@ -1529,14 +1529,12 @@ if env["hdf_include"]: env["hdf_include"] = Path(env["hdf_include"]).as_posix() env.Append(CPPPATH=[env["hdf_include"]]) env["hdf_support"] = "y" - env["extra_inc_dirs"].append(env["hdf_include"]) if env["hdf_libdir"]: env["hdf_libdir"] = Path(env["hdf_libdir"]).as_posix() env.Append(LIBPATH=[env["hdf_libdir"]]) env["hdf_support"] = "y" if env["use_rpath_linkage"]: env.Append(RPATH=env["hdf_libdir"]) - env["extra_lib_dirs"].append(env["hdf_libdir"]) if env["hdf_support"] == "n": env["use_hdf5"] = False diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index 1c3d5d9203b..350e587c905 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -35,54 +35,53 @@ class SolutionArray public: virtual ~SolutionArray() {} + /*! + * Instantiate a new SolutionArray reference + * + * @param sol Solution object defining phase definitions + * @param size Number of SolutionArray entries + * @param meta AnyMap holding SolutionArray meta data + */ static shared_ptr create(const shared_ptr& sol, size_t size=0, const AnyMap& meta={}) { - return shared_ptr( - new SolutionArray(sol, size, meta)); + return shared_ptr(new SolutionArray(sol, size, meta)); } /*! 
- * Initialize SolutionArray with independent memory management + * Initialize SolutionArray * * @param extra Names of auxiliary data */ void initialize(const std::vector& extra={}); - /*! - * Size of SolutionArray (number of entries) - */ + //! Size of SolutionArray (number of entries) int size() const { return m_size; } - /*! - * SolutionArray meta data. - */ + //! SolutionArray meta data. AnyMap& meta() { return m_meta; } - /*! - * Retrieve associated ThermoPhase object - */ - std::shared_ptr thermo(); + //! Retrieve associated ThermoPhase object + shared_ptr thermo(); /*! - * Check whether SolutionArray contains a component. + * Check whether SolutionArray contains a component (property defining state or + * auxiliary variable) */ bool hasComponent(const std::string& name) const; - /*! - * Retrieve a component of the SolutionArray by name. - */ + //! Retrieve a component of the SolutionArray by name vector_fp getComponent(const std::string& name) const; /*! * Set a component of the SolutionArray by name. * - * @param name Component name + * @param name Name of component (property defining state or auxiliary variable) * @param data Component data * @param force If true, add new component to SolutionArray */ @@ -115,11 +114,19 @@ class SolutionArray * Write header data to container file. * * @param fname Name of HDF container file - * @param id Identifier of SolutionArray root within the container file + * @param id Identifier of SolutionArray within the container file * @param desc Description */ static void writeHeader(const std::string& fname, const std::string& id, const std::string& desc); + + /*! + * Write header data to AnyMap. 
+ * + * @param root Root node of AnyMap structure + * @param id Identifier of SolutionArray node within AnyMap structure + * @param desc Description + */ static void writeHeader(AnyMap& root, const std::string& id, const std::string& desc); @@ -132,6 +139,13 @@ class SolutionArray */ void writeEntry(const std::string& fname, const std::string& id, int compression=0); + + /*! + * Write SolutionArray data to AnyMap. + * + * @param root Root node of AnyMap structure + * @param id Identifier of SolutionArray node within AnyMap structure + */ void writeEntry(AnyMap& root, const std::string& id); /*! @@ -152,6 +166,13 @@ class SolutionArray * @param id Identifier of SolutionArray within the file structure */ static AnyMap readHeader(const std::string& fname, const std::string& id); + + /*! + * Read header data from AnyMap. + * + * @param root Root node of AnyMap structure + * @param id Identifier of SolutionArray node within AnyMap structure + */ static AnyMap readHeader(const AnyMap& root, const std::string& id); /*! @@ -161,6 +182,13 @@ class SolutionArray * @param id Identifier of SolutionArray within the file structure */ void readEntry(const std::string& fname, const std::string& id); + + /*! + * Restore SolutionArray entry from AnyMap. + * + * @param root Root node of AnyMap structure + * @param id Identifier of SolutionArray node within AnyMap structure + */ void readEntry(const AnyMap& root, const std::string& id); /*! @@ -172,11 +200,15 @@ class SolutionArray AnyMap restore(const std::string& fname, const std::string& id); protected: - //! Detect storage mode of state data - std::string detectMode(std::set names, bool native=true); + /*! + * Identify storage mode of state data (combination of properties defining state); + * valid modes include Phase::nativeState ("native") or other property combinations + * defined by Phase::fullStates (three-letter acronyms, for example "TDY", "TPX"). + */ + std::string detectMode(const std::set& names, bool native=true); //! 
Retrieve set containing list of properties defining state - std::set stateProperties(std::string mode, bool alias=false); + std::set stateProperties(const std::string& mode, bool alias=false); shared_ptr m_sol; //!< Solution object associated with state data size_t m_size; //!< Number of entries in SolutionArray @@ -186,7 +218,7 @@ class SolutionArray shared_ptr m_work; //!< Work vector holding states double* m_data; //!< Memory location holding state information - std::map> m_other; //!< Auxiliary data + std::map> m_extra; //!< Auxiliary data }; } diff --git a/include/cantera/base/Storage.h b/include/cantera/base/Storage.h index cc8866073be..a1807010bfc 100644 --- a/include/cantera/base/Storage.h +++ b/include/cantera/base/Storage.h @@ -57,7 +57,7 @@ namespace Cantera * A wrapper class handling storage to HDF; acts as a thin wrapper for HighFive * * @since New in Cantera 3.0. - * @warning This function is an experimental part of the %Cantera API and may be + * @warning This class is an experimental part of the %Cantera API and may be * changed or removed without notice. */ class Storage @@ -78,31 +78,54 @@ class Storage //! Check whether path go location exists //! If the file has write access, create location if necessary + //! @param id storage location within file bool checkGroup(const std::string& id); //! Retrieve contents of file from a specified location + //! @param id storage location within file + //! @returns pair containing size and list of entry names of stored data set std::pair> contents(const std::string& id) const; //! Read attributes from a specified location + //! @param id storage location within file + //! @param recursive boolean indicating whether subgroups should be included + //! @returns AnyMap containing attributes AnyMap readAttributes(const std::string& id, bool recursive) const; //! Write attributes to a specified location + //! @param id storage location within file + //! 
@param meta AnyMap containing attributes void writeAttributes(const std::string& id, const AnyMap& meta); //! Read data vector from a specified location + //! @param id storage location within file + //! @param name name of data vector entry + //! @param size size of data vector entry + //! @returns data vector vector_fp readVector(const std::string& id, const std::string& name, size_t size) const; //! Write data vector to a specified location + //! @param id storage location within file + //! @param name name of data vector entry + //! @param data data vector void writeVector(const std::string& id, const std::string& name, const vector_fp& data); //! Read matrix from a specified location + //! @param id storage location within file + //! @param name name of matrix entry + //! @param rows number of matrix rows + //! @param cols number of matrix columns + //! @returns matrix containing data (vector of vectors) std::vector readMatrix(const std::string& id, const std::string& name, size_t rows, size_t cols) const; //! Write matrix to a specified location + //! @param id storage location within file + //! @param name name of matrix entry + //! @param data matrix containing data (vector of vectors) void writeMatrix(const std::string& id, const std::string& name, const std::vector& data); diff --git a/include/cantera/base/stringUtils.h b/include/cantera/base/stringUtils.h index 46591a06580..eb343eeeb16 100644 --- a/include/cantera/base/stringUtils.h +++ b/include/cantera/base/stringUtils.h @@ -105,9 +105,6 @@ void tokenizeString(const std::string& oval, //! This function separates a string up into tokens according to the location of //! path separators. /*! - * White space includes the new line character. tokens are stripped of leading - * and trailing white space. - * * The separate tokens are returned in a string vector, v. 
* * @param oval String to be broken up diff --git a/include/cantera/oneD/Boundary1D.h b/include/cantera/oneD/Boundary1D.h index 1944cdb6ccf..063fe8c09b0 100644 --- a/include/cantera/oneD/Boundary1D.h +++ b/include/cantera/oneD/Boundary1D.h @@ -136,7 +136,7 @@ class Inlet1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; - virtual std::shared_ptr asArray(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); @@ -172,7 +172,7 @@ class Empty1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; - virtual std::shared_ptr asArray(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -198,7 +198,7 @@ class Symm1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; - virtual std::shared_ptr asArray(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -224,7 +224,7 @@ class Outlet1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; - virtual std::shared_ptr asArray(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -257,7 +257,7 @@ class OutletRes1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; - virtual std::shared_ptr asArray(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; virtual void 
restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); @@ -291,7 +291,7 @@ class Surf1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; - virtual std::shared_ptr asArray(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); @@ -329,7 +329,7 @@ class ReactingSurf1D : public Boundary1D integer* diagg, double rdt); virtual AnyMap serialize(const double* soln) const; - virtual std::shared_ptr asArray(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); diff --git a/include/cantera/oneD/Domain1D.h b/include/cantera/oneD/Domain1D.h index 3dc96d11f24..6205d6d6ee7 100644 --- a/include/cantera/oneD/Domain1D.h +++ b/include/cantera/oneD/Domain1D.h @@ -322,7 +322,9 @@ class Domain1D * * @since New in Cantera 3.0. */ - virtual std::shared_ptr asArray(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const { + throw CanteraError("Domain1D::asArray", "Needs to be overloaded."); + } //! Restore the solution for this domain from an AnyMap /*! @@ -342,7 +344,9 @@ class Domain1D * * @since New in Cantera 3.0. */ - virtual void restore(SolutionArray& arr, double* soln, int loglevel); + virtual void restore(SolutionArray& arr, double* soln, int loglevel) { + throw NotImplementedError("Domain1D::restore", "Needs to be overloaded."); + } //! Return thermo/kinetics/transport manager used in the domain //! @since New in Cantera 3.0. @@ -530,7 +534,7 @@ class Domain1D bool m_force_full_update; //! 
Composite thermo/kinetics/transport handler - std::shared_ptr m_solution; + shared_ptr m_solution; }; } diff --git a/include/cantera/oneD/StFlow.h b/include/cantera/oneD/StFlow.h index 594c13d4183..741c81d796c 100644 --- a/include/cantera/oneD/StFlow.h +++ b/include/cantera/oneD/StFlow.h @@ -158,7 +158,7 @@ class StFlow : public Domain1D virtual void showSolution(const doublereal* x); virtual AnyMap serialize(const double* soln) const; - virtual std::shared_ptr asArray(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); @@ -449,7 +449,7 @@ class StFlow : public Domain1D // Smart pointer preventing garbage collection when the transport model of an // associated Solution object changes: the transport model of the StFlow object // will remain unaffected by an external change. - std::shared_ptr m_trans_shared; + shared_ptr m_trans_shared; // boundary emissivities for the radiation calculations doublereal m_epsilon_left; diff --git a/samples/cxx/SConscript b/samples/cxx/SConscript index d6c306e92b7..74232957e03 100644 --- a/samples/cxx/SConscript +++ b/samples/cxx/SConscript @@ -65,9 +65,11 @@ set(CMAKE_EXE_LINKER_FLAGS ${CMAKE_EXE_LINKER_FLAGS} ${OpenMP_EXE_LINKER_FLAGS}) excludes) else: incdirs.extend([localenv["sundials_include"], localenv["boost_inc_dir"]]) + incdirs.append(localenv["hdf_include"]) incdirs.extend(localenv["extra_inc_dirs"]) incdirs = list(set(incdirs)) libdirs.extend([localenv["sundials_libdir"], localenv["blas_lapack_dir"]]) + libdirs.append(localenv["hdf_libdir"]) libdirs.extend(localenv["extra_lib_dirs"]) libdirs = list(set(libdirs)) diff --git a/samples/f77/SConscript b/samples/f77/SConscript index 1b9df6153ce..5b0d91628cc 100644 --- a/samples/f77/SConscript +++ b/samples/f77/SConscript @@ -20,7 +20,7 @@ for program_name, fortran_sources in samples: 
CPPPATH=['#build/src/fortran', '#include'], LIBS=env['cantera_libs']+['cantera_fortran']+env['cxx_stdlib'], LIBPATH=[env['sundials_libdir'], localenv['blas_lapack_dir'], - env['extra_lib_dirs'], '#build/lib'], + env['extra_lib_dirs'], env["hdf_libdir"], '#build/lib'], LINK='$FORTRAN_LINK') # Generate SConstruct file to be installed @@ -37,9 +37,11 @@ if localenv["package_build"]: excludes) else: incdirs.extend([localenv["sundials_include"], localenv["boost_inc_dir"]]) + incdirs.append(localenv["hdf_include"]) incdirs.extend(localenv["extra_inc_dirs"]) incdirs = list(set(incdirs)) libdirs.extend([localenv["sundials_libdir"], localenv["blas_lapack_dir"]]) + libdirs.append(localenv["hdf_libdir"]) libdirs.extend(localenv["extra_lib_dirs"]) libdirs = list(set(libdirs)) diff --git a/samples/f90/SConscript b/samples/f90/SConscript index 73cb74ee4bf..690a35351d5 100644 --- a/samples/f90/SConscript +++ b/samples/f90/SConscript @@ -14,7 +14,7 @@ for programName, sources in samples: F90PATH='#build/src/fortran', LIBS=['cantera_fortran']+env['cantera_libs']+env['cxx_stdlib'], LIBPATH=[env['sundials_libdir'], env['blas_lapack_dir'], - env['extra_lib_dirs'], '#build/lib'], + env['extra_lib_dirs'], env["hdf_libdir"], '#build/lib'], LINK='$FORTRAN_LINK') # Generate SConstruct files to be installed @@ -22,9 +22,11 @@ for programName, sources in samples: libdirs = [localenv["ct_libdir"]] if not localenv["package_build"]: incdirs.extend([localenv["sundials_include"], localenv["boost_inc_dir"]]) + incdirs.append(localenv["hdf_include"]) incdirs.extend(localenv["extra_inc_dirs"]) incdirs = list(set(incdirs)) libdirs.extend([localenv["sundials_libdir"], localenv["blas_lapack_dir"]]) + libdirs.append(localenv["hdf_libdir"]) libdirs.extend(localenv["extra_lib_dirs"]) libdirs = list(set(libdirs)) diff --git a/samples/python/onedim/adiabatic_flame.py b/samples/python/onedim/adiabatic_flame.py index 20414c29e76..0297d573d1f 100644 --- a/samples/python/onedim/adiabatic_flame.py +++ 
b/samples/python/onedim/adiabatic_flame.py @@ -11,12 +11,6 @@ import cantera as ct -if "native" in ct.hdf_support(): - output = Path() / "adiabatic_flame.h5" -else: - output = Path() / "adiabatic_flame.yaml" -output.unlink(missing_ok=True) - # Simulation parameters p = ct.one_atm # pressure [Pa] Tin = 300.0 # unburned gas temperature [K] @@ -38,6 +32,12 @@ f.transport_model = 'Mix' f.solve(loglevel=loglevel, auto=True) +if "native" in ct.hdf_support(): + output = Path() / "adiabatic_flame.h5" +else: + output = Path() / "adiabatic_flame.yaml" +output.unlink(missing_ok=True) + # Solve with the energy equation enabled f.save(output, name="mix", description="solution with mixture-averaged transport") diff --git a/samples/python/onedim/burner_flame.py b/samples/python/onedim/burner_flame.py index 776d11a821f..4001476ef8a 100644 --- a/samples/python/onedim/burner_flame.py +++ b/samples/python/onedim/burner_flame.py @@ -9,12 +9,6 @@ from pathlib import Path import cantera as ct -if "native" in ct.hdf_support(): - output = Path() / "burner_flame.h5" -else: - output = Path() / "burner_flame.yaml" -output.unlink(missing_ok=True) - p = 0.05 * ct.one_atm tburner = 373.0 mdot = 0.06 @@ -32,6 +26,13 @@ f.transport_model = 'Mix' f.solve(loglevel, auto=True) + +if "native" in ct.hdf_support(): + output = Path() / "burner_flame.h5" +else: + output = Path() / "burner_flame.yaml" +output.unlink(missing_ok=True) + f.save(output, name="mix", description="solution with mixture-averaged transport") f.transport_model = 'Multi' diff --git a/samples/python/onedim/diffusion_flame.py b/samples/python/onedim/diffusion_flame.py index 0960844089c..50b5d1e4c06 100644 --- a/samples/python/onedim/diffusion_flame.py +++ b/samples/python/onedim/diffusion_flame.py @@ -11,12 +11,6 @@ import matplotlib.pyplot as plt -if "native" in ct.hdf_support(): - output = Path() / "diffusion_flame.h5" -else: - output = Path() / "diffusion_flame.yaml" -output.unlink(missing_ok=True) - # Input parameters p = 
ct.one_atm # pressure tin_f = 300.0 # fuel inlet temperature @@ -60,6 +54,13 @@ # Solve the problem f.solve(loglevel, auto=True) f.show_solution() + +if "native" in ct.hdf_support(): + output = Path() / "diffusion_flame.h5" +else: + output = Path() / "diffusion_flame.yaml" +output.unlink(missing_ok=True) + f.save(output) # write the velocity, temperature, and mole fractions to a CSV file diff --git a/samples/python/onedim/diffusion_flame_batch.py b/samples/python/onedim/diffusion_flame_batch.py index 248d8d6f6f3..2459573aed5 100644 --- a/samples/python/onedim/diffusion_flame_batch.py +++ b/samples/python/onedim/diffusion_flame_batch.py @@ -29,24 +29,6 @@ class FlameExtinguished(Exception): pass -output_path = Path() / "diffusion_flame_batch_data" -output_path.mkdir(parents=True, exist_ok=True) - -hdf_output = "native" in ct.hdf_support() -if hdf_output: - file_name = output_path / "flame_data.h5" - file_name.unlink(missing_ok=True) - -def names(test): - if hdf_output: - # use internal container structure for HDF - file_name = output_path / "flame_data.h5" - return file_name, test - # use separate files for YAML - file_name = output_path / f"{test}.yaml".replace("-", "_").replace("/", "_") - return file_name, "solution" - - # PART 1: INITIALIZATION # Set up an initial hydrogen-oxygen counterflow flame at 1 bar and low strain @@ -87,6 +69,24 @@ def interrupt_extinction(t): print('Creating the initial solution') f.solve(loglevel=0, auto=True) +# Define output locations +output_path = Path() / "diffusion_flame_batch_data" +output_path.mkdir(parents=True, exist_ok=True) + +hdf_output = "native" in ct.hdf_support() +if hdf_output: + file_name = output_path / "flame_data.h5" + file_name.unlink(missing_ok=True) + +def names(test): + if hdf_output: + # use internal container structure for HDF + file_name = output_path / "flame_data.h5" + return file_name, test + # use separate files for YAML + file_name = output_path / f"{test}.yaml".replace("-", "_").replace("/", "_") + 
return file_name, "solution" + # Save to data directory file_name, entry = names("initial-solution") desc = "Initial hydrogen-oxygen counterflow flame at 1 bar and low strain rate" diff --git a/samples/python/onedim/diffusion_flame_extinction.py b/samples/python/onedim/diffusion_flame_extinction.py index 86a05725e26..081e5b24c80 100644 --- a/samples/python/onedim/diffusion_flame_extinction.py +++ b/samples/python/onedim/diffusion_flame_extinction.py @@ -21,24 +21,6 @@ import cantera as ct -output_path = Path() / "diffusion_flame_extinction_data" -output_path.mkdir(parents=True, exist_ok=True) - -hdf_output = "native" in ct.hdf_support() -if hdf_output: - file_name = output_path / "flame_data.h5" - file_name.unlink(missing_ok=True) - -def names(test): - if hdf_output: - # use internal container structure for HDF - file_name = output_path / "flame_data.h5" - return file_name, test - # use separate files for YAML - file_name = output_path / f"{test}.yaml".replace("-", "_").replace("/", "_") - return file_name, "solution" - - # PART 1: INITIALIZATION # Set up an initial hydrogen-oxygen counterflow flame at 1 bar and low strain @@ -69,6 +51,24 @@ def names(test): print('Creating the initial solution') f.solve(loglevel=0, auto=True) +# Define output locations +output_path = Path() / "diffusion_flame_extinction_data" +output_path.mkdir(parents=True, exist_ok=True) + +hdf_output = "native" in ct.hdf_support() +if hdf_output: + file_name = output_path / "flame_data.h5" + file_name.unlink(missing_ok=True) + +def names(test): + if hdf_output: + # use internal container structure for HDF + file_name = output_path / "flame_data.h5" + return file_name, test + # use separate files for YAML + file_name = output_path / f"{test}.yaml".replace("-", "_").replace("/", "_") + return file_name, "solution" + file_name, entry = names("initial-solution") f.save(file_name, name=entry, description="Initial solution") diff --git a/samples/python/onedim/flame_fixed_T.py 
b/samples/python/onedim/flame_fixed_T.py index bc2ca95491f..5a9b5055e02 100644 --- a/samples/python/onedim/flame_fixed_T.py +++ b/samples/python/onedim/flame_fixed_T.py @@ -12,12 +12,6 @@ import cantera as ct -if "native" in ct.hdf_support(): - output = Path() / "flame_fixed_T.h5" -else: - output = Path() / "flame_fixed_T.yaml" -output.unlink(missing_ok=True) - ################################################################ # parameter values p = ct.one_atm # pressure @@ -67,6 +61,13 @@ f.set_refine_criteria(ratio=3.0, slope=0.3, curve=1) f.solve(loglevel, refine_grid) + +if "native" in ct.hdf_support(): + output = Path() / "flame_fixed_T.h5" +else: + output = Path() / "flame_fixed_T.yaml" +output.unlink(missing_ok=True) + f.save(output, name="mix", description="solution with mixture-averaged transport") print('\n\n switching to multicomponent transport...\n\n') diff --git a/samples/python/onedim/flame_initial_guess.py b/samples/python/onedim/flame_initial_guess.py index a26256c88cc..2cd07d67a0e 100644 --- a/samples/python/onedim/flame_initial_guess.py +++ b/samples/python/onedim/flame_initial_guess.py @@ -14,8 +14,6 @@ except ImportError: pd = None -output_path = Path() / "flame_initial_guess_data" -output_path.mkdir(parents=True, exist_ok=True) # Simulation parameters p = ct.one_atm # pressure [Pa] @@ -52,6 +50,9 @@ def describe(flame): # Save the flame in a few different formats +output_path = Path() / "flame_initial_guess_data" +output_path.mkdir(parents=True, exist_ok=True) + print("Save YAML") yaml_filepath = output_path / "flame.yaml" f.save(yaml_filepath, name="solution", description="Initial methane flame") @@ -64,7 +65,7 @@ def describe(flame): # HDF is not a required dependency hdf_filepath = output_path / "flame.h5" hdf_filepath.unlink(missing_ok=True) - f.save(hdf_filepath, name="freeflame", description=("Initial methane flame")) + f.save(hdf_filepath, name="freeflame", description="Initial methane flame") print("Save HDF\n") except ct.CanteraError as 
err: print(f"Skipping HDF: {err}\n") diff --git a/samples/python/onedim/ion_burner_flame.py b/samples/python/onedim/ion_burner_flame.py index 67ec54e2eb4..6b4a8c35a4e 100644 --- a/samples/python/onedim/ion_burner_flame.py +++ b/samples/python/onedim/ion_burner_flame.py @@ -9,12 +9,6 @@ import cantera as ct -if "native" in ct.hdf_support(): - output = Path() / "ion_burner_flame.h5" -else: - output = Path() / "ion_burner_flame.yaml" -output.unlink(missing_ok=True) - p = ct.one_atm tburner = 600.0 reactants = 'CH4:1.0, O2:2.0, N2:7.52' # premixed gas composition @@ -33,6 +27,13 @@ f.transport_model = 'Ion' f.solve(loglevel, auto=True) f.solve(loglevel=loglevel, stage=2, enable_energy=True) + +if "native" in ct.hdf_support(): + output = Path() / "ion_burner_flame.h5" +else: + output = Path() / "ion_burner_flame.yaml" +output.unlink(missing_ok=True) + f.save(output, name="mix", description="solution with mixture-averaged transport") f.write_csv('ion_burner_flame.csv', quiet=False) diff --git a/samples/python/onedim/ion_free_flame.py b/samples/python/onedim/ion_free_flame.py index 15677da9919..f7b27225ef5 100644 --- a/samples/python/onedim/ion_free_flame.py +++ b/samples/python/onedim/ion_free_flame.py @@ -9,12 +9,6 @@ import cantera as ct -if "native" in ct.hdf_support(): - output = Path() / "ion_free_flame.h5" -else: - output = Path() / "ion_free_flame.yaml" -output.unlink(missing_ok=True) - # Simulation parameters p = ct.one_atm # pressure [Pa] Tin = 300.0 # unburned gas temperature [K] @@ -37,6 +31,13 @@ # stage two f.solve(loglevel=loglevel, stage=2, enable_energy=True) + +if "native" in ct.hdf_support(): + output = Path() / "ion_free_flame.h5" +else: + output = Path() / "ion_free_flame.yaml" +output.unlink(missing_ok=True) + f.save(output, name="ion", description="solution with ionized gas transport") f.show_solution() diff --git a/samples/python/onedim/premixed_counterflow_flame.py b/samples/python/onedim/premixed_counterflow_flame.py index 
cb4023a0d5e..b50c43a5c25 100644 --- a/samples/python/onedim/premixed_counterflow_flame.py +++ b/samples/python/onedim/premixed_counterflow_flame.py @@ -12,12 +12,6 @@ import cantera as ct -if "native" in ct.hdf_support(): - output = Path() / "premixed_counterflow_flame.h5" -else: - output = Path() / "premixed_counterflow_flame.yaml" -output.unlink(missing_ok=True) - # parameter values p = 0.05 * ct.one_atm # pressure T_in = 373.0 # inlet temperature @@ -49,6 +43,13 @@ sim.show_solution() sim.solve(loglevel, auto=True) + +if "native" in ct.hdf_support(): + output = Path() / "premixed_counterflow_flame.h5" +else: + output = Path() / "premixed_counterflow_flame.yaml" +output.unlink(missing_ok=True) + sim.save(output, name="mix", description="solution with mixture-averaged transport") # write the velocity, temperature, and mole fractions to a CSV file diff --git a/samples/python/onedim/premixed_counterflow_twin_flame.py b/samples/python/onedim/premixed_counterflow_twin_flame.py index cb3d95bceab..5289b28609e 100644 --- a/samples/python/onedim/premixed_counterflow_twin_flame.py +++ b/samples/python/onedim/premixed_counterflow_twin_flame.py @@ -80,12 +80,6 @@ def solveOpposedFlame(oppFlame, massFlux=0.12, loglevel=1, return np.max(oppFlame.T), K, strainRatePoint -if "native" in ct.hdf_support(): - output = Path() / "premixed_counterflow_twin_flame.h5" -else: - output = Path() / "premixed_counterflow_twin_flame.yaml" -output.unlink(missing_ok=True) - # Select the reaction mechanism gas = ct.Solution('gri30.yaml') @@ -121,6 +115,13 @@ def solveOpposedFlame(oppFlame, massFlux=0.12, loglevel=1, # Thus to plot temperature vs distance, use oppFlame.grid and oppFlame.T Sc = computeConsumptionSpeed(oppFlame) + +if "native" in ct.hdf_support(): + output = Path() / "premixed_counterflow_twin_flame.h5" +else: + output = Path() / "premixed_counterflow_twin_flame.yaml" +output.unlink(missing_ok=True) + oppFlame.save(output, name="mix") print(f"Peak temperature: {T:.1f} K") diff --git 
a/samples/python/onedim/stagnation_flame.py b/samples/python/onedim/stagnation_flame.py index b05e4f208d6..a61fc5c1273 100644 --- a/samples/python/onedim/stagnation_flame.py +++ b/samples/python/onedim/stagnation_flame.py @@ -22,15 +22,6 @@ import cantera as ct -output_path = Path() / "stagnation_flame_data" -output_path.mkdir(parents=True, exist_ok=True) - -if "native" in ct.hdf_support(): - output = output_path / "stagnation_flame.h5" -else: - output = output_path / "stagnation_flame.yaml" -output.unlink(missing_ok=True) - # parameter values p = 0.05 * ct.one_atm # pressure tburner = 373.0 # burner temperature @@ -79,6 +70,15 @@ sim.solve(loglevel, auto=True) +output_path = Path() / "stagnation_flame_data" +output_path.mkdir(parents=True, exist_ok=True) + +if "native" in ct.hdf_support(): + output = output_path / "stagnation_flame.h5" +else: + output = output_path / "stagnation_flame.yaml" +output.unlink(missing_ok=True) + for m, md in enumerate(mdot): sim.inlet.mdot = md sim.solve(loglevel) diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index c2cd62dd4fb..b7338b72723 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -12,10 +12,13 @@ #include "cantera/base/stringUtils.h" #include "cantera/thermo/ThermoPhase.h" #include "cantera/thermo/SurfPhase.h" +#include #include #include +namespace ba = boost::algorithm; + namespace Cantera { @@ -53,18 +56,18 @@ void SolutionArray::initialize(const std::vector& extra) m_work.reset(new vector_fp(m_size * m_stride, 0.)); m_data = m_work->data(); for (auto& key : extra) { - m_other.emplace(key, std::make_shared(m_size)); + m_extra.emplace(key, std::make_shared(m_size)); } } -std::shared_ptr SolutionArray::thermo() +shared_ptr SolutionArray::thermo() { return m_sol->thermo(); } bool SolutionArray::hasComponent(const std::string& name) const { - if (m_other.count(name)) { + if (m_extra.count(name)) { // auxiliary data return true; } @@ -87,10 +90,9 @@ vector_fp 
SolutionArray::getComponent(const std::string& name) const } vector_fp out(m_size); - if (m_other.count(name)) { + if (m_extra.count(name)) { // auxiliary data - auto other = m_other.at(name); - std::copy(other->begin(), other->end(), out.begin()); + out = *m_extra.at(name); return out; } @@ -115,8 +117,8 @@ void SolutionArray::setComponent( if (!hasComponent(name)) { if (force) { - m_other.emplace(name, std::make_shared(m_size)); - auto& extra = m_other[name]; + m_extra.emplace(name, std::make_shared(m_size)); + auto& extra = m_extra[name]; std::copy(data.begin(), data.end(), extra->begin()); return; } @@ -126,10 +128,10 @@ void SolutionArray::setComponent( throw CanteraError("SolutionArray::setComponent", "incompatible sizes"); } - if (m_other.count(name)) { + if (m_extra.count(name)) { // auxiliary data - auto other = m_other[name]; - std::copy(data.begin(), data.end(), other->begin()); + auto extra = m_extra[name]; + std::copy(data.begin(), data.end(), extra->begin()); } size_t ix = m_sol->thermo()->speciesIndex(name); @@ -190,7 +192,7 @@ std::map SolutionArray::getAuxiliary(size_t index) { setIndex(index); std::map out; - for (auto& item : m_other) { + for (auto& item : m_extra) { auto& extra = *item.second; out[item.first] = extra[m_index]; } @@ -268,8 +270,8 @@ void SolutionArray::writeEntry(const std::string& fname, const std::string& id, } } - for (auto& other : m_other) { - file.writeVector(id, other.first, *(other.second)); + for (auto& extra : m_extra) { + file.writeVector(id, extra.first, *(extra.second)); } file.flush(); } @@ -290,7 +292,7 @@ AnyMap& openField(AnyMap& root, const std::string& id) } else if (!sub.hasKey(field)) { sub[field] = AnyMap(); } - ptr = &sub[field].as(); // AnyMap lacks 'operator=' for const AnyMap + ptr = &sub[field].as(); } return *ptr; } @@ -302,8 +304,8 @@ void SolutionArray::writeEntry(AnyMap& root, const std::string& id) data["points"] = int(m_size); data.update(m_meta); - for (auto& other : m_other) { - 
data[other.first] = *(other.second); + for (auto& extra : m_extra) { + data[extra.first] = *(extra.second); } auto phase = m_sol->thermo(); @@ -315,7 +317,6 @@ void SolutionArray::writeEntry(AnyMap& root, const std::string& id) auto nSpecies = phase->nSpecies(); vector_fp values(nSpecies); if (surf) { - surf->invalidateCache(); surf->getCoverages(&values[0]); } else { phase->getMassFractions(&values[0]); @@ -390,7 +391,8 @@ AnyMap SolutionArray::readHeader(const std::string& fname, const std::string& id AnyMap SolutionArray::readHeader(const AnyMap& root, const std::string& id) { - throw CanteraError("SolutionArray::readHeader", "Not implemented."); + // todo: implement + throw NotImplementedError("SolutionArray::readHeader", "Not implemented."); } AnyMap SolutionArray::restore(const std::string& fname, const std::string& id) @@ -410,9 +412,9 @@ AnyMap SolutionArray::restore(const std::string& fname, const std::string& id) "Unknown file extension '{}'", extension); } -std::string SolutionArray::detectMode(std::set names, bool native) +std::string SolutionArray::detectMode(const std::set& names, bool native) { - // identify storage mode of state data + // check set of available names against state acronyms defined by Phase::fullStates std::string mode = ""; const auto& nativeState = m_sol->thermo()->nativeState(); bool usesNativeState; @@ -422,16 +424,19 @@ std::string SolutionArray::detectMode(std::set names, bool native) std::string name; usesNativeState = true; for (size_t i = 0; i < item.size(); i++) { + // pick i-th letter from "full" state acronym name = std::string(1, item[i]); if (surf && (name == "X" || name == "Y")) { - // override native state + // override native state to enable detection of surface phases name = "C"; usesNativeState = false; break; } if (names.count(name)) { + // property is stored using letter acronym usesNativeState &= nativeState.count(name) > 0; } else if (aliasMap.count(name) && names.count(aliasMap.at(name))) { + // property is 
stored using property name usesNativeState &= nativeState.count(name) > 0; } else { found = false; @@ -449,7 +454,8 @@ std::string SolutionArray::detectMode(std::set names, bool native) return mode; } -std::set SolutionArray::stateProperties(std::string mode, bool alias) +std::set SolutionArray::stateProperties( + const std::string& mode, bool alias) { std::set states; if (mode == "native") { @@ -541,12 +547,12 @@ void SolutionArray::readEntry(const std::string& fname, const std::string& id) "Import of '{}' data is not supported.", mode); } - // restore other data + // restore remaining data for (const auto& name : names) { if (!states.count(name)) { vector_fp data = file.readVector(id, name, m_size); - m_other.emplace(name, std::make_shared(m_size)); - auto& extra = m_other[name]; + m_extra.emplace(name, std::make_shared(m_size)); + auto& extra = m_extra[name]; std::copy(data.begin(), data.end(), extra->begin()); } } @@ -664,7 +670,8 @@ void SolutionArray::readEntry(const AnyMap& root, const std::string& id) } } else if (missingProps.size()) { throw CanteraError("SolutionArray::restore", - "Incomplete state information."); + "Incomplete state information: missing '{}'", + ba::join(missingProps, "', '")); } } diff --git a/src/base/stringUtils.cpp b/src/base/stringUtils.cpp index e9fa8f35425..25edff942a4 100644 --- a/src/base/stringUtils.cpp +++ b/src/base/stringUtils.cpp @@ -199,7 +199,7 @@ void tokenizePath(const std::string& in_val, std::vector& v) { std::string val = ba::trim_copy(in_val); v.clear(); - ba::split(v, val, ba::is_any_of("/\\:"), ba::token_compress_on); + ba::split(v, val, ba::is_any_of("/\\"), ba::token_compress_on); } size_t copyString(const std::string& source, char* dest, size_t length) diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index e8d70b498a2..548824da0eb 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -235,7 +235,7 @@ AnyMap Inlet1D::serialize(const double* soln) const return state; } 
-std::shared_ptr Inlet1D::asArray(const double* soln) const +shared_ptr Inlet1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); meta["type"] = "inlet"; @@ -244,7 +244,7 @@ std::shared_ptr Inlet1D::asArray(const double* soln) const // set gas state (using pressure from adjacent domain) double pressure = m_flow->phase().pressure(); auto phase = m_solution->thermo(); - phase->setState_TPY(m_temp, pressure, &m_yin[0]); + phase->setState_TPY(m_temp, pressure, m_yin.data()); vector_fp data(phase->stateSize()); phase->saveState(data); @@ -290,8 +290,7 @@ void Inlet1D::restore(SolutionArray& arr, double* soln, int loglevel) auto aux = arr.getAuxiliary(0); m_mdot = phase->density() * aux["velocity"]; } - auto Y = phase->massFractions(); - std::copy(Y, Y + m_nsp, &m_yin[0]); + phase->getMassFractions(m_yin.data()); } // ------------- Empty1D ------------- @@ -313,12 +312,11 @@ AnyMap Empty1D::serialize(const double* soln) const return state; } -std::shared_ptr Empty1D::asArray(const double* soln) const +shared_ptr Empty1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); meta["type"] = "empty"; - auto arr = SolutionArray::create(m_solution, 0, meta); - return arr; + return SolutionArray::create(m_solution, 0, meta); } // -------------- Symm1D -------------- @@ -374,12 +372,11 @@ AnyMap Symm1D::serialize(const double* soln) const return state; } -std::shared_ptr Symm1D::asArray(const double* soln) const +shared_ptr Symm1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); meta["type"] = "symmetry"; - auto arr = SolutionArray::create(m_solution, 0, meta); - return arr; + return SolutionArray::create(m_solution, 0, meta); } // -------- Outlet1D -------- @@ -460,12 +457,11 @@ AnyMap Outlet1D::serialize(const double* soln) const return state; } -std::shared_ptr Outlet1D::asArray(const double* soln) const +shared_ptr Outlet1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); 
meta["type"] = "outlet"; - auto arr = SolutionArray::create(m_solution, 0, meta); - return arr; + return SolutionArray::create(m_solution, 0, meta); } // -------- OutletRes1D -------- @@ -581,7 +577,7 @@ AnyMap OutletRes1D::serialize(const double* soln) const return state; } -std::shared_ptr OutletRes1D::asArray(const double* soln) const +shared_ptr OutletRes1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); meta["type"] = "outlet-reservoir"; @@ -672,13 +668,12 @@ AnyMap Surf1D::serialize(const double* soln) const return state; } -std::shared_ptr Surf1D::asArray(const double* soln) const +shared_ptr Surf1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); meta["type"] = "surface"; meta["temperature"] = m_temp; - auto arr = SolutionArray::create(m_solution, 0, meta); - return arr; + return SolutionArray::create(m_solution, 0, meta); } void Surf1D::restore(const AnyMap& state, double* soln, int loglevel) @@ -875,7 +870,7 @@ AnyMap ReactingSurf1D::serialize(const double* soln) const return state; } -std::shared_ptr ReactingSurf1D::asArray(const double* soln) const +shared_ptr ReactingSurf1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); meta["type"] = "reacting-surface"; diff --git a/src/oneD/Domain1D.cpp b/src/oneD/Domain1D.cpp index 7a28f40913c..7ff91730b8f 100644 --- a/src/oneD/Domain1D.cpp +++ b/src/oneD/Domain1D.cpp @@ -146,11 +146,6 @@ AnyMap Domain1D::serialize(const double* soln) const return getMeta(); } -std::shared_ptr Domain1D::asArray(const double* soln) const -{ - throw CanteraError("Domain1D::asArray", "Needs to be overloaded."); -} - void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) { auto set_tols = [&](const AnyValue& tols, const string& which, vector_fp& out) @@ -183,11 +178,6 @@ void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) } } -void Domain1D::restore(SolutionArray& arr, double* soln, int loglevel) -{ - throw 
CanteraError("Domain1D::restore", "Needs to be overloaded."); -} - void Domain1D::locate() { if (m_left) { diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index 8cb2cc0ff2d..633d00e81a1 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -194,9 +194,9 @@ void Sim1D::saveResidual(const std::string& fname, const std::string& id, std::swap(res, m_x); } +//! convert data format used by Python h5py export (Cantera < 3.0) AnyMap legacyH5(shared_ptr arr, const AnyMap& header={}) { - // convert data format used by Python h5py export (Cantera < 3.0) auto meta = arr->meta(); AnyMap out; @@ -284,7 +284,7 @@ void Sim1D::restore(const std::string& fname, const std::string& id, "Restoring from XML is no longer supported."); } if (extension == "h5" || extension == "hdf" || extension == "hdf5") { - std::map> arrs; + std::map> arrs; auto header = SolutionArray::readHeader(fname, id); for (auto dom : m_dom) { @@ -304,7 +304,7 @@ void Sim1D::restore(const std::string& fname, const std::string& id, finalize(); } else if (extension == "yaml" || extension == "yml") { AnyMap root = AnyMap::fromYamlFile(fname); - std::map> arrs; + std::map> arrs; for (auto dom : m_dom) { auto arr = SolutionArray::create(dom->solution()); arr->readEntry(root, id + "/" + dom->id()); @@ -319,7 +319,8 @@ void Sim1D::restore(const std::string& fname, const std::string& id, finalize(); } else { throw CanteraError("Sim1D::restore", - "Unknown file extension '{}'", extension); + "Unknown file extension '{}'; supported extensions include " + "'h5'/'hdf'/'hdf5' and 'yml'/'yaml'.", extension); } } diff --git a/src/oneD/StFlow.cpp b/src/oneD/StFlow.cpp index ee35f2ef539..4815014a430 100644 --- a/src/oneD/StFlow.cpp +++ b/src/oneD/StFlow.cpp @@ -107,7 +107,7 @@ StFlow::StFlow(ThermoPhase* ph, size_t nsp, size_t points) : m_kRadiating[1] = m_thermo->speciesIndex("H2O"); } -StFlow::StFlow(std::shared_ptr sol, size_t nsp, size_t points) : +StFlow::StFlow(shared_ptr sol, size_t nsp, size_t points) : 
StFlow(sol->thermo().get(), nsp, points) { m_solution = sol; @@ -757,7 +757,7 @@ AnyMap StFlow::serialize(const double* soln) const return state; } -std::shared_ptr StFlow::asArray(const double* soln) const +shared_ptr StFlow::asArray(const double* soln) const { auto arr = SolutionArray::create(m_solution, nPoints(), getMeta()); arr->setComponent("grid", m_z, true); From 417dcc9632e279c8b2eea07e6cbef323e4dff7c1 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 22 Dec 2022 20:35:06 +0100 Subject: [PATCH 75/93] [base] Simplify SolutionArray data structure --- include/cantera/base/SolutionArray.h | 12 ++------ src/base/SolutionArray.cpp | 44 +++++++--------------------- 2 files changed, 13 insertions(+), 43 deletions(-) diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index 350e587c905..85e000b628b 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -49,13 +49,6 @@ class SolutionArray return shared_ptr(new SolutionArray(sol, size, meta)); } - /*! - * Initialize SolutionArray - * - * @param extra Names of auxiliary data - */ - void initialize(const std::vector& extra={}); - //! 
Size of SolutionArray (number of entries) int size() const { return m_size; @@ -216,9 +209,8 @@ class SolutionArray AnyMap m_meta; //!< Metadata size_t m_index = npos; //!< Buffered index - shared_ptr m_work; //!< Work vector holding states - double* m_data; //!< Memory location holding state information - std::map> m_extra; //!< Auxiliary data + vector_fp m_data; //!< Work vector holding states + std::map m_extra; //!< Auxiliary data }; } diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index b7338b72723..00d1b69f2b3 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -48,16 +48,8 @@ SolutionArray::SolutionArray( throw CanteraError("SolutionArray::SolutionArray", "Unable to create SolutionArray from invalid Solution object."); } -} - -void SolutionArray::initialize(const std::vector& extra) -{ m_stride = m_sol->thermo()->stateSize(); - m_work.reset(new vector_fp(m_size * m_stride, 0.)); - m_data = m_work->data(); - for (auto& key : extra) { - m_extra.emplace(key, std::make_shared(m_size)); - } + m_data.resize(m_size * m_stride, 0.); } shared_ptr SolutionArray::thermo() @@ -92,7 +84,7 @@ vector_fp SolutionArray::getComponent(const std::string& name) const vector_fp out(m_size); if (m_extra.count(name)) { // auxiliary data - out = *m_extra.at(name); + out = m_extra.at(name); return out; } @@ -111,15 +103,9 @@ vector_fp SolutionArray::getComponent(const std::string& name) const void SolutionArray::setComponent( const std::string& name, const vector_fp& data, bool force) { - if (!m_work) { - initialize(); - } - if (!hasComponent(name)) { if (force) { - m_extra.emplace(name, std::make_shared(m_size)); - auto& extra = m_extra[name]; - std::copy(data.begin(), data.end(), extra->begin()); + m_extra.emplace(name, data); return; } throw CanteraError("SolutionArray::setComponent", "no component named " + name); @@ -130,8 +116,7 @@ void SolutionArray::setComponent( if (m_extra.count(name)) { // auxiliary data - auto extra = 
m_extra[name]; - std::copy(data.begin(), data.end(), extra->begin()); + m_extra[name] = data; } size_t ix = m_sol->thermo()->speciesIndex(name); @@ -147,9 +132,6 @@ void SolutionArray::setComponent( void SolutionArray::setIndex(size_t index, bool restore) { - if (!m_work) { - initialize(); - } if (m_size == 0) { throw CanteraError("SolutionArray::setIndex", "Unable to set index in empty SolutionArray."); @@ -193,8 +175,7 @@ std::map SolutionArray::getAuxiliary(size_t index) setIndex(index); std::map out; for (auto& item : m_extra) { - auto& extra = *item.second; - out[item.first] = extra[m_index]; + out[item.first] = item.second[m_index]; } return out; } @@ -271,7 +252,7 @@ void SolutionArray::writeEntry(const std::string& fname, const std::string& id, } for (auto& extra : m_extra) { - file.writeVector(id, extra.first, *(extra.second)); + file.writeVector(id, extra.first, extra.second); } file.flush(); } @@ -305,7 +286,7 @@ void SolutionArray::writeEntry(AnyMap& root, const std::string& id) data.update(m_meta); for (auto& extra : m_extra) { - data[extra.first] = *(extra.second); + data[extra.first] = extra.second; } auto phase = m_sol->thermo(); @@ -480,10 +461,9 @@ void SolutionArray::readEntry(const std::string& fname, const std::string& id) auto contents = file.contents(id); m_size = contents.first; + m_data.resize(m_size * m_stride, 0.); std::set names = contents.second; - initialize({}); - if (m_size == 0) { return; } @@ -551,9 +531,7 @@ void SolutionArray::readEntry(const std::string& fname, const std::string& id) for (const auto& name : names) { if (!states.count(name)) { vector_fp data = file.readVector(id, name, m_size); - m_extra.emplace(name, std::make_shared(m_size)); - auto& extra = m_extra[name]; - std::copy(data.begin(), data.end(), extra->begin()); + m_extra.emplace(name, data); } } } @@ -587,7 +565,7 @@ void SolutionArray::readEntry(const AnyMap& root, const std::string& id) // overwrite size - Sim1D erroneously assigns '1' (Cantera 2.6) m_size = 
0; } - initialize({}); + m_data.resize(m_size * m_stride, 0.); // restore data std::set exclude = {"points", "X", "Y"}; @@ -627,7 +605,7 @@ void SolutionArray::readEntry(const AnyMap& root, const std::string& id) throw NotImplementedError("SolutionArray::restore", "Import of '{}' data is not supported.", mode); } - m_sol->thermo()->saveState(nState, m_data); + m_sol->thermo()->saveState(nState, m_data.data()); auto props = stateProperties(mode, true); exclude.insert(props.begin(), props.end()); } else { From 7630316c7ed5c99d1edb0d369a406bfedcb922c9 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 22 Dec 2022 21:28:52 +0100 Subject: [PATCH 76/93] [base] Remove HighFive headers from Storage.h --- include/cantera/base/Storage.h | 435 +------------------------------- src/base/SolutionArray.cpp | 3 - src/base/Storage.cpp | 444 +++++++++++++++++++++++++++++++++ 3 files changed, 450 insertions(+), 432 deletions(-) create mode 100644 src/base/Storage.cpp diff --git a/include/cantera/base/Storage.h b/include/cantera/base/Storage.h index a1807010bfc..8ea9501960b 100644 --- a/include/cantera/base/Storage.h +++ b/include/cantera/base/Storage.h @@ -11,6 +11,7 @@ #include #if CT_USE_HDF5 + #ifdef _WIN32 // see https://github.com/microsoft/vcpkg/issues/24293 #define H5_BUILT_AS_DYNAMIC_LIB @@ -18,36 +19,10 @@ #define H5_BUILT_AS_STATIC_LIB #endif -#if CT_USE_SYSTEM_HIGHFIVE - #include - #include - #include - #include - #include - #include -#else - #include "cantera/ext/HighFive/H5Attribute.hpp" - #include "cantera/ext/HighFive/H5DataSet.hpp" - #include "cantera/ext/HighFive/H5DataSpace.hpp" - #include "cantera/ext/HighFive/H5DataType.hpp" - #include "cantera/ext/HighFive/H5File.hpp" - #include "cantera/ext/HighFive/H5Group.hpp" -#endif - -namespace h5 = HighFive; - -enum class H5Boolean { - FALSE = 0, - TRUE = 1, -}; - -h5::EnumType create_enum_boolean() { - return {{"FALSE", H5Boolean::FALSE}, - {"TRUE", H5Boolean::TRUE}}; +namespace HighFive { + class File; } 
-HIGHFIVE_REGISTER_TYPE(H5Boolean, create_enum_boolean) - #endif namespace Cantera @@ -65,6 +40,8 @@ class Storage public: Storage(std::string fname, bool write); + ~Storage(); + //! Set compression level (0..9) /*! * Compression is only applied to species data; note that compression may increase @@ -73,9 +50,6 @@ class Storage */ void setCompressionLevel(int level); - //! Flush file contents - void flush(); - //! Check whether path go location exists //! If the file has write access, create location if necessary //! @param id storage location within file @@ -134,409 +108,12 @@ class Storage bool checkGroupRead(const std::string& id) const; bool checkGroupWrite(const std::string& id); - std::unique_ptr m_file; + std::unique_ptr m_file; bool m_write; int m_compressionLevel=0; #endif }; -#if CT_USE_HDF5 - -Storage::Storage(std::string fname, bool write) : m_write(write) -{ - if (m_write) { - m_file.reset(new h5::File(fname, h5::File::OpenOrCreate)); - } else { - m_file.reset(new h5::File(fname, h5::File::ReadOnly)); - } -} - -void Storage::setCompressionLevel(int level) -{ - if (level < 0 || level > 9) { - throw CanteraError("Storage::setCompressionLevel", - "Invalid compression level '{}' (needs to be 0..9).", level); - } - m_compressionLevel = level; -} - -void Storage::flush() -{ - m_file->flush(); -} - -bool Storage::checkGroupRead(const std::string& id) const -{ - std::vector tokens; - tokenizePath(id, tokens); - std::string grp = tokens[0]; - if (!m_file->exist(grp) || m_file->getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("Storage::checkGroup", - "No group with id '{}' found", grp); - } - - std::string path = grp; - h5::Group sub = m_file->getGroup(grp); - tokens.erase(tokens.begin()); - for (auto& grp : tokens) { - path += "/" + grp; - if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("Storage::checkGroup", - "No group with id '{}' found", path); - } - sub = sub.getGroup(grp); - } - return 
true; -} - -bool Storage::checkGroupWrite(const std::string& id) -{ - if (!m_file->exist(id)) { - m_file->createGroup(id); - return true; - } - if (m_file->getObjectType(id) != h5::ObjectType::Group) { - throw CanteraError("Storage::checkGroup", - "Invalid object with id '{}' exists", id); - } - return true; -} - -bool Storage::checkGroup(const std::string& id) { - if (m_write) { - return checkGroupWrite(id); - } - return checkGroupRead(id); -} - -std::pair> Storage::contents(const std::string& id) const -{ - h5::Group sub = m_file->getGroup(id); - std::set names; - size_t nDims = npos; - size_t nElements = 0; - for (auto& name : sub.listObjectNames()) { - if (sub.getObjectType(name) == h5::ObjectType::Dataset) { - h5::DataSpace space = sub.getDataSet(name).getSpace(); - names.insert(name); - if (space.getNumberDimensions() < nDims) { - nDims = space.getNumberDimensions(); - nElements = space.getElementCount(); - } - } - } - if (nDims != 1 && nDims != npos) { - throw NotImplementedError("Storage::content", - "Unable to restore data with {} dimensions.", nDims); - } - return std::make_pair(nElements, names); -} - -AnyMap readH5Attributes(const h5::Group& sub, bool recursive) -{ - // restore meta data from attributes - AnyMap out; - for (auto& name : sub.listAttributeNames()) { - h5::Attribute attr = sub.getAttribute(name); - h5::DataType dtype = attr.getDataType(); - h5::DataTypeClass dclass = dtype.getClass(); - if (dclass == h5::DataTypeClass::Float) { - if (attr.getSpace().getElementCount() > 1) { - std::vector values; - attr.read(values); - out[name] = values; - } else { - double value; - attr.read(value); - out[name] = value; - } - } else if (dclass == h5::DataTypeClass::Integer) { - if (attr.getSpace().getElementCount() > 1) { - std::vector values; - attr.read(values); - out[name] = values; - } else { - int value; - attr.read(value); - out[name] = value; - } - } else if (dclass == h5::DataTypeClass::String) { - if (attr.getSpace().getElementCount() > 1) { - 
std::vector values; - attr.read(values); - out[name] = values; - } else { - std::string value; - attr.read(value); - out[name] = value; - } - } else if (dclass == h5::DataTypeClass::Enum) { - // only booleans are supported - if (attr.getSpace().getElementCount() > 1) { - std::vector values; - attr.read(values); - std::vector bValues; - for (auto v : values) { - bValues.push_back(bool(v)); - } - out[name] = bValues; - } else { - H5Boolean value; - attr.read(value); - out[name] = bool(value); - } - } else { - throw NotImplementedError("readH5Attributes", - "Unable to read attribute '{}' with type '{}'", name, dtype.string()); - } - } - - if (recursive) { - for (auto& name : sub.listObjectNames()) { - if (sub.getObjectType(name) == h5::ObjectType::Group) { - out[name] = readH5Attributes(sub.getGroup(name), recursive); - } - } - } - - return out; -} - -AnyMap Storage::readAttributes(const std::string& id, bool recursive) const -{ - h5::Group sub = m_file->getGroup(id); - return readH5Attributes(sub, recursive); -} - -void writeH5Attributes(h5::Group sub, const AnyMap& meta) -{ - for (auto& item : meta) { - if (item.second.is()) { - double value = item.second.asDouble(); - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(value)); - attr.write(value); - } else if (item.second.is() || item.second.is()) { - int value = item.second.asInt(); - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(value)); - attr.write(value); - } else if (item.second.is()) { - std::string value = item.second.asString(); - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(value)); - attr.write(value); - } else if (item.second.is()) { - bool bValue = item.second.asBool(); - H5Boolean value = bValue ? 
H5Boolean::TRUE : H5Boolean::FALSE; - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(value)); - attr.write(value); - } else if (item.second.is>()) { - auto values = item.second.as>(); - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(values)); - attr.write(values); - } else if (item.second.is>()) { - auto values = item.second.as>(); - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(values)); - attr.write(values); - } else if (item.second.is>()) { - auto values = item.second.as>(); - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(values)); - attr.write(values); - } else if (item.second.is>()) { - auto bValue = item.second.as>(); - std::vector values; - for (auto b : bValue) { - values.push_back(b ? H5Boolean::TRUE : H5Boolean::FALSE); - } - h5::Attribute attr = sub.createAttribute( - item.first, h5::DataSpace::From(values)); - attr.write(values); - } else if (item.second.is()) { - // step into recursion - auto value = item.second.as(); - auto grp = sub.createGroup(item.first); - writeH5Attributes(grp, value); - } else { - throw NotImplementedError("Storage::writeAttributes", - "Unable to write attribute '{}' with type '{}'", - item.first, item.second.type_str()); - } - } -} - -void Storage::writeAttributes(const std::string& id, const AnyMap& meta) -{ - h5::Group sub = m_file->getGroup(id); - writeH5Attributes(sub, meta); -} - -vector_fp Storage::readVector(const std::string& id, - const std::string& name, size_t size) const -{ - h5::Group sub = m_file->getGroup(id); - if (!sub.exist(name)) { - throw CanteraError("Storage::readVector", - "DataSet '{}' not found in path '{}'.", name, id); - } - h5::DataSet dataset = sub.getDataSet(name); - if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { - throw CanteraError("Storage::readVector", - "Type of DataSet '{}' is inconsistent; expected HDF float.", name); - } - if (dataset.getElementCount() 
!= size) { - throw CanteraError("Storage::readVector", - "Size of DataSet '{}' is inconsistent; expected {} elements but " - "received {} elements.", name, size, dataset.getElementCount()); - } - vector_fp out; - dataset.read(out); - return out; -} - -void Storage::writeVector(const std::string& id, - const std::string& name, const vector_fp& data) -{ - h5::Group sub = m_file->getGroup(id); - std::vector dims{data.size()}; - h5::DataSet dataset = sub.createDataSet(name, h5::DataSpace(dims)); - dataset.write(data); -} - -std::vector Storage::readMatrix(const std::string& id, - const std::string& name, - size_t rows, size_t cols) const -{ - h5::Group sub = m_file->getGroup(id); - if (!sub.exist(name)) { - throw CanteraError("Storage::readVector", - "DataSet '{}' not found in path '{}'.", name, id); - } - h5::DataSet dataset = sub.getDataSet(name); - if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { - throw CanteraError("Storage::readMatrix", - "Type of DataSet '{}' is inconsistent; expected HDF float.", name); - } - h5::DataSpace space = dataset.getSpace(); - if (space.getNumberDimensions() != 2) { - throw CanteraError("Storage::readMatrix", - "Shape of DataSet '{}' is inconsistent; expected two dimensions.", name); - } - const auto& shape = space.getDimensions(); - if (shape[0] != rows) { - throw CanteraError("Storage::readMatrix", - "Shape of DataSet '{}' is inconsistent; expected {} rows.", name, rows); - } - if (shape[1] != cols) { - throw CanteraError("Storage::readMatrix", - "Shape of DataSet '{}' is inconsistent; expected {} columns.", name, cols); - } - std::vector out; - dataset.read(out); - return out; -} - -void Storage::writeMatrix(const std::string& id, - const std::string& name, const std::vector& data) -{ - h5::Group sub = m_file->getGroup(id); - std::vector dims{data.size()}; - dims.push_back(data.size() ? 
data[0].size() : 0); - if (m_compressionLevel) { - // Set chunk size to single chunk and apply compression level; for caveats, see - // https://stackoverflow.com/questions/32994766/compressed-files-bigger-in-h5py - h5::DataSpace space(dims, dims); //{h5::DataSpace::UNLIMITED, dims[1]}); - h5::DataSetCreateProps props; - props.add(h5::Chunking(std::vector{dims[0], dims[1]})); - props.add(h5::Deflate(m_compressionLevel)); - h5::DataSet dataset = sub.createDataSet(name, space, props); - dataset.write(data); - } else { - h5::DataSpace space(dims); - h5::DataSet dataset = sub.createDataSet(name, space); - dataset.write(data); - } -} - -#else - -Storage::Storage(std::string fname, bool write) -{ - throw CanteraError("Storage::Storage", - "Saving to HDF requires HighFive installation."); -} - -void Storage::setCompressionLevel(int level) -{ - throw CanteraError("Storage::setCompressionLevel", - "Saving to HDF requires HighFive installation."); -} - -void Storage::flush() -{ - throw CanteraError("Storage::flush", - "Saving to HDF requires HighFive installation."); -} - -bool Storage::checkGroup(const std::string& id) -{ - throw CanteraError("Storage::checkGroup", - "Saving to HDF requires HighFive installation."); -} - -std::pair> Storage::contents(const std::string& id) const -{ - throw CanteraError("Storage::contents", - "Saving to HDF requires HighFive installation."); -} - -AnyMap Storage::readAttributes(const std::string& id, bool recursive) const -{ - throw CanteraError("Storage::readAttributes", - "Saving to HDF requires HighFive installation."); -} - -void Storage::writeAttributes(const std::string& id, const AnyMap& meta) -{ - throw CanteraError("Storage::writeAttributes", - "Saving to HDF requires HighFive installation."); -} - -vector_fp Storage::readVector(const std::string& id, - const std::string& name, size_t size) const -{ - throw CanteraError("Storage::readVector", - "Saving to HDF requires HighFive installation."); -} - -void Storage::writeVector(const 
std::string& id, - const std::string& name, const vector_fp& data) -{ - throw CanteraError("Storage::writeVector", - "Saving to HDF requires HighFive installation."); -} - -std::vector Storage::readMatrix(const std::string& id, - const std::string& name, - size_t rows, size_t cols) const -{ - throw CanteraError("Storage::readMatrix", - "Saving to HDF requires HighFive installation."); -} - -void Storage::writeMatrix(const std::string& id, - const std::string& name, const std::vector& data) -{ - throw CanteraError("Storage::writeMatrix", - "Saving to HDF requires HighFive installation."); -} - -#endif - } #endif diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index 00d1b69f2b3..bae91c71a09 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -210,7 +210,6 @@ void SolutionArray::writeHeader(const std::string& fname, const std::string& id, Storage file(fname, true); file.checkGroup(id); file.writeAttributes(id, preamble(desc)); - file.flush(); } void SolutionArray::writeHeader(AnyMap& root, const std::string& id, @@ -229,7 +228,6 @@ void SolutionArray::writeEntry(const std::string& fname, const std::string& id, file.checkGroup(id); file.writeAttributes(id, m_meta); if (!m_size) { - file.flush(); return; } @@ -254,7 +252,6 @@ void SolutionArray::writeEntry(const std::string& fname, const std::string& id, for (auto& extra : m_extra) { file.writeVector(id, extra.first, extra.second); } - file.flush(); } AnyMap& openField(AnyMap& root, const std::string& id) diff --git a/src/base/Storage.cpp b/src/base/Storage.cpp new file mode 100644 index 00000000000..c00c50e9eff --- /dev/null +++ b/src/base/Storage.cpp @@ -0,0 +1,444 @@ +/** + * @file Storage.cpp + * Definition file for class Storage. + */ + +// This file is part of Cantera. See License.txt in the top-level directory or +// at https://cantera.org/license.txt for license and copyright information. 
+ +#include "cantera/base/AnyMap.h" +#include "cantera/base/Storage.h" + +#if CT_USE_HDF5 + +#if CT_USE_SYSTEM_HIGHFIVE + #include + #include + #include + #include + #include + #include +#else + #include "cantera/ext/HighFive/H5Attribute.hpp" + #include "cantera/ext/HighFive/H5DataSet.hpp" + #include "cantera/ext/HighFive/H5DataSpace.hpp" + #include "cantera/ext/HighFive/H5DataType.hpp" + #include "cantera/ext/HighFive/H5File.hpp" + #include "cantera/ext/HighFive/H5Group.hpp" +#endif + +namespace h5 = HighFive; + +enum class H5Boolean { + FALSE = 0, + TRUE = 1, +}; + +h5::EnumType create_enum_boolean() { + return {{"FALSE", H5Boolean::FALSE}, + {"TRUE", H5Boolean::TRUE}}; +} + +HIGHFIVE_REGISTER_TYPE(H5Boolean, create_enum_boolean) + +#endif + +namespace Cantera +{ + +#if CT_USE_HDF5 + +Storage::Storage(std::string fname, bool write) : m_write(write) +{ + if (m_write) { + m_file.reset(new h5::File(fname, h5::File::OpenOrCreate)); + } else { + m_file.reset(new h5::File(fname, h5::File::ReadOnly)); + } +} + +Storage::~Storage() +{ + m_file->flush(); +} + +void Storage::setCompressionLevel(int level) +{ + if (level < 0 || level > 9) { + throw CanteraError("Storage::setCompressionLevel", + "Invalid compression level '{}' (needs to be 0..9).", level); + } + m_compressionLevel = level; +} + +bool Storage::checkGroupRead(const std::string& id) const +{ + std::vector tokens; + tokenizePath(id, tokens); + std::string grp = tokens[0]; + if (!m_file->exist(grp) || m_file->getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("Storage::checkGroup", + "No group with id '{}' found", grp); + } + + std::string path = grp; + h5::Group sub = m_file->getGroup(grp); + tokens.erase(tokens.begin()); + for (auto& grp : tokens) { + path += "/" + grp; + if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("Storage::checkGroup", + "No group with id '{}' found", path); + } + sub = sub.getGroup(grp); + } + return true; +} + +bool 
Storage::checkGroupWrite(const std::string& id) +{ + if (!m_file->exist(id)) { + m_file->createGroup(id); + return true; + } + if (m_file->getObjectType(id) != h5::ObjectType::Group) { + throw CanteraError("Storage::checkGroup", + "Invalid object with id '{}' exists", id); + } + return true; +} + +bool Storage::checkGroup(const std::string& id) { + if (m_write) { + return checkGroupWrite(id); + } + return checkGroupRead(id); +} + +std::pair> Storage::contents(const std::string& id) const +{ + h5::Group sub = m_file->getGroup(id); + std::set names; + size_t nDims = npos; + size_t nElements = 0; + for (auto& name : sub.listObjectNames()) { + if (sub.getObjectType(name) == h5::ObjectType::Dataset) { + h5::DataSpace space = sub.getDataSet(name).getSpace(); + names.insert(name); + if (space.getNumberDimensions() < nDims) { + nDims = space.getNumberDimensions(); + nElements = space.getElementCount(); + } + } + } + if (nDims != 1 && nDims != npos) { + throw NotImplementedError("Storage::content", + "Unable to restore data with {} dimensions.", nDims); + } + return std::make_pair(nElements, names); +} + +AnyMap readH5Attributes(const h5::Group& sub, bool recursive) +{ + // restore meta data from attributes + AnyMap out; + for (auto& name : sub.listAttributeNames()) { + h5::Attribute attr = sub.getAttribute(name); + h5::DataType dtype = attr.getDataType(); + h5::DataTypeClass dclass = dtype.getClass(); + if (dclass == h5::DataTypeClass::Float) { + if (attr.getSpace().getElementCount() > 1) { + std::vector values; + attr.read(values); + out[name] = values; + } else { + double value; + attr.read(value); + out[name] = value; + } + } else if (dclass == h5::DataTypeClass::Integer) { + if (attr.getSpace().getElementCount() > 1) { + std::vector values; + attr.read(values); + out[name] = values; + } else { + int value; + attr.read(value); + out[name] = value; + } + } else if (dclass == h5::DataTypeClass::String) { + if (attr.getSpace().getElementCount() > 1) { + std::vector values; 
+ attr.read(values); + out[name] = values; + } else { + std::string value; + attr.read(value); + out[name] = value; + } + } else if (dclass == h5::DataTypeClass::Enum) { + // only booleans are supported + if (attr.getSpace().getElementCount() > 1) { + std::vector values; + attr.read(values); + std::vector bValues; + for (auto v : values) { + bValues.push_back(bool(v)); + } + out[name] = bValues; + } else { + H5Boolean value; + attr.read(value); + out[name] = bool(value); + } + } else { + throw NotImplementedError("readH5Attributes", + "Unable to read attribute '{}' with type '{}'", name, dtype.string()); + } + } + + if (recursive) { + for (auto& name : sub.listObjectNames()) { + if (sub.getObjectType(name) == h5::ObjectType::Group) { + out[name] = readH5Attributes(sub.getGroup(name), recursive); + } + } + } + + return out; +} + +AnyMap Storage::readAttributes(const std::string& id, bool recursive) const +{ + h5::Group sub = m_file->getGroup(id); + return readH5Attributes(sub, recursive); +} + +void writeH5Attributes(h5::Group sub, const AnyMap& meta) +{ + for (auto& item : meta) { + if (item.second.is()) { + double value = item.second.asDouble(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is() || item.second.is()) { + int value = item.second.asInt(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is()) { + std::string value = item.second.asString(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is()) { + bool bValue = item.second.asBool(); + H5Boolean value = bValue ? 
H5Boolean::TRUE : H5Boolean::FALSE; + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is>()) { + auto values = item.second.as>(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is>()) { + auto values = item.second.as>(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is>()) { + auto values = item.second.as>(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is>()) { + auto bValue = item.second.as>(); + std::vector values; + for (auto b : bValue) { + values.push_back(b ? H5Boolean::TRUE : H5Boolean::FALSE); + } + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is()) { + // step into recursion + auto value = item.second.as(); + auto grp = sub.createGroup(item.first); + writeH5Attributes(grp, value); + } else { + throw NotImplementedError("Storage::writeAttributes", + "Unable to write attribute '{}' with type '{}'", + item.first, item.second.type_str()); + } + } +} + +void Storage::writeAttributes(const std::string& id, const AnyMap& meta) +{ + h5::Group sub = m_file->getGroup(id); + writeH5Attributes(sub, meta); +} + +vector_fp Storage::readVector(const std::string& id, + const std::string& name, size_t size) const +{ + h5::Group sub = m_file->getGroup(id); + if (!sub.exist(name)) { + throw CanteraError("Storage::readVector", + "DataSet '{}' not found in path '{}'.", name, id); + } + h5::DataSet dataset = sub.getDataSet(name); + if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { + throw CanteraError("Storage::readVector", + "Type of DataSet '{}' is inconsistent; expected HDF float.", name); + } + if (dataset.getElementCount() 
!= size) { + throw CanteraError("Storage::readVector", + "Size of DataSet '{}' is inconsistent; expected {} elements but " + "received {} elements.", name, size, dataset.getElementCount()); + } + vector_fp out; + dataset.read(out); + return out; +} + +void Storage::writeVector(const std::string& id, + const std::string& name, const vector_fp& data) +{ + h5::Group sub = m_file->getGroup(id); + std::vector dims{data.size()}; + h5::DataSet dataset = sub.createDataSet(name, h5::DataSpace(dims)); + dataset.write(data); +} + +std::vector Storage::readMatrix(const std::string& id, + const std::string& name, + size_t rows, size_t cols) const +{ + h5::Group sub = m_file->getGroup(id); + if (!sub.exist(name)) { + throw CanteraError("Storage::readVector", + "DataSet '{}' not found in path '{}'.", name, id); + } + h5::DataSet dataset = sub.getDataSet(name); + if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { + throw CanteraError("Storage::readMatrix", + "Type of DataSet '{}' is inconsistent; expected HDF float.", name); + } + h5::DataSpace space = dataset.getSpace(); + if (space.getNumberDimensions() != 2) { + throw CanteraError("Storage::readMatrix", + "Shape of DataSet '{}' is inconsistent; expected two dimensions.", name); + } + const auto& shape = space.getDimensions(); + if (shape[0] != rows) { + throw CanteraError("Storage::readMatrix", + "Shape of DataSet '{}' is inconsistent; expected {} rows.", name, rows); + } + if (shape[1] != cols) { + throw CanteraError("Storage::readMatrix", + "Shape of DataSet '{}' is inconsistent; expected {} columns.", name, cols); + } + std::vector out; + dataset.read(out); + return out; +} + +void Storage::writeMatrix(const std::string& id, + const std::string& name, const std::vector& data) +{ + h5::Group sub = m_file->getGroup(id); + std::vector dims{data.size()}; + dims.push_back(data.size() ? 
data[0].size() : 0); + if (m_compressionLevel) { + // Set chunk size to single chunk and apply compression level; for caveats, see + // https://stackoverflow.com/questions/32994766/compressed-files-bigger-in-h5py + h5::DataSpace space(dims, dims); //{h5::DataSpace::UNLIMITED, dims[1]}); + h5::DataSetCreateProps props; + props.add(h5::Chunking(std::vector{dims[0], dims[1]})); + props.add(h5::Deflate(m_compressionLevel)); + h5::DataSet dataset = sub.createDataSet(name, space, props); + dataset.write(data); + } else { + h5::DataSpace space(dims); + h5::DataSet dataset = sub.createDataSet(name, space); + dataset.write(data); + } +} + +#else + +Storage::Storage(std::string fname, bool write) +{ + throw CanteraError("Storage::Storage", + "Saving to HDF requires HighFive installation."); +} + +Storage::~Storage() +{ +} + +void Storage::setCompressionLevel(int level) +{ + throw CanteraError("Storage::setCompressionLevel", + "Saving to HDF requires HighFive installation."); +} + +bool Storage::checkGroup(const std::string& id) +{ + throw CanteraError("Storage::checkGroup", + "Saving to HDF requires HighFive installation."); +} + +std::pair> Storage::contents(const std::string& id) const +{ + throw CanteraError("Storage::contents", + "Saving to HDF requires HighFive installation."); +} + +AnyMap Storage::readAttributes(const std::string& id, bool recursive) const +{ + throw CanteraError("Storage::readAttributes", + "Saving to HDF requires HighFive installation."); +} + +void Storage::writeAttributes(const std::string& id, const AnyMap& meta) +{ + throw CanteraError("Storage::writeAttributes", + "Saving to HDF requires HighFive installation."); +} + +vector_fp Storage::readVector(const std::string& id, + const std::string& name, size_t size) const +{ + throw CanteraError("Storage::readVector", + "Saving to HDF requires HighFive installation."); +} + +void Storage::writeVector(const std::string& id, + const std::string& name, const vector_fp& data) +{ + throw 
CanteraError("Storage::writeVector", + "Saving to HDF requires HighFive installation."); +} + +std::vector Storage::readMatrix(const std::string& id, + const std::string& name, + size_t rows, size_t cols) const +{ + throw CanteraError("Storage::readMatrix", + "Saving to HDF requires HighFive installation."); +} + +void Storage::writeMatrix(const std::string& id, + const std::string& name, const std::vector& data) +{ + throw CanteraError("Storage::writeMatrix", + "Saving to HDF requires HighFive installation."); +} + +#endif + +} From ef8b62dec7fdfdc43a29b7851d89b519e7d150ea Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 22 Dec 2022 22:26:40 +0100 Subject: [PATCH 77/93] [OneD] Return header data when loading via Sim1D::restore --- include/cantera/oneD/Sim1D.h | 3 +- interfaces/cython/cantera/_onedim.pxd | 2 +- interfaces/cython/cantera/_onedim.pyx | 11 ++++-- src/base/SolutionArray.cpp | 48 +++++++++++++++------------ src/oneD/Sim1D.cpp | 8 +++-- 5 files changed, 45 insertions(+), 27 deletions(-) diff --git a/include/cantera/oneD/Sim1D.h b/include/cantera/oneD/Sim1D.h index 005e3268d44..8fd006ee118 100644 --- a/include/cantera/oneD/Sim1D.h +++ b/include/cantera/oneD/Sim1D.h @@ -144,8 +144,9 @@ class Sim1D : public OneDim * @param fname Name of container file * @param id Identifier of solution within the container file * @param loglevel Level of diagnostic output + * @return AnyMap containing header information */ - void restore(const std::string& fname, const std::string& id, int loglevel=2); + AnyMap restore(const std::string& fname, const std::string& id, int loglevel=2); /** * Initialize the solution with a previously-saved solution (legacy implementation). 
diff --git a/interfaces/cython/cantera/_onedim.pxd b/interfaces/cython/cantera/_onedim.pxd index c59b5941ea9..52612d4904a 100644 --- a/interfaces/cython/cantera/_onedim.pxd +++ b/interfaces/cython/cantera/_onedim.pxd @@ -124,7 +124,7 @@ cdef extern from "cantera/oneD/Sim1D.h": void setRefineCriteria(size_t, double, double, double, double) except +translate_exception vector[double] getRefineCriteria(int) except +translate_exception void save(string, string, string, int, int) except +translate_exception - void restore(string, string, int) except +translate_exception + CxxAnyMap restore(string, string, int) except +translate_exception void write_yaml(string, string, string, int) except +translate_exception void read_yaml(string, string, int) except +translate_exception void writeStats(int) except +translate_exception diff --git a/interfaces/cython/cantera/_onedim.pyx b/interfaces/cython/cantera/_onedim.pyx index 3dfb910a5de..a2a1a997e72 100644 --- a/interfaces/cython/cantera/_onedim.pyx +++ b/interfaces/cython/cantera/_onedim.pyx @@ -6,7 +6,7 @@ import warnings from collections import OrderedDict import numpy as np -from ._utils cimport stringify, pystr +from ._utils cimport stringify, pystr, anymap_to_dict from ._utils import CanteraError from cython.operator import dereference as deref @@ -1505,11 +1505,18 @@ cdef class Sim1D: :param loglevel: Amount of logging information to display while restoring, from 0 (disabled) to 2 (most verbose). + :return: + dictionary containing meta data >>> s.restore(filename='save.yaml', name='energy_off') + + .. 
versionchanged:: 3.0 + Implemented return value for meta data """ - self.sim.restore(stringify(str(filename)), stringify(name), loglevel) + cdef CxxAnyMap header + header = self.sim.restore(stringify(str(filename)), stringify(name), loglevel) self._initialized = True + return anymap_to_dict(header) def read_yaml(self, filename, name='solution', description='none', quiet=True): """ diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index bae91c71a09..4862fc40643 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -367,10 +367,35 @@ AnyMap SolutionArray::readHeader(const std::string& fname, const std::string& id return file.readAttributes(id, false); } +const AnyMap& locateField(const AnyMap& root, const std::string& id) +{ + // locate field based on 'id' + std::vector tokens; + tokenizePath(id, tokens); + const AnyMap* ptr = &root; // use raw pointer to avoid copying + std::string path = ""; + for (auto& field : tokens) { + path += "/" + field; + const AnyMap& sub = *ptr; + if (!sub.hasKey(field) || !sub[field].is()) { + throw CanteraError("SolutionArray::restore", + "No field or solution with id '{}'", path); + } + ptr = &sub[field].as(); // AnyMap lacks 'operator=' for const AnyMap + } + return *ptr; +} + AnyMap SolutionArray::readHeader(const AnyMap& root, const std::string& id) { - // todo: implement - throw NotImplementedError("SolutionArray::readHeader", "Not implemented."); + auto sub = locateField(root, id); + AnyMap header; + for (const auto& item : sub) { + if (!sub[item.first].is()) { + header[item.first] = item.second; + } + } + return header; } AnyMap SolutionArray::restore(const std::string& fname, const std::string& id) @@ -533,25 +558,6 @@ void SolutionArray::readEntry(const std::string& fname, const std::string& id) } } -const AnyMap& locateField(const AnyMap& root, const std::string& id) -{ - // locate field based on 'id' - std::vector tokens; - tokenizePath(id, tokens); - const AnyMap* ptr = &root; // use 
raw pointer to avoid copying - std::string path = ""; - for (auto& field : tokens) { - path += "/" + field; - const AnyMap& sub = *ptr; - if (!sub.hasKey(field) || !sub[field].is()) { - throw CanteraError("SolutionArray::restore", - "No field or solution with id '{}'", path); - } - ptr = &sub[field].as(); // AnyMap lacks 'operator=' for const AnyMap - } - return *ptr; -} - void SolutionArray::readEntry(const AnyMap& root, const std::string& id) { auto sub = locateField(root, id); diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index 633d00e81a1..c4d08a53340 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -274,7 +274,7 @@ AnyMap legacyH5(shared_ptr arr, const AnyMap& header={}) return out; } -void Sim1D::restore(const std::string& fname, const std::string& id, +AnyMap Sim1D::restore(const std::string& fname, const std::string& id, int loglevel) { size_t dot = fname.find_last_of("."); @@ -283,9 +283,10 @@ void Sim1D::restore(const std::string& fname, const std::string& id, throw CanteraError("Sim1D::restore", "Restoring from XML is no longer supported."); } + AnyMap header; if (extension == "h5" || extension == "hdf" || extension == "hdf5") { std::map> arrs; - auto header = SolutionArray::readHeader(fname, id); + header = SolutionArray::readHeader(fname, id); for (auto dom : m_dom) { auto arr = SolutionArray::create(dom->solution()); @@ -305,6 +306,8 @@ void Sim1D::restore(const std::string& fname, const std::string& id, } else if (extension == "yaml" || extension == "yml") { AnyMap root = AnyMap::fromYamlFile(fname); std::map> arrs; + header = SolutionArray::readHeader(root, id); + for (auto dom : m_dom) { auto arr = SolutionArray::create(dom->solution()); arr->readEntry(root, id + "/" + dom->id()); @@ -322,6 +325,7 @@ void Sim1D::restore(const std::string& fname, const std::string& id, "Unknown file extension '{}'; supported extensions include " "'h5'/'hdf'/'hdf5' and 'yml'/'yaml'.", extension); } + return header; } void Sim1D::read_yaml(const 
std::string& fname, const std::string& id, From 834c39bfb0f12b46ccac57edfca1313a7a847f65 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 22 Dec 2022 23:15:31 +0100 Subject: [PATCH 78/93] [OneD] Remove legacy YAML serialization --- include/cantera/oneD/Boundary1D.h | 11 -- include/cantera/oneD/Domain1D.h | 13 ++- include/cantera/oneD/Sim1D.h | 14 --- include/cantera/oneD/StFlow.h | 4 +- interfaces/cython/cantera/_onedim.pxd | 2 - interfaces/cython/cantera/_onedim.pyx | 21 ---- src/oneD/Boundary1D.cpp | 159 +------------------------- src/oneD/Domain1D.cpp | 25 +++- src/oneD/Sim1D.cpp | 78 ------------- src/oneD/StFlow.cpp | 57 +-------- 10 files changed, 38 insertions(+), 346 deletions(-) diff --git a/include/cantera/oneD/Boundary1D.h b/include/cantera/oneD/Boundary1D.h index 063fe8c09b0..9898777111f 100644 --- a/include/cantera/oneD/Boundary1D.h +++ b/include/cantera/oneD/Boundary1D.h @@ -135,9 +135,7 @@ class Inlet1D : public Boundary1D virtual void init(); virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; virtual shared_ptr asArray(const double* soln) const; - virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); protected: @@ -171,7 +169,6 @@ class Empty1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; virtual shared_ptr asArray(const double* soln) const; virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -197,7 +194,6 @@ class Symm1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; virtual shared_ptr asArray(const double* soln) const; virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -223,7 +219,6 @@ class Outlet1D : 
public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; virtual shared_ptr asArray(const double* soln) const; virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -256,9 +251,7 @@ class OutletRes1D : public Boundary1D virtual void init(); virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; virtual shared_ptr asArray(const double* soln) const; - virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); protected: @@ -290,9 +283,7 @@ class Surf1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; virtual shared_ptr asArray(const double* soln) const; - virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); virtual void showSolution_s(std::ostream& s, const double* x); @@ -328,9 +319,7 @@ class ReactingSurf1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; virtual shared_ptr asArray(const double* soln) const; - virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); virtual void _getInitialSoln(double* x) { diff --git a/include/cantera/oneD/Domain1D.h b/include/cantera/oneD/Domain1D.h index 6205d6d6ee7..67cceafdaa0 100644 --- a/include/cantera/oneD/Domain1D.h +++ b/include/cantera/oneD/Domain1D.h @@ -313,8 +313,10 @@ class Domain1D //! Save the state of this domain as an AnyMap /*! * @param soln local solution vector for this domain + * + * @deprecated To be removed after Cantera 3.0; superseded by asArray. 
*/ - virtual AnyMap serialize(const double* soln) const; + AnyMap serialize(const double* soln) const; //! Save the state of this domain as a SolutionArray /*! @@ -323,7 +325,7 @@ class Domain1D * @since New in Cantera 3.0. */ virtual shared_ptr asArray(const double* soln) const { - throw CanteraError("Domain1D::asArray", "Needs to be overloaded."); + throw NotImplementedError("Domain1D::asArray", "Needs to be overloaded."); } //! Restore the solution for this domain from an AnyMap @@ -332,8 +334,10 @@ class Domain1D * @param[out] soln Value of the solution vector, local to this domain * @param[in] loglevel 0 to suppress all output; 1 to show warnings; 2 for * verbose output + * + * @deprecated To be removed after Cantera 3.0; restore from SolutionArray instead. */ - virtual void restore(const AnyMap& state, double* soln, int loglevel); + void restore(const AnyMap& state, double* soln, int loglevel); //! Restore the solution for this domain from a SolutionArray /*! @@ -502,6 +506,9 @@ class Domain1D //! Retrieve meta data virtual AnyMap getMeta() const; + //! Retrieve meta data + virtual void setMeta(const AnyMap& meta, int loglevel); + doublereal m_rdt; size_t m_nv; size_t m_points; diff --git a/include/cantera/oneD/Sim1D.h b/include/cantera/oneD/Sim1D.h index 8fd006ee118..973becb5014 100644 --- a/include/cantera/oneD/Sim1D.h +++ b/include/cantera/oneD/Sim1D.h @@ -122,13 +122,6 @@ class Sim1D : public OneDim void save(const std::string& fname, const std::string& id, const std::string& desc, int loglevel=1, int compression=0); - /** - * Save the current solution to YAML (legacy implementation). @see save - * @since New in Cantera 3.0. - */ - void write_yaml(const std::string& fname, const std::string& id, - const std::string& desc, int loglevel=1); - /** * Save the residual of the current solution to a container file. 
* @param fname Name of output container file @@ -148,13 +141,6 @@ class Sim1D : public OneDim */ AnyMap restore(const std::string& fname, const std::string& id, int loglevel=2); - /** - * Initialize the solution with a previously-saved solution (legacy implementation). - * @see restore - * @since New in Cantera 3.0. - */ - void read_yaml(const std::string& fname, const std::string& id, int loglevel=2); - //! @} // @deprecated To be removed after Cantera 3.0 (unused) diff --git a/include/cantera/oneD/StFlow.h b/include/cantera/oneD/StFlow.h index 741c81d796c..802d424a314 100644 --- a/include/cantera/oneD/StFlow.h +++ b/include/cantera/oneD/StFlow.h @@ -157,9 +157,7 @@ class StFlow : public Domain1D //! Print the solution. virtual void showSolution(const doublereal* x); - virtual AnyMap serialize(const double* soln) const; virtual shared_ptr asArray(const double* soln) const; - virtual void restore(const AnyMap& state, double* soln, int loglevel); virtual void restore(SolutionArray& arr, double* soln, int loglevel); //! 
Set flow configuration for freely-propagating flames, using an internal @@ -284,8 +282,8 @@ class StFlow : public Domain1D } protected: - void setMeta(const AnyMap& state); virtual AnyMap getMeta() const; + virtual void setMeta(const AnyMap& state, int loglevel); doublereal wdot(size_t k, size_t j) const { return m_wdot(k,j); diff --git a/interfaces/cython/cantera/_onedim.pxd b/interfaces/cython/cantera/_onedim.pxd index 52612d4904a..9acc3da220c 100644 --- a/interfaces/cython/cantera/_onedim.pxd +++ b/interfaces/cython/cantera/_onedim.pxd @@ -125,8 +125,6 @@ cdef extern from "cantera/oneD/Sim1D.h": vector[double] getRefineCriteria(int) except +translate_exception void save(string, string, string, int, int) except +translate_exception CxxAnyMap restore(string, string, int) except +translate_exception - void write_yaml(string, string, string, int) except +translate_exception - void read_yaml(string, string, int) except +translate_exception void writeStats(int) except +translate_exception void clearStats() void resize() except +translate_exception diff --git a/interfaces/cython/cantera/_onedim.pyx b/interfaces/cython/cantera/_onedim.pyx index a2a1a997e72..5d20cc2d8a6 100644 --- a/interfaces/cython/cantera/_onedim.pyx +++ b/interfaces/cython/cantera/_onedim.pyx @@ -1484,17 +1484,6 @@ cdef class Sim1D: self.sim.save(stringify(str(filename)), stringify(name), stringify(description), loglevel, compression) - def write_yaml(self, filename, name='solution', description='none', - quiet=True): - """ - Save the solution in YAML format (legacy implementation) - - .. versionadded:: 3.0 - """ - loglevel = 1 - quiet - self.sim.write_yaml(stringify(str(filename)), stringify(name), - stringify(description), loglevel) - def restore(self, filename='soln.yaml', name='solution', loglevel=2): """Set the solution vector to a previously-saved solution. 
@@ -1518,16 +1507,6 @@ cdef class Sim1D: self._initialized = True return anymap_to_dict(header) - def read_yaml(self, filename, name='solution', description='none', quiet=True): - """ - Set the solution vector to a previously-saved solution (legacy implementation) - - .. versionadded:: 3.0 - """ - loglevel = 2 * (1 - quiet) - self.sim.read_yaml(stringify(str(filename)), stringify(name), loglevel) - self._initialized = True - def restore_time_stepping_solution(self): """ Set the current solution vector to the last successful time-stepping diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index 548824da0eb..ec28a23517a 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -219,22 +219,6 @@ void Inlet1D::eval(size_t jg, double* xg, double* rg, } } -AnyMap Inlet1D::serialize(const double* soln) const -{ - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "inlet"; - state["temperature"] = m_temp; - state["mass-flux"] = m_mdot; - AnyMap Y; - for (size_t k = 0; k < m_nsp; k++) { - if (m_yin[k] != 0.0) { - Y[m_flow->phase().speciesName(k)] = m_yin[k]; - } - } - state["mass-fractions"] = std::move(Y); - return state; -} - shared_ptr Inlet1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); @@ -253,32 +237,9 @@ shared_ptr Inlet1D::asArray(const double* soln) const return arr; } -void Inlet1D::restore(const AnyMap& state, double* soln, int loglevel) -{ - Boundary1D::restore(state, soln, loglevel); - m_mdot = state["mass-flux"].asDouble(); - m_temp = state["temperature"].asDouble(); - const auto& Y = state["mass-fractions"].as(); - ThermoPhase& thermo = m_flow->phase(); - for (size_t k = 0; k < m_nsp; k++) { - m_yin[k] = Y.getDouble(thermo.speciesName(k), 0.0); - } - - // Warn about species not in the current phase - if (loglevel) { - for (auto& item : Y) { - if (thermo.speciesIndex(item.first) == npos) { - warn_user("Inlet1D::restore", "Phase '{}' does not contain a species " - "named '{}'\n which was specified 
as having a mass fraction of {}.", - thermo.name(), item.first, item.second.asDouble()); - } - } - } -} - void Inlet1D::restore(SolutionArray& arr, double* soln, int loglevel) { - Boundary1D::restore(arr.meta(), soln, loglevel); + Boundary1D::setMeta(arr.meta(), loglevel); arr.setIndex(0); auto phase = arr.thermo(); auto meta = arr.meta(); @@ -305,13 +266,6 @@ void Empty1D::eval(size_t jg, double* xg, double* rg, { } -AnyMap Empty1D::serialize(const double* soln) const -{ - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "empty"; - return state; -} - shared_ptr Empty1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); @@ -365,13 +319,6 @@ void Symm1D::eval(size_t jg, double* xg, double* rg, integer* diagg, } } -AnyMap Symm1D::serialize(const double* soln) const -{ - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "symmetry"; - return state; -} - shared_ptr Symm1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); @@ -450,13 +397,6 @@ void Outlet1D::eval(size_t jg, double* xg, double* rg, integer* diagg, } } -AnyMap Outlet1D::serialize(const double* soln) const -{ - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "outlet"; - return state; -} - shared_ptr Outlet1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); @@ -562,21 +502,6 @@ void OutletRes1D::eval(size_t jg, double* xg, double* rg, } } -AnyMap OutletRes1D::serialize(const double* soln) const -{ - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "outlet-reservoir"; - state["temperature"] = m_temp; - AnyMap Y; - for (size_t k = 0; k < m_nsp; k++) { - if (m_yres[k] != 0.0) { - Y[m_flow->phase().speciesName(k)] = m_yres[k]; - } - } - state["mass-fractions"] = std::move(Y); - return state; -} - shared_ptr OutletRes1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); @@ -595,32 +520,9 @@ shared_ptr OutletRes1D::asArray(const double* soln) const return arr; } -void 
OutletRes1D::restore(const AnyMap& state, double* soln, int loglevel) -{ - Boundary1D::restore(state, soln, loglevel); - m_temp = state["temperature"].asDouble(); - const auto& Y = state["mass-fractions"].as(); - ThermoPhase& thermo = m_flow->phase(); - for (size_t k = 0; k < m_nsp; k++) { - m_yres[k] = Y.getDouble(thermo.speciesName(k), 0.0); - } - - // Warn about species not in the current phase - if (loglevel) { - for (auto& item : Y) { - if (thermo.speciesIndex(item.first) == npos) { - warn_user("OutletRes1D::restore", "Phase '{}' does not contain a " - "species named '{}'\nwhich was specified as having a mass " - "fraction of {}.", - thermo.name(), item.first, item.second.asDouble()); - } - } - } -} - void OutletRes1D::restore(SolutionArray& arr, double* soln, int loglevel) { - Boundary1D::restore(arr.meta(), soln, loglevel); + Boundary1D::setMeta(arr.meta(), loglevel); arr.setIndex(0); auto phase = arr.thermo(); m_temp = phase->temperature(); @@ -660,14 +562,6 @@ void Surf1D::eval(size_t jg, double* xg, double* rg, } } -AnyMap Surf1D::serialize(const double* soln) const -{ - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "surface"; - state["temperature"] = m_temp; - return state; -} - shared_ptr Surf1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); @@ -676,15 +570,9 @@ shared_ptr Surf1D::asArray(const double* soln) const return SolutionArray::create(m_solution, 0, meta); } -void Surf1D::restore(const AnyMap& state, double* soln, int loglevel) -{ - Boundary1D::restore(state, soln, loglevel); - m_temp = state["temperature"].asDouble(); -} - void Surf1D::restore(SolutionArray& arr, double* soln, int loglevel) { - Boundary1D::restore(arr.meta(), soln, loglevel); + Boundary1D::setMeta(arr.meta(), loglevel); arr.setIndex(0); m_temp = arr.thermo()->temperature(); } @@ -853,23 +741,6 @@ void ReactingSurf1D::eval(size_t jg, double* xg, double* rg, } } -AnyMap ReactingSurf1D::serialize(const double* soln) const -{ - AnyMap 
state = Boundary1D::serialize(soln); - state["type"] = "reacting-surface"; - state["temperature"] = m_temp; - state["phase"]["name"] = m_sphase->name(); - AnyValue source =m_sphase->input().getMetadata("filename"); - state["phase"]["source"] = source.empty() ? "" : source.asString(); - AnyMap cov; - for (size_t k = 0; k < m_nsp; k++) { - cov[m_sphase->speciesName(k)] = soln[k]; - } - state["coverages"] = std::move(cov); - - return state; -} - shared_ptr ReactingSurf1D::asArray(const double* soln) const { AnyMap meta = Boundary1D::getMeta(); @@ -890,31 +761,9 @@ shared_ptr ReactingSurf1D::asArray(const double* soln) const return arr; } -void ReactingSurf1D::restore(const AnyMap& state, double* soln, int loglevel) -{ - Boundary1D::restore(state, soln, loglevel); - m_temp = state["temperature"].asDouble(); - const auto& cov = state["coverages"].as(); - for (size_t k = 0; k < m_nsp; k++) { - soln[k] = cov.getDouble(m_sphase->speciesName(k), 0.0); - } - - // Warn about species not in the current phase - if (loglevel) { - for (auto& item : cov) { - if (m_sphase->speciesIndex(item.first) == npos) { - warn_user("OutletRes1D::restore", "Phase '{}' does not contain a " - "species named '{}'\nwhich was specified as having a coverage " - "of {}.", - m_sphase->name(), item.first, item.second.asDouble()); - } - } - } -} - void ReactingSurf1D::restore(SolutionArray& arr, double* soln, int loglevel) { - Boundary1D::restore(arr.meta(), soln, loglevel); + Boundary1D::setMeta(arr.meta(), loglevel); arr.setIndex(0); auto surf = std::dynamic_pointer_cast(arr.thermo()); if (!surf) { diff --git a/src/oneD/Domain1D.cpp b/src/oneD/Domain1D.cpp index 7ff91730b8f..af30fdb421f 100644 --- a/src/oneD/Domain1D.cpp +++ b/src/oneD/Domain1D.cpp @@ -9,6 +9,7 @@ #include "cantera/oneD/MultiJac.h" #include "cantera/oneD/refine.h" #include "cantera/base/AnyMap.h" +#include "cantera/base/SolutionArray.h" #include @@ -143,10 +144,15 @@ AnyMap Domain1D::getMeta() const AnyMap Domain1D::serialize(const 
double* soln) const { - return getMeta(); + warn_deprecated("Domain1D::serialize", + "To be removed after Cantera 3.0; superseded by asArray."); + AnyMap out; + auto arr = asArray(soln); + arr->writeEntry(out, ""); + return out; } -void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) +void Domain1D::setMeta(const AnyMap& meta, int loglevel) { auto set_tols = [&](const AnyValue& tols, const string& which, vector_fp& out) { @@ -162,15 +168,15 @@ void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) if (tol.hasKey(name)) { out[i] = tol[name].asDouble(); } else if (loglevel) { - warn_user("Domain1D::restore", "No {} found for component '{}'", + warn_user("Domain1D::setMeta", "No {} found for component '{}'", which, name); } } } }; - if (state.hasKey("tolerances")) { - const auto& tols = state["tolerances"]; + if (meta.hasKey("tolerances")) { + const auto& tols = meta["tolerances"]; set_tols(tols, "transient-abstol", m_atol_ts); set_tols(tols, "transient-reltol", m_rtol_ts); set_tols(tols, "steady-abstol", m_atol_ss); @@ -178,6 +184,15 @@ void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) } } +void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) +{ + warn_deprecated("Domain1D::restore", + "To be removed after Cantera 3.0; restore from SolutionArray instead."); + auto arr = SolutionArray::create(solution()); + arr->readEntry(state, ""); + restore(*arr, soln, loglevel); +} + void Domain1D::locate() { if (m_left) { diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index c4d08a53340..90f9773210c 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -136,52 +136,6 @@ void Sim1D::save(const std::string& fname, const std::string& id, "Unsupported file format '{}'", extension); } -void Sim1D::write_yaml(const std::string& fname, const std::string& id, - const std::string& desc, int loglevel) -{ - // Check for an existing file and load it if present - AnyMap data; - if (ifstream(fname).good()) 
{ - data = AnyMap::fromYamlFile(fname); - } - bool preexisting = data.hasKey(id); - - // Add this simulation to the YAML - data[id] = serialize(m_x.data()); - - // Add metadata - data[id]["description"] = desc; - data[id]["generator"] = "Cantera Sim1D"; - data[id]["cantera-version"] = CANTERA_VERSION; - data[id]["git-commit"] = gitCommit(); - - // Add a timestamp indicating the current time - time_t aclock; - ::time(&aclock); // Get time in seconds - struct tm* newtime = localtime(&aclock); // Convert time to struct tm form - data[id]["date"] = stripnonprint(asctime(newtime)); - - // Force metadata fields to the top of the file - data[id]["description"].setLoc(-6, 0); - data[id]["generator"].setLoc(-5, 0); - data[id]["cantera-version"].setLoc(-4, 0); - data[id]["git-commit"].setLoc(-3, 0); - data[id]["date"].setLoc(-2, 0); - - // If this is not replacing an existing solution, put it at the end - if (!preexisting) { - data[id].setLoc(INT_MAX, 0); - } - - // Write the output file and remove the now-outdated cached file - std::ofstream out(fname); - out << data.toYamlString(); - AnyMap::clearCachedFile(fname); - if (loglevel > 0) { - writelog("Solution saved to file {} as solution '{}'.\n", fname, id); - } -} - void Sim1D::saveResidual(const std::string& fname, const std::string& id, const std::string& desc, int loglevel) { @@ -328,38 +282,6 @@ AnyMap Sim1D::restore(const std::string& fname, const std::string& id, return header; } -void Sim1D::read_yaml(const std::string& fname, const std::string& id, - int loglevel) -{ - size_t dot = fname.find_last_of("."); - string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot+1)) : ""; - if (extension == "xml") { - throw CanteraError("Sim1D::restore", - "Restoring from XML is no longer supported."); - } - AnyMap root = AnyMap::fromYamlFile(fname); - if (!root.hasKey(id)) { - throw InputFileError("Sim1D::restore", root, - "No solution with id '{}'", id); - } - const auto& state = root[id]; - for (auto dom : m_dom) { - if (!state.hasKey(dom->id())) { - throw InputFileError("Sim1D::restore", state, - "Saved state '{}' does not contain a domain named '{}'.", - id, dom->id()); - } - dom->resize(dom->nComponents(), state[dom->id()]["points"].asInt()); - } - resize(); - m_xlast_ts.clear(); - for (auto dom : m_dom) { - dom->restore(state[dom->id()].as(), m_x.data() + dom->loc(), - loglevel); - } - finalize(); -} - void Sim1D::setFlatProfile(size_t dom, size_t comp, doublereal v) { size_t np = domain(dom).nPoints(); diff --git a/src/oneD/StFlow.cpp b/src/oneD/StFlow.cpp index 4815014a430..9fb2e53c76c 100644 --- a/src/oneD/StFlow.cpp +++ b/src/oneD/StFlow.cpp @@ -732,31 +732,6 @@ AnyMap StFlow::getMeta() const return state; } -AnyMap StFlow::serialize(const double* soln) const -{ - auto state = getMeta(); - - // m_rho - - state["pressure"] = m_press; - state["grid"] = m_z; - vector_fp data(nPoints()); - for (size_t i = 0; i < nComponents(); i++) { - if (componentActive(i)) { - for (size_t j = 0; j < nPoints(); j++) { - data[j] = soln[index(i,j)]; - } - state[componentName(i)] = data; - } - } - - if (m_do_radiation) { - state["radiative-heat-loss"] = m_qdotRadiation; - } - - return state; -} - shared_ptr StFlow::asArray(const double* soln) const { auto arr = SolutionArray::create(m_solution, nPoints(), getMeta()); @@ -779,35 +754,9 @@ shared_ptr StFlow::asArray(const double* soln) const return arr; } -void StFlow::restore(const AnyMap& state, double* soln, int loglevel) -{ - Domain1D::restore(state, soln, loglevel); - m_press = state["pressure"].asDouble(); - setupGrid(nPoints(), 
state["grid"].asVector(nPoints()).data()); - - for (size_t i = 0; i < nComponents(); i++) { - if (!componentActive(i)) { - continue; - } - std::string name = componentName(i); - if (state.hasKey(name)) { - const vector_fp& data = state[name].asVector(nPoints()); - for (size_t j = 0; j < nPoints(); j++) { - soln[index(i,j)] = data[j]; - } - } else if (loglevel) { - warn_user("StFlow::restore", "Saved state does not contain values for " - "component '{}' in domain '{}'.", name, id()); - } - } - - updateProperties(npos, soln + loc(), 0, m_points - 1); - setMeta(state); -} - void StFlow::restore(SolutionArray& arr, double* soln, int loglevel) { - Domain1D::restore(arr.meta(), soln, loglevel); + Domain1D::setMeta(arr.meta(), loglevel); arr.setIndex(0); auto phase = arr.thermo(); m_press = phase->pressure(); @@ -832,10 +781,10 @@ void StFlow::restore(SolutionArray& arr, double* soln, int loglevel) } updateProperties(npos, soln + loc(), 0, m_points - 1); - setMeta(arr.meta()); + setMeta(arr.meta(), loglevel); } -void StFlow::setMeta(const AnyMap& state) +void StFlow::setMeta(const AnyMap& state, int loglevel) { if (state.hasKey("energy-enabled")) { const AnyValue& ee = state["energy-enabled"]; From 253e5e79a3772a8f840fb5783e64e672bf0e4c96 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Thu, 22 Dec 2022 22:59:48 +0100 Subject: [PATCH 79/93] [unittest] Update Sim1D.save/restore tests --- test/data/adiabatic_flame_legacy.yaml | 78 +++++++++++++++++++++++++++ test/python/test_onedim.py | 69 ++++++++++-------------- 2 files changed, 106 insertions(+), 41 deletions(-) create mode 100644 test/data/adiabatic_flame_legacy.yaml diff --git a/test/data/adiabatic_flame_legacy.yaml b/test/data/adiabatic_flame_legacy.yaml new file mode 100644 index 00000000000..dc4b3359f65 --- /dev/null +++ b/test/data/adiabatic_flame_legacy.yaml @@ -0,0 +1,78 @@ +setup: + description: initial guess + generator: Cantera Sim1D + cantera-version: 2.6.0 + git-commit: unknown + date: Thu Dec 22 22:50:32 
2022 + reactants: + points: 1 + type: inlet + temperature: 300.0 + mass-flux: 1.338612362800703 + mass-fractions: + H2: 9.478316470455491e-03 + O2: 0.1367636951757011 + AR: 0.8537579883538435 + flame: + points: 9 + tolerances: + transient-abstol: 1.0e-11 + steady-abstol: 1.0e-09 + transient-reltol: 1.0e-04 + steady-reltol: 1.0e-04 + type: Free Flame + pressure: 1.01325e+05 + phase: + name: ohmech + source: /opt/homebrew/Caskroom/miniforge/base/envs/cantera/lib/python3.10/site-packages/cantera/data/h2o2.yaml + radiation-enabled: false + energy-enabled: true + Soret-enabled: false + species-enabled: true + refine-criteria: + ratio: 3.0 + slope: 0.06 + curve: 0.12 + prune: 0.0 + grid-min: 1.0e-10 + max-points: 1000 + fixed-point: + location: 0.0105 + temperature: 698.1678485027373 + grid: [0.0, 6.0e-03, 9.0e-03, 0.0105, 0.012, 0.015, 0.018, 0.024, 0.03] + velocity: [1.0, 1.0, 1.0, 2.205581220696009, 3.411162441392019, 5.822324882784038, + 5.822324882784038, 5.822324882784038, 5.822324882784038] + T: [300.0, 300.0, 300.0, 698.1678485027371, 1096.335697005475, + 1892.671394010949, 1892.671394010949, 1892.671394010949, + 1892.671394010949] + H2: [9.478316470455491e-03, 9.478316470455491e-03, 9.478316470455491e-03, + 7.109842855845580e-03, 4.741369241235666e-03, 4.422012015843635e-06, + 4.422012015843405e-06, 4.422012015843405e-06, 4.422012015843405e-06] + H: [0.0, 0.0, 0.0, 4.691903645074276e-08, 9.383807290148556e-08, + 1.876761458029711e-07, 1.876761458029711e-07, 1.876761458029711e-07, + 1.876761458029711e-07] + O: [0.0, 0.0, 0.0, 8.204442509821813e-06, 1.640888501964364e-05, + 3.281777003928727e-05, 3.281777003928727e-05, 3.281777003928727e-05, + 3.281777003928727e-05] + O2: [0.1367636951757011, 0.1367636951757011, 0.1367636951757011, + 0.1178952114224941, 0.09902672766928713, 0.06128976016287324, + 0.06128976016287325, 0.06128976016287325, 0.06128976016287325] + OH: [0.0, 0.0, 0.0, 1.365789095524984e-04, 2.731578191049970e-04, + 5.463156382099939e-04, 
5.463156382099939e-04, 5.463156382099939e-04, + 5.463156382099939e-04] + H2O: [0.0, 0.0, 0.0, 0.02109188182623040, 0.04218376365246081, + 0.08436752730492161, 0.08436752730492161, 0.08436752730492161, + 0.08436752730492161] + HO2: [0.0, 0.0, 0.0, 2.318933184184578e-07, 4.637866368369159e-07, + 9.275732736738316e-07, 9.275732736738315e-07, 9.275732736738315e-07, + 9.275732736738315e-07] + H2O2: [0.0, 0.0, 0.0, 1.337716924587937e-08, 2.675433849175876e-08, + 5.350867698351750e-08, 5.350867698351750e-08, 5.350867698351750e-08, + 5.350867698351750e-08] + AR: [0.8537579883538435, 0.8537579883538435, 0.8537579883538435, + 0.8537579883538435, 0.8537579883538436, 0.8537579883538436, + 0.8537579883538436, 0.8537579883538436, 0.8537579883538436] + N2: [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + products: + points: 1 + type: outlet diff --git a/test/python/test_onedim.py b/test/python/test_onedim.py index 70f539da4ff..07e64ae6ae0 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -515,17 +515,25 @@ def test_prune(self): # TODO: check that the solution is actually correct (that is, that the # residual satisfies the error tolerances) on the new grid. 
- def test_save_restore_yaml_legacy(self): - self.run_save_restore("legacy") - - def test_save_restore_yaml_transition(self): - self.run_save_restore("transition") + def test_restore_legacy_yaml(self): + reactants = 'H2:1.1, O2:1, AR:5' + p = 5 * ct.one_atm + Tin = 600 + self.create_sim(p, Tin, reactants) + meta = self.sim.restore("adiabatic_flame_legacy.yaml", "setup") + assert meta["generator"] == "Cantera Sim1D" + assert meta["cantera-version"] == "2.6.0" + assert self.sim.inlet.T == 300 + assert self.sim.P == pytest.approx(ct.one_atm) + assert len(self.sim.grid) == 9 def test_save_restore_yaml_array(self): + # save and restore using YAML format self.run_save_restore("array") @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") def test_save_restore_hdf_array(self): + # save and restore using HDF format self.run_save_restore("hdf") def run_save_restore(self, mode): @@ -550,26 +558,16 @@ def run_save_restore(self, mode): V1 = self.sim.spread_rate P1 = self.sim.P T1 = self.sim.T - - if mode in {"array", "hdf"}: - self.sim.save(filename, "test", loglevel=0) - else: - self.sim.write_yaml(filename, "test", quiet=True) + self.sim.save(filename, "test", loglevel=0) # Save a second solution to the same file self.sim.radiation_enabled = True self.sim.boundary_emissivities = 0.3, 0.8 - if mode in {"array", "hdf"}: - self.sim.save(filename, "test2", loglevel=0) - else: - self.sim.write_yaml(filename, "test2", quiet=True) + self.sim.save(filename, "test2", loglevel=0) # Create flame object with dummy initial grid self.sim = ct.FreeFlame(self.gas) - if mode == "legacy": - self.sim.read_yaml(filename, "test", quiet=True) - else: - self.sim.restore(filename, "test", loglevel=0) + self.sim.restore(filename, "test", loglevel=0) # Sim is initially in "steady-state" mode, so this returns the # steady-state tolerances @@ -607,10 +605,7 @@ def run_save_restore(self, mode): self.assertFalse(self.sim.radiation_enabled) 
self.assertFalse(self.sim.soret_enabled) - if mode == "legacy": - self.sim.read_yaml(filename, "test2", quiet=True) - else: - self.sim.restore(filename, "test2", loglevel=0) + self.sim.restore(filename, "test2", loglevel=0) self.assertTrue(self.sim.radiation_enabled) self.assertEqual(self.sim.boundary_emissivities, (0.3, 0.8)) @@ -731,15 +726,18 @@ def test_write_csv(self): @pytest.mark.usefixtures("allow_deprecated") @utilities.unittest.skipIf("h5py" not in ct.hdf_support(), "h5py not installed") def test_write_hdf_legacy(self): + # save and restore legacy h5py format self.run_freeflame_write_hdf("legacy") @pytest.mark.usefixtures("allow_deprecated") @utilities.unittest.skipIf(ct.hdf_support() != {"h5py", "native"}, "h5py and/or HighFive not installed") def test_write_hdf_transition(self): + # save legacy h5py format / restore with HighFive self.run_freeflame_write_hdf("transition") @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") def test_write_hdf_native(self): + # save and restore with updated format (HighFive only) self.run_freeflame_write_hdf("native") def run_freeflame_write_hdf(self, mode): @@ -1316,15 +1314,18 @@ def test_reacting_surface_case3(self): @pytest.mark.usefixtures("allow_deprecated") @utilities.unittest.skipIf("h5py" not in ct.hdf_support(), "h5py not installed") def test_write_hdf_legacy(self): + # save and restore legacy h5py format self.run_impingingjet_write("legacy") @pytest.mark.usefixtures("allow_deprecated") @utilities.unittest.skipIf(ct.hdf_support() != {"h5py", "native"}, "h5py and/or HighFive not installed") def test_write_hdf_transition(self): + # save legacy h5py format and restore using HighFive self.run_impingingjet_write("transition") @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") def test_write_hdf_native(self): + # save and restore updated HDF format self.run_impingingjet_write("native") def test_write_yaml_native(self): @@ -1374,16 +1375,7 @@ def 
run_impingingjet_write(self, mode): jet.solve(loglevel=0) - def test_save_restore_yaml_legacy(self): - self.run_save_restore_yaml("legacy") - - def test_save_restore_yaml_transition(self): - self.run_save_restore_yaml("transition") - def test_save_restore_yaml_array(self): - self.run_save_restore_yaml("array") - - def run_save_restore_yaml(self, mode): comp = {'CH4': 0.095, 'O2': 0.21, 'N2': 0.79} self.sim = self.create_reacting_surface(comp, tsurf=900, tinlet=300, width=0.1) @@ -1394,20 +1386,13 @@ def run_save_restore_yaml(self, mode): self.sim.solve(loglevel=0, auto=False) - filename = self.test_work_path / f"impingingjet-{mode}.yaml" + filename = self.test_work_path / f"impingingjet.yaml" filename.unlink(missing_ok=True) - - if mode == "array": - self.sim.save(filename, "test", loglevel=0) - else: - self.sim.write_yaml(filename, "test", quiet=True) + self.sim.save(filename, "test", loglevel=0) self.surf_phase.TPX = 300, ct.one_atm, "PT(S):1" sim2 = ct.ImpingingJet(gas=self.gas, width=0.12, surface=self.surf_phase) - if mode == "legacy": - sim2.read_yaml(filename, "test", quiet=True) - else: - sim2.restore(filename, "test", loglevel=0) + sim2.restore(filename, "test", loglevel=0) self.assertArrayNear(self.sim.grid, sim2.grid) self.assertArrayNear(self.sim.Y, sim2.Y) @@ -1450,10 +1435,12 @@ def test_restart(self): self.assertNear(sim.T[0], sim.reactants.T, 1e-4) def test_save_restore_yaml(self): + # save and restore using YAML format self.run_save_restore("yaml") @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") def test_save_restore_hdf(self): + # save and restore using HDF format self.run_save_restore("hdf") def run_save_restore(self, mode): From dff1afd4ceea88c1ddb3e4f7251f5f1b8e47fc02 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Fri, 23 Dec 2022 12:56:53 +0100 Subject: [PATCH 80/93] [base] Prevent ambiguous YAML components --- src/base/SolutionArray.cpp | 42 +++++++++++++++++++++++++++----------- src/oneD/StFlow.cpp 
| 2 -- 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index 4862fc40643..7611d87417a 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -311,18 +311,26 @@ void SolutionArray::writeEntry(AnyMap& root, const std::string& id) data["mass-fractions"] = std::move(items); } } else if (m_size > 1) { + std::vector components; + for (auto& extra : m_extra) { + components.push_back(extra.first); + } + const auto& nativeState = phase->nativeState(); for (auto& state : nativeState) { std::string name = state.first; if (name == "X" || name == "Y") { - data["basis"] = name == "X" ? "mole" : "mass"; - for (auto& name : phase->speciesNames()) { - data[name] = getComponent(name); + for (auto& spc : phase->speciesNames()) { + data[spc] = getComponent(spc); + components.push_back(spc); } + data["basis"] = name == "X" ? "mole" : "mass"; } else { data[name] = getComponent(name); + components.push_back(name); } } + data["components"] = components; } // If this is not replacing an existing solution, put it at the end @@ -613,20 +621,30 @@ void SolutionArray::readEntry(const AnyMap& root, const std::string& id) exclude.insert(props.begin(), props.end()); } else { // multiple data points - const auto& nativeState = m_sol->thermo()->nativeState(); - for (const auto& item : sub) { - const std::string& name = item.first; - const AnyValue& value = item.second; - if (value.is>()) { - const vector_fp& data = value.as>(); - if (data.size() == m_size) { - setComponent(name, data, true); - exclude.insert(item.first); + if (sub.hasKey("components")) { + const auto& components = sub["components"].as>(); + for (const auto& name : components) { + auto data = sub[name].asVector(m_size); + setComponent(name, data, true); + exclude.insert(name); + } + } else { + // legacy YAML format does not provide for list of components + for (const auto& item : sub) { + const std::string& name = item.first; + const 
AnyValue& value = item.second; + if (value.is>()) { + const vector_fp& data = value.as>(); + if (data.size() == m_size) { + setComponent(name, data, true); + exclude.insert(item.first); + } } } } // check that state data are complete + const auto& nativeState = m_sol->thermo()->nativeState(); std::set props = {}; std::set missingProps = {}; for (const auto& item : nativeState) { diff --git a/src/oneD/StFlow.cpp b/src/oneD/StFlow.cpp index 9fb2e53c76c..bfeebdc2e65 100644 --- a/src/oneD/StFlow.cpp +++ b/src/oneD/StFlow.cpp @@ -727,8 +727,6 @@ AnyMap StFlow::getMeta() const state["fixed-point"]["temperature"] = m_tfixed; } - state["species-names"] = m_thermo->speciesNames(); - return state; } From 1138adfd4bc83131efc2371ee5f9ac0c825f3de7 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Wed, 21 Dec 2022 10:02:24 -0500 Subject: [PATCH 81/93] [Test] Adjust integrator tolerance for MoleReactor tests Running with very loose integrator tolerances leads to integrator errors for certain system configurations. This was seen specifically on the "Sundials" CI runners. 
--- test/python/test_reactor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/python/test_reactor.py b/test/python/test_reactor.py index 25feeb899cb..1df66fc61ad 100644 --- a/test/python/test_reactor.py +++ b/test/python/test_reactor.py @@ -258,7 +258,7 @@ def integrate(atol, rtol): return nSteps n_baseline = integrate(1e-10, 1e-20) - n_rtol = integrate(5e-7, 1e-20) + n_rtol = integrate(1e-7, 1e-20) n_atol = integrate(1e-10, 1e-5) assert n_baseline > n_rtol assert n_baseline > n_atol From 9028139976f9e86a86d35fe2997e27b90420898f Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Fri, 23 Dec 2022 20:30:38 +0100 Subject: [PATCH 82/93] [Base] Mandatory index for SolutionArray::get/setState --- include/cantera/base/SolutionArray.h | 21 ++++++--------------- src/base/SolutionArray.cpp | 2 +- src/oneD/Boundary1D.cpp | 6 +++--- 3 files changed, 10 insertions(+), 19 deletions(-) diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h index 85e000b628b..020c25819ca 100644 --- a/include/cantera/base/SolutionArray.h +++ b/include/cantera/base/SolutionArray.h @@ -85,23 +85,14 @@ class SolutionArray */ void setIndex(size_t index, bool restore=true); - /*! - * Retrieve the state vector for a single entry. If index is valid, it is updated; - * otherwise, the last previously used index is referenced. - */ - vector_fp getState(size_t index=npos); + //! Retrieve the state vector for a single entry. + vector_fp getState(size_t index); - /*! - * Set the state vector for a single entry. If index is valid, it is updated; - * otherwise, the last previously used index is referenced. - */ - void setState(const vector_fp& data, size_t index=npos); + //! Set the state vector for a single entry + void setState(size_t index, const vector_fp& data); - /*! - * Retrieve auxiliary data for a single entry. If index is valid, it is updated; - * otherwise, the last previously used index is referenced. 
- */ - std::map getAuxiliary(size_t index=npos); + //! Retrieve auxiliary data for a single entry. + std::map getAuxiliary(size_t index); /*! * Write header data to container file. diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index 7611d87417a..ed23191ba02 100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -162,7 +162,7 @@ vector_fp SolutionArray::getState(size_t index) return out; } -void SolutionArray::setState(const vector_fp& data, size_t index) +void SolutionArray::setState(size_t index, const vector_fp& data) { setIndex(index, false); m_sol->thermo()->restoreState(data); diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index ec28a23517a..cbd7bb6479c 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -233,7 +233,7 @@ shared_ptr Inlet1D::asArray(const double* soln) const phase->saveState(data); auto arr = SolutionArray::create(m_solution, 1, meta); - arr->setState(data, 0); + arr->setState(0, data); return arr; } @@ -516,7 +516,7 @@ shared_ptr OutletRes1D::asArray(const double* soln) const phase->saveState(data); auto arr = SolutionArray::create(m_solution, 1, meta); - arr->setState(data, 0); + arr->setState(0, data); return arr; } @@ -757,7 +757,7 @@ shared_ptr ReactingSurf1D::asArray(const double* soln) const m_sphase->saveState(data.size(), &data[0]); auto arr = SolutionArray::create(m_solution, 1, meta); - arr->setState(data, 0); + arr->setState(0, data); return arr; } From 1d65af8d46c54f31d6a0dc2f4d44541373240472 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Mon, 26 Dec 2022 10:06:54 +0100 Subject: [PATCH 83/93] [Base] Improve error handling for native HDF5 storage --- src/base/SolutionArray.cpp | 22 ++++++++++---------- src/base/Storage.cpp | 42 +++++++++++++++++++++++++++++--------- src/oneD/Boundary1D.cpp | 4 ++-- 3 files changed, 45 insertions(+), 23 deletions(-) diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp index ed23191ba02..33ab606532a 
100644 --- a/src/base/SolutionArray.cpp +++ b/src/base/SolutionArray.cpp @@ -19,9 +19,6 @@ namespace ba = boost::algorithm; -namespace Cantera -{ - const std::map aliasMap = { {"T", "temperature"}, {"P", "pressure"}, @@ -36,6 +33,9 @@ const std::map aliasMap = { {"Q", "vapor-fraction"}, }; +namespace Cantera +{ + SolutionArray::SolutionArray( const shared_ptr& sol, size_t size, @@ -364,7 +364,7 @@ void SolutionArray::save(const std::string& fname, const std::string& id, AnyMap::clearCachedFile(fname); return; } - throw CanteraError("SolutionArray::writeHeader", + throw CanteraError("SolutionArray::save", "Unknown file extension '{}'", extension); } @@ -386,7 +386,7 @@ const AnyMap& locateField(const AnyMap& root, const std::string& id) path += "/" + field; const AnyMap& sub = *ptr; if (!sub.hasKey(field) || !sub[field].is()) { - throw CanteraError("SolutionArray::restore", + throw CanteraError("SolutionArray::locateField", "No field or solution with id '{}'", path); } ptr = &sub[field].as(); // AnyMap lacks 'operator=' for const AnyMap @@ -550,10 +550,10 @@ void SolutionArray::readEntry(const std::string& fname, const std::string& id) m_sol->thermo()->saveState(nState, &m_data[i * m_stride]); } } else if (mode == "") { - throw CanteraError("SolutionArray::restore", + throw CanteraError("SolutionArray::readEntry", "Data are not consistent with full state modes."); } else { - throw NotImplementedError("SolutionArray::restore", + throw NotImplementedError("SolutionArray::readEntry", "Import of '{}' data is not supported.", mode); } @@ -601,7 +601,7 @@ void SolutionArray::readEntry(const AnyMap& root, const std::string& id) } else if (mode == "TPC") { auto surf = std::dynamic_pointer_cast(m_sol->thermo()); if (!surf) { - throw CanteraError("SolutionArray::restore", + throw CanteraError("SolutionArray::readEntry", "Restoring of coverages requires surface phase"); } double T = sub["temperature"].asDouble(); @@ -610,10 +610,10 @@ void SolutionArray::readEntry(const 
AnyMap& root, const std::string& id) auto cov = sub["coverages"].asMap(); surf->setCoveragesByName(cov); } else if (mode == "") { - throw CanteraError("SolutionArray::restore", + throw CanteraError("SolutionArray::readEntry", "Data are not consistent with full state modes."); } else { - throw NotImplementedError("SolutionArray::restore", + throw NotImplementedError("SolutionArray::readEntry", "Import of '{}' data is not supported.", mode); } m_sol->thermo()->saveState(nState, m_data.data()); @@ -668,7 +668,7 @@ void SolutionArray::readEntry(const AnyMap& root, const std::string& id) m_data[offset_D + i * m_stride] = m_sol->thermo()->density(); } } else if (missingProps.size()) { - throw CanteraError("SolutionArray::restore", + throw CanteraError("SolutionArray::readEntry", "Incomplete state information: missing '{}'", ba::join(missingProps, "', '")); } diff --git a/src/base/Storage.cpp b/src/base/Storage.cpp index c00c50e9eff..ba19f5cf403 100644 --- a/src/base/Storage.cpp +++ b/src/base/Storage.cpp @@ -77,7 +77,7 @@ bool Storage::checkGroupRead(const std::string& id) const tokenizePath(id, tokens); std::string grp = tokens[0]; if (!m_file->exist(grp) || m_file->getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("Storage::checkGroup", + throw CanteraError("Storage::checkGroupRead", "No group with id '{}' found", grp); } @@ -87,7 +87,7 @@ bool Storage::checkGroupRead(const std::string& id) const for (auto& grp : tokens) { path += "/" + grp; if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { - throw CanteraError("Storage::checkGroup", + throw CanteraError("Storage::checkGroupRead", "No group with id '{}' found", path); } sub = sub.getGroup(grp); @@ -102,7 +102,7 @@ bool Storage::checkGroupWrite(const std::string& id) return true; } if (m_file->getObjectType(id) != h5::ObjectType::Group) { - throw CanteraError("Storage::checkGroup", + throw CanteraError("Storage::checkGroupWrite", "Invalid object with id '{}' exists", id); } 
return true; @@ -133,7 +133,7 @@ std::pair> Storage::contents(const std::string& id } if (nDims != 1 && nDims != npos) { throw NotImplementedError("Storage::content", - "Unable to restore data with {} dimensions.", nDims); + "Encountered invalid data with {} dimensions.", nDims); } return std::make_pair(nElements, names); } @@ -211,12 +211,21 @@ AnyMap readH5Attributes(const h5::Group& sub, bool recursive) AnyMap Storage::readAttributes(const std::string& id, bool recursive) const { h5::Group sub = m_file->getGroup(id); - return readH5Attributes(sub, recursive); + try { + return readH5Attributes(sub, recursive); + } catch (const Cantera::NotImplementedError& err) { + throw NotImplementedError("Storage::readAttribute", + "{} in group '{}'.", err.getMessage(), id); + } } void writeH5Attributes(h5::Group sub, const AnyMap& meta) { for (auto& item : meta) { + if (sub.hasAttribute(item.first)) { + throw NotImplementedError("writeH5Attributes", + "Unable to overwrite existing Attribute '{}'", item.first); + } if (item.second.is()) { double value = item.second.asDouble(); h5::Attribute attr = sub.createAttribute( @@ -268,7 +277,7 @@ void writeH5Attributes(h5::Group sub, const AnyMap& meta) auto grp = sub.createGroup(item.first); writeH5Attributes(grp, value); } else { - throw NotImplementedError("Storage::writeAttributes", + throw NotImplementedError("writeH5Attributes", "Unable to write attribute '{}' with type '{}'", item.first, item.second.type_str()); } @@ -278,7 +287,12 @@ void writeH5Attributes(h5::Group sub, const AnyMap& meta) void Storage::writeAttributes(const std::string& id, const AnyMap& meta) { h5::Group sub = m_file->getGroup(id); - writeH5Attributes(sub, meta); + try { + writeH5Attributes(sub, meta); + } catch (const Cantera::NotImplementedError& err) { + throw NotImplementedError("Storage::writeAttribute", + "{} in group '{}'.", err.getMessage(), id); + } } vector_fp Storage::readVector(const std::string& id, @@ -287,7 +301,7 @@ vector_fp 
Storage::readVector(const std::string& id, h5::Group sub = m_file->getGroup(id); if (!sub.exist(name)) { throw CanteraError("Storage::readVector", - "DataSet '{}' not found in path '{}'.", name, id); + "DataSet '{}' not found in group '{}'.", name, id); } h5::DataSet dataset = sub.getDataSet(name); if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { @@ -308,6 +322,10 @@ void Storage::writeVector(const std::string& id, const std::string& name, const vector_fp& data) { h5::Group sub = m_file->getGroup(id); + if (sub.exist(name)) { + throw NotImplementedError("Storage::writeVector", + "Unable to overwrite existing DataSet '{}' in group '{}'.", name, id); + } std::vector dims{data.size()}; h5::DataSet dataset = sub.createDataSet(name, h5::DataSpace(dims)); dataset.write(data); @@ -319,8 +337,8 @@ std::vector Storage::readMatrix(const std::string& id, { h5::Group sub = m_file->getGroup(id); if (!sub.exist(name)) { - throw CanteraError("Storage::readVector", - "DataSet '{}' not found in path '{}'.", name, id); + throw CanteraError("Storage::readMatrix", + "DataSet '{}' not found in group '{}'.", name, id); } h5::DataSet dataset = sub.getDataSet(name); if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { @@ -350,6 +368,10 @@ void Storage::writeMatrix(const std::string& id, const std::string& name, const std::vector& data) { h5::Group sub = m_file->getGroup(id); + if (sub.exist(name)) { + throw NotImplementedError("Storage::writeMatrix", + "Unable to overwrite existing DataSet '{}' in group '{}'.", name, id); + } std::vector dims{data.size()}; dims.push_back(data.size() ? 
data[0].size() : 0); if (m_compressionLevel) { diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index cbd7bb6479c..b3820321402 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -224,6 +224,7 @@ shared_ptr Inlet1D::asArray(const double* soln) const AnyMap meta = Boundary1D::getMeta(); meta["type"] = "inlet"; meta["mass-flux"] = m_mdot; + auto arr = SolutionArray::create(m_solution, 1, meta); // set gas state (using pressure from adjacent domain) double pressure = m_flow->phase().pressure(); @@ -232,7 +233,6 @@ shared_ptr Inlet1D::asArray(const double* soln) const vector_fp data(phase->stateSize()); phase->saveState(data); - auto arr = SolutionArray::create(m_solution, 1, meta); arr->setState(0, data); return arr; } @@ -507,6 +507,7 @@ shared_ptr OutletRes1D::asArray(const double* soln) const AnyMap meta = Boundary1D::getMeta(); meta["type"] = "outlet-reservoir"; meta["temperature"] = m_temp; + auto arr = SolutionArray::create(m_solution, 1, meta); // set gas state (using pressure from adjacent domain) double pressure = m_flow->phase().pressure(); @@ -515,7 +516,6 @@ shared_ptr OutletRes1D::asArray(const double* soln) const vector_fp data(phase->stateSize()); phase->saveState(data); - auto arr = SolutionArray::create(m_solution, 1, meta); arr->setState(0, data); return arr; } From d1d0027e9b85b068d0d524556258a5bba92ff105 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 27 Dec 2022 14:08:01 +0100 Subject: [PATCH 84/93] [OneD] Improve naming of Domain1D objects --- include/cantera/oneD/Boundary1D.h | 22 +++++++++----- include/cantera/oneD/IonFlow.h | 7 ++++- include/cantera/oneD/StFlow.h | 7 ++++- interfaces/cython/cantera/_onedim.pxd | 16 +++++----- interfaces/cython/cantera/_onedim.pyx | 42 +++++++++++++-------------- src/oneD/Boundary1D.cpp | 3 +- src/oneD/IonFlow.cpp | 5 ++-- src/oneD/StFlow.cpp | 5 ++-- 8 files changed, 64 insertions(+), 43 deletions(-) diff --git a/include/cantera/oneD/Boundary1D.h 
b/include/cantera/oneD/Boundary1D.h index 9898777111f..33c3a8e2ec5 100644 --- a/include/cantera/oneD/Boundary1D.h +++ b/include/cantera/oneD/Boundary1D.h @@ -106,8 +106,9 @@ class Inlet1D : public Boundary1D public: Inlet1D(); - Inlet1D(shared_ptr solution) : Inlet1D() { + Inlet1D(shared_ptr solution, const std::string& id="") : Inlet1D() { m_solution = solution; + m_id = id; } //! set spreading rate @@ -158,8 +159,9 @@ class Empty1D : public Boundary1D m_type = cEmptyType; } - Empty1D(shared_ptr solution) : Empty1D() { + Empty1D(shared_ptr solution, const std::string& id="") : Empty1D() { m_solution = solution; + m_id = id; } virtual void showSolution(const double* x) {} @@ -185,8 +187,9 @@ class Symm1D : public Boundary1D m_type = cSymmType; } - Symm1D(shared_ptr solution) : Symm1D() { + Symm1D(shared_ptr solution, const std::string& id="") : Symm1D() { m_solution = solution; + m_id = id; } virtual void init(); @@ -210,8 +213,9 @@ class Outlet1D : public Boundary1D m_type = cOutletType; } - Outlet1D(shared_ptr solution) : Outlet1D() { + Outlet1D(shared_ptr solution, const std::string& id="") : Outlet1D() { m_solution = solution; + m_id = id; } virtual void init(); @@ -233,8 +237,11 @@ class OutletRes1D : public Boundary1D public: OutletRes1D(); - OutletRes1D(shared_ptr solution) : OutletRes1D() { + OutletRes1D(shared_ptr solution, const std::string& id="") + : OutletRes1D() + { m_solution = solution; + m_id = id; } virtual void showSolution(const double* x) {} @@ -274,8 +281,9 @@ class Surf1D : public Boundary1D m_type = cSurfType; } - Surf1D(shared_ptr solution) : Surf1D() { + Surf1D(shared_ptr solution, const std::string& id="") : Surf1D() { m_solution = solution; + m_id = id; } virtual void init(); @@ -299,7 +307,7 @@ class ReactingSurf1D : public Boundary1D { public: ReactingSurf1D(); - ReactingSurf1D(shared_ptr solution); + ReactingSurf1D(shared_ptr solution, const std::string& id=""); void setKineticsMgr(InterfaceKinetics* kin); diff --git 
a/include/cantera/oneD/IonFlow.h b/include/cantera/oneD/IonFlow.h index 0ae63b66e39..57ef4f9d9e4 100644 --- a/include/cantera/oneD/IonFlow.h +++ b/include/cantera/oneD/IonFlow.h @@ -33,7 +33,12 @@ class IonFlow : public StFlow public: IonFlow(ThermoPhase* ph = 0, size_t nsp = 1, size_t points = 1); - IonFlow(shared_ptr sol, size_t nsp = 1, size_t points = 1); + //! Create a new flow domain. + //! @param sol Solution object used to evaluate all thermodynamic, kinetic, and + //! transport properties + //! @param id name of flow domain + //! @param points initial number of grid points + IonFlow(shared_ptr sol, const std::string& id="", size_t points = 1); //! set the solving stage virtual void setSolvingStage(const size_t phase); diff --git a/include/cantera/oneD/StFlow.h b/include/cantera/oneD/StFlow.h index 802d424a314..dfd152d2e48 100644 --- a/include/cantera/oneD/StFlow.h +++ b/include/cantera/oneD/StFlow.h @@ -56,7 +56,12 @@ class StFlow : public Domain1D StFlow(th.get(), nsp, points) { } - StFlow(shared_ptr sol, size_t nsp = 1, size_t points = 1); + //! Create a new flow domain. + //! @param sol Solution object used to evaluate all thermodynamic, kinetic, and + //! transport properties + //! @param id name of flow domain + //! @param points initial number of grid points + StFlow(shared_ptr sol, const std::string& id="", size_t points=1); //! @name Problem Specification //! 
@{ diff --git a/interfaces/cython/cantera/_onedim.pxd b/interfaces/cython/cantera/_onedim.pxd index 9acc3da220c..73946912a87 100644 --- a/interfaces/cython/cantera/_onedim.pxd +++ b/interfaces/cython/cantera/_onedim.pxd @@ -48,25 +48,25 @@ cdef extern from "cantera/oneD/Boundary1D.h": double massFraction(size_t) cdef cppclass CxxInlet1D "Cantera::Inlet1D": - CxxInlet1D(shared_ptr[CxxSolution]) + CxxInlet1D(shared_ptr[CxxSolution], const string&) double spreadRate() void setSpreadRate(double) cdef cppclass CxxOutlet1D "Cantera::Outlet1D": - CxxOutlet1D(shared_ptr[CxxSolution]) + CxxOutlet1D(shared_ptr[CxxSolution], const string&) cdef cppclass CxxOutletRes1D "Cantera::OutletRes1D": - CxxOutletRes1D(shared_ptr[CxxSolution]) + CxxOutletRes1D(shared_ptr[CxxSolution], const string&) cdef cppclass CxxSymm1D "Cantera::Symm1D": - CxxSymm1D(shared_ptr[CxxSolution]) + CxxSymm1D(shared_ptr[CxxSolution], const string&) cdef cppclass CxxSurf1D "Cantera::Surf1D": - CxxSurf1D(shared_ptr[CxxSolution]) + CxxSurf1D(shared_ptr[CxxSolution], const string&) cdef cppclass CxxReactingSurf1D "Cantera::ReactingSurf1D": CxxReactingSurf1D() # deprecated in Python API (Cantera 3.0) - CxxReactingSurf1D(shared_ptr[CxxSolution]) except +translate_exception + CxxReactingSurf1D(shared_ptr[CxxSolution], const string&) except +translate_exception void setKineticsMgr(CxxInterfaceKinetics*) except +translate_exception void enableCoverageEquations(cbool) except +translate_exception cbool coverageEnabled() @@ -74,7 +74,7 @@ cdef extern from "cantera/oneD/Boundary1D.h": cdef extern from "cantera/oneD/StFlow.h": cdef cppclass CxxStFlow "Cantera::StFlow": - CxxStFlow(shared_ptr[CxxSolution], int, int) except +translate_exception + CxxStFlow(shared_ptr[CxxSolution], const string&, int) except +translate_exception void setTransportModel(const string&) except +translate_exception void setTransport(CxxTransport&) except +translate_exception string transportModel() @@ -99,7 +99,7 @@ cdef extern from 
"cantera/oneD/StFlow.h": cdef extern from "cantera/oneD/IonFlow.h": cdef cppclass CxxIonFlow "Cantera::IonFlow": - CxxIonFlow(shared_ptr[CxxSolution], int, int) except +translate_exception + CxxIonFlow(shared_ptr[CxxSolution], const string&, int) except +translate_exception void setSolvingStage(int) void solveElectricField() void fixElectricField() diff --git a/interfaces/cython/cantera/_onedim.pyx b/interfaces/cython/cantera/_onedim.pyx index 5d20cc2d8a6..0a7ae145b19 100644 --- a/interfaces/cython/cantera/_onedim.pyx +++ b/interfaces/cython/cantera/_onedim.pyx @@ -19,14 +19,11 @@ cdef class Domain1D: def __cinit__(self, _SolutionBase phase not None, *args, **kwargs): self.domain = NULL - def __init__(self, phase, *args, name=None, **kwargs): + def __init__(self, phase, *args, **kwargs): self._weakref_proxy = _WeakrefProxy() if self.domain is NULL: raise TypeError("Can't instantiate abstract class Domain1D.") - if name is not None: - self.name = name - self.gas = phase self.gas._references[self._weakref_proxy] = True self.set_default_tolerances() @@ -343,8 +340,8 @@ cdef class Inlet1D(Boundary1D): domain - it must be either the leftmost or rightmost domain in a stack. """ - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.inlet = new CxxInlet1D(phase._base) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.inlet = new CxxInlet1D(phase._base, stringify(name)) self.boundary = (self.inlet) def __dealloc__(self): @@ -365,8 +362,8 @@ cdef class Outlet1D(Boundary1D): A one-dimensional outlet. An outlet imposes a zero-gradient boundary condition on the flow. 
""" - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.outlet = new CxxOutlet1D(phase._base) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.outlet = new CxxOutlet1D(phase._base, stringify(name)) self.boundary = (self.outlet) def __dealloc__(self): @@ -377,8 +374,8 @@ cdef class OutletReservoir1D(Boundary1D): """ A one-dimensional outlet into a reservoir. """ - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.outlet = new CxxOutletRes1D(phase._base) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.outlet = new CxxOutletRes1D(phase._base, stringify(name)) self.boundary = (self.outlet) def __dealloc__(self): @@ -387,8 +384,8 @@ cdef class OutletReservoir1D(Boundary1D): cdef class SymmetryPlane1D(Boundary1D): """A symmetry plane.""" - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.symm = new CxxSymm1D(phase._base) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.symm = new CxxSymm1D(phase._base, stringify(name)) self.boundary = (self.symm) def __dealloc__(self): @@ -397,8 +394,8 @@ cdef class SymmetryPlane1D(Boundary1D): cdef class Surface1D(Boundary1D): """A solid surface.""" - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.surf = new CxxSurf1D(phase._base) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.surf = new CxxSurf1D(phase._base, stringify(name)) self.boundary = (self.surf) def __dealloc__(self): @@ -416,9 +413,9 @@ cdef class ReactingSurface1D(Boundary1D): Starting in Cantera 3.0, parameter `phase` should reference surface instead of gas phase. 
""" - def __cinit__(self, _SolutionBase phase, *args, **kwargs): + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): if phase.phase_of_matter != "gas": - self.surf = new CxxReactingSurf1D(phase._base) + self.surf = new CxxReactingSurf1D(phase._base, stringify(name)) else: # legacy pathway - deprecation is handled in __init__ self.surf = new CxxReactingSurf1D() @@ -429,7 +426,9 @@ cdef class ReactingSurface1D(Boundary1D): if phase.phase_of_matter == "gas": warnings.warn("Starting in Cantera 3.0, parameter 'phase' should " "reference surface instead of gas phase.", DeprecationWarning) - super().__init__(phase, name=name) + super().__init__(phase) + if name is not None: + self.name = name else: sol = phase gas = None @@ -707,8 +706,8 @@ cdef class IdealGasFlow(_FlowBase): equations assume an ideal gas mixture. Arbitrary chemistry is allowed, as well as arbitrary variation of the transport properties. """ - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.flow = new CxxStFlow(phase._base, phase.n_species, 2) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.flow = new CxxStFlow(phase._base, stringify(name), 2) cdef class IonFlow(_FlowBase): @@ -717,8 +716,9 @@ cdef class IonFlow(_FlowBase): In an ion flow domain, the electric drift is added to the diffusion flux """ - def __cinit__(self, _SolutionBase thermo, *args, **kwargs): - self.flow = (new CxxIonFlow(thermo._base, thermo.n_species, 2)) + def __cinit__(self, _SolutionBase thermo, *args, name="", **kwargs): + self.flow = ( + new CxxIonFlow(thermo._base, stringify(name), 2)) def set_solving_stage(self, stage): """ diff --git a/src/oneD/Boundary1D.cpp b/src/oneD/Boundary1D.cpp index b3820321402..8baa518556a 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -598,7 +598,7 @@ ReactingSurf1D::ReactingSurf1D() m_type = cSurfType; } -ReactingSurf1D::ReactingSurf1D(shared_ptr solution) +ReactingSurf1D::ReactingSurf1D(shared_ptr solution, const 
std::string& id) { auto phase = std::dynamic_pointer_cast(solution->thermo()); if (!phase) { @@ -612,6 +612,7 @@ ReactingSurf1D::ReactingSurf1D(shared_ptr solution) solution->kinetics()->kineticsType()); } m_solution = solution; + m_id = id; m_kin = kin.get(); m_sphase = phase.get(); diff --git a/src/oneD/IonFlow.cpp b/src/oneD/IonFlow.cpp index edbe88bf071..b5d9eddacc7 100644 --- a/src/oneD/IonFlow.cpp +++ b/src/oneD/IonFlow.cpp @@ -58,10 +58,11 @@ IonFlow::IonFlow(ThermoPhase* ph, size_t nsp, size_t points) : m_do_electric_field.resize(m_points,false); } -IonFlow::IonFlow(shared_ptr sol, size_t nsp, size_t points) : - IonFlow(sol->thermo().get(), nsp, points) +IonFlow::IonFlow(shared_ptr sol, const std::string& id, size_t points) + : IonFlow(sol->thermo().get(), sol->thermo()->nSpecies(), points) { m_solution = sol; + m_id = id; m_kin = m_solution->kinetics().get(); m_trans_shared = m_solution->transport(); m_trans = m_trans_shared.get(); diff --git a/src/oneD/StFlow.cpp b/src/oneD/StFlow.cpp index bfeebdc2e65..70661e7a790 100644 --- a/src/oneD/StFlow.cpp +++ b/src/oneD/StFlow.cpp @@ -107,10 +107,11 @@ StFlow::StFlow(ThermoPhase* ph, size_t nsp, size_t points) : m_kRadiating[1] = m_thermo->speciesIndex("H2O"); } -StFlow::StFlow(shared_ptr sol, size_t nsp, size_t points) : - StFlow(sol->thermo().get(), nsp, points) +StFlow::StFlow(shared_ptr sol, const std::string& id, size_t points) + : StFlow(sol->thermo().get(), sol->thermo()->nSpecies(), points) { m_solution = sol; + m_id = id; m_kin = m_solution->kinetics().get(); m_trans_shared = m_solution->transport(); m_trans = m_trans_shared.get(); From 71554f1ba9027c0652a4b311ebe8a2a83832d9c5 Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Tue, 27 Dec 2022 08:43:37 +0100 Subject: [PATCH 85/93] [samples] Add Sim1D::save to cxx flamespeed sample --- samples/cxx/flamespeed/flamespeed.cpp | 28 ++- .../cxx_samples/cxx_flamespeed_blessed.txt | 192 +++++++++--------- 2 files changed, 120 insertions(+), 100 deletions(-) 
diff --git a/samples/cxx/flamespeed/flamespeed.cpp b/samples/cxx/flamespeed/flamespeed.cpp index 6c2cd90b3e4..3f8abe36b0d 100644 --- a/samples/cxx/flamespeed/flamespeed.cpp +++ b/samples/cxx/flamespeed/flamespeed.cpp @@ -7,7 +7,8 @@ * * Usage: flamespeed [equivalence_ratio] [refine_grid] [loglevel] * - * Keywords: combustion, 1D flow, premixed flame, flame speed + * Requires: cantera >= 3.0 + * Keywords: combustion, 1D flow, premixed flame, flame speed, saving output */ // This file is part of Cantera. See License.txt in the top-level directory or @@ -52,7 +53,7 @@ int flamespeed(double phi, bool refine_grid, int loglevel) //-------- step 1: create the flow ------------- - StFlow flow(sol); + StFlow flow(sol, "flow"); flow.setFreeFlow(); // create an initial grid @@ -68,7 +69,7 @@ int flamespeed(double phi, bool refine_grid, int loglevel) //------- step 2: create the inlet ----------------------- - Inlet1D inlet; + Inlet1D inlet(sol, "inlet"); inlet.setMoleFractions(x.data()); double mdot=uin*rho_in; @@ -77,7 +78,7 @@ int flamespeed(double phi, bool refine_grid, int loglevel) //------- step 3: create the outlet --------------------- - Outlet1D outlet; + Outlet1D outlet(sol, "outlet"); //=================== create the container and insert the domains ===== @@ -113,6 +114,21 @@ int flamespeed(double phi, bool refine_grid, int loglevel) flame.setRefineCriteria(flowdomain,ratio,slope,curve); + // Save initial guess to container file + + // Solution is saved in HDF5 or YAML file format + std::string fileName; + if (usesHDF5()) { + // Cantera is compiled with native HDF5 support + fileName = "flamespeed.h5"; + } else { + fileName = "flamespeed.yaml"; + } + if (std::ifstream(fileName).good()) { + std::remove(fileName.c_str()); + } + flame.save(fileName, "initial-guess", "Initial guess", 0); + // Solve freely propagating flame // Linearly interpolate to find location where this temperature would @@ -126,6 +142,7 @@ int flamespeed(double phi, bool refine_grid, int loglevel) 
flow.componentIndex("velocity"),0); print("Flame speed with mixture-averaged transport: {} m/s\n", flameSpeed_mix); + flame.save(fileName, "mix", "Solution with mixture-averaged transport", 0); // now switch to multicomponent transport flow.setTransportModel("multicomponent"); @@ -134,6 +151,7 @@ int flamespeed(double phi, bool refine_grid, int loglevel) flow.componentIndex("velocity"),0); print("Flame speed with multicomponent transport: {} m/s\n", flameSpeed_multi); + flame.save(fileName, "multi", "Solution with multicomponent transport", 0); // now enable Soret diffusion flow.enableSoret(true); @@ -142,6 +160,8 @@ int flamespeed(double phi, bool refine_grid, int loglevel) flow.componentIndex("velocity"),0); print("Flame speed with multicomponent transport + Soret: {} m/s\n", flameSpeed_full); + flame.save(fileName, "soret", + "Solution with mixture-averaged transport and Soret", 0); vector_fp zvec,Tvec,COvec,CO2vec,Uvec; diff --git a/test_problems/cxx_samples/cxx_flamespeed_blessed.txt b/test_problems/cxx_samples/cxx_flamespeed_blessed.txt index f65184bb7d2..983b3af42b5 100644 --- a/test_problems/cxx_samples/cxx_flamespeed_blessed.txt +++ b/test_problems/cxx_samples/cxx_flamespeed_blessed.txt @@ -1,154 +1,154 @@ phi = 0.9, Tad = 2133.7925650475245 ->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> domain 0 <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> inlet <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< - Mass Flux: 0.3381 kg/m^2/s - Temperature: 300 K - Mass Fractions: - O2 0.2213 - CH4 0.04993 - N2 0.7288 + Mass Flux: 0.3381 kg/m^2/s + Temperature: 300 K + Mass Fractions: + O2 0.2213 + CH4 0.04993 + N2 0.7288 ->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> domain 1 <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> flow <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< Pressure: 1.013e+05 Pa ------------------------------------------------------------------------------- - z velocity spread_rate T lambda eField + z velocity spread_rate T lambda eField 
------------------------------------------------------------------------------- - 0 0.3 0 300 0 0 - 0.02 0.3 0 300 0 0 - 0.04 0.7598 0 758.4 0 0 - 0.06 1.679 0 1675 0 0 - 0.08 2.139 0 2134 0 0 - 0.1 2.139 0 2134 0 0 + 0 0.3 0 300 0 0 + 0.02 0.3 0 300 0 0 + 0.04 0.7598 0 758.4 0 0 + 0.06 1.679 0 1675 0 0 + 0.08 2.139 0 2134 0 0 + 0.1 2.139 0 2134 0 0 ------------------------------------------------------------------------------- - z H2 H O O2 OH + z H2 H O O2 OH ------------------------------------------------------------------------------- - 0 0 0 0 0.2213 0 - 0.02 0 0 0 0.2213 0 - 0.04 1.696e-05 1.065e-06 3.44e-05 0.1713 0.0004119 - 0.06 5.087e-05 3.194e-06 0.0001032 0.07133 0.001236 - 0.08 6.782e-05 4.259e-06 0.0001376 0.02135 0.001648 - 0.1 6.782e-05 4.259e-06 0.0001376 0.02135 0.001648 + 0 0 0 0 0.2213 0 + 0.02 0 0 0 0.2213 0 + 0.04 1.696e-05 1.065e-06 3.44e-05 0.1713 0.0004119 + 0.06 5.087e-05 3.194e-06 0.0001032 0.07133 0.001236 + 0.08 6.782e-05 4.259e-06 0.0001376 0.02135 0.001648 + 0.1 6.782e-05 4.259e-06 0.0001376 0.02135 0.001648 ------------------------------------------------------------------------------- - z H2O HO2 H2O2 C CH + z H2O HO2 H2O2 C CH ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 0.02765 2.96e-07 2.011e-08 4.454e-20 4.963e-21 - 0.06 0.08296 8.88e-07 6.033e-08 1.336e-19 1.489e-20 - 0.08 0.1106 1.184e-06 8.044e-08 1.782e-19 1.985e-20 - 0.1 0.1106 1.184e-06 8.044e-08 1.782e-19 1.985e-20 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 0.02765 2.96e-07 2.011e-08 4.454e-20 4.963e-21 + 0.06 0.08296 8.88e-07 6.033e-08 1.336e-19 1.489e-20 + 0.08 0.1106 1.184e-06 8.044e-08 1.782e-19 1.985e-20 + 0.1 0.1106 1.184e-06 8.044e-08 1.782e-19 1.985e-20 ------------------------------------------------------------------------------- - z CH2 CH2(S) CH3 CH4 CO + z CH2 CH2(S) CH3 CH4 CO ------------------------------------------------------------------------------- - 0 0 0 0 0.04993 0 - 0.02 0 0 0 
0.04993 0 - 0.04 1.299e-20 7.228e-22 7.945e-20 0.03744 0.000587 - 0.06 3.897e-20 2.168e-21 2.384e-19 0.01248 0.001761 - 0.08 5.196e-20 2.891e-21 3.178e-19 1.401e-19 0.002348 - 0.1 5.196e-20 2.891e-21 3.178e-19 1.401e-19 0.002348 + 0 0 0 0 0.04993 0 + 0.02 0 0 0 0.04993 0 + 0.04 1.299e-20 7.228e-22 7.945e-20 0.03744 0.000587 + 0.06 3.897e-20 2.168e-21 2.384e-19 0.01248 0.001761 + 0.08 5.196e-20 2.891e-21 3.178e-19 1.401e-19 0.002348 + 0.1 5.196e-20 2.891e-21 3.178e-19 1.401e-19 0.002348 ------------------------------------------------------------------------------- - z CO2 HCO CH2O CH2OH CH3O + z CO2 HCO CH2O CH2OH CH3O ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 0.03332 1.924e-11 2.395e-13 3.147e-19 4.683e-21 - 0.06 0.09995 5.772e-11 7.185e-13 9.441e-19 1.405e-20 - 0.08 0.1333 7.696e-11 9.58e-13 1.259e-18 1.873e-20 - 0.1 0.1333 7.696e-11 9.58e-13 1.259e-18 1.873e-20 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 0.03332 1.924e-11 2.395e-13 3.147e-19 4.683e-21 + 0.06 0.09995 5.772e-11 7.185e-13 9.441e-19 1.405e-20 + 0.08 0.1333 7.696e-11 9.58e-13 1.259e-18 1.873e-20 + 0.1 0.1333 7.696e-11 9.58e-13 1.259e-18 1.873e-20 ------------------------------------------------------------------------------- - z CH3OH C2H C2H2 C2H3 C2H4 + z CH3OH C2H C2H2 C2H3 C2H4 ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 2.19e-20 1.096e-27 3.177e-25 1.094e-30 1.075e-30 - 0.06 6.569e-20 3.287e-27 9.531e-25 3.283e-30 3.224e-30 - 0.08 8.758e-20 4.383e-27 1.271e-24 4.377e-30 4.298e-30 - 0.1 8.758e-20 4.383e-27 1.271e-24 4.377e-30 4.298e-30 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 2.19e-20 1.096e-27 3.177e-25 1.094e-30 1.075e-30 + 0.06 6.569e-20 3.287e-27 9.531e-25 3.283e-30 3.224e-30 + 0.08 8.758e-20 4.383e-27 1.271e-24 4.377e-30 4.298e-30 + 0.1 8.758e-20 4.383e-27 1.271e-24 4.377e-30 4.298e-30 
------------------------------------------------------------------------------- - z C2H5 C2H6 HCCO CH2CO HCCOH + z C2H5 C2H6 HCCO CH2CO HCCOH ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 5.292e-36 3.24e-37 1.308e-22 1.521e-22 1.095e-25 - 0.06 1.588e-35 9.719e-37 3.923e-22 4.563e-22 3.285e-25 - 0.08 2.117e-35 1.296e-36 5.23e-22 6.084e-22 4.381e-25 - 0.1 2.117e-35 1.296e-36 5.23e-22 6.084e-22 4.381e-25 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 5.292e-36 3.24e-37 1.308e-22 1.521e-22 1.095e-25 + 0.06 1.588e-35 9.719e-37 3.923e-22 4.563e-22 3.285e-25 + 0.08 2.117e-35 1.296e-36 5.23e-22 6.084e-22 4.381e-25 + 0.1 2.117e-35 1.296e-36 5.23e-22 6.084e-22 4.381e-25 ------------------------------------------------------------------------------- - z N NH NH2 NH3 NNH + z N NH NH2 NH3 NNH ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 5.944e-10 7.145e-11 2.291e-11 6.197e-11 5.73e-11 - 0.06 1.783e-09 2.144e-10 6.873e-11 1.859e-10 1.719e-10 - 0.08 2.378e-09 2.858e-10 9.164e-11 2.479e-10 2.292e-10 - 0.1 2.378e-09 2.858e-10 9.164e-11 2.479e-10 2.292e-10 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 5.944e-10 7.145e-11 2.291e-11 6.197e-11 5.73e-11 + 0.06 1.783e-09 2.144e-10 6.873e-11 1.859e-10 1.719e-10 + 0.08 2.378e-09 2.858e-10 9.164e-11 2.479e-10 2.292e-10 + 0.1 2.378e-09 2.858e-10 9.164e-11 2.479e-10 2.292e-10 ------------------------------------------------------------------------------- - z NO NO2 N2O HNO CN + z NO NO2 N2O HNO CN ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 0.000833 5.323e-07 6.55e-08 7.525e-09 5.744e-16 - 0.06 0.002499 1.597e-06 1.965e-07 2.257e-08 1.723e-15 - 0.08 0.003332 2.129e-06 2.62e-07 3.01e-08 2.297e-15 - 0.1 0.003332 2.129e-06 2.62e-07 3.01e-08 2.297e-15 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 0.000833 5.323e-07 6.55e-08 7.525e-09 5.744e-16 + 0.06 
0.002499 1.597e-06 1.965e-07 2.257e-08 1.723e-15 + 0.08 0.003332 2.129e-06 2.62e-07 3.01e-08 2.297e-15 + 0.1 0.003332 2.129e-06 2.62e-07 3.01e-08 2.297e-15 ------------------------------------------------------------------------------- - z HCN H2CN HCNN HCNO HOCN + z HCN H2CN HCNN HCNO HOCN ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 1.756e-13 2.02e-20 8.356e-24 2.661e-18 4.599e-14 - 0.06 5.268e-13 6.061e-20 2.507e-23 7.983e-18 1.38e-13 - 0.08 7.024e-13 8.082e-20 3.342e-23 1.064e-17 1.84e-13 - 0.1 7.024e-13 8.082e-20 3.342e-23 1.064e-17 1.84e-13 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 1.756e-13 2.02e-20 8.356e-24 2.661e-18 4.599e-14 + 0.06 5.268e-13 6.061e-20 2.507e-23 7.983e-18 1.38e-13 + 0.08 7.024e-13 8.082e-20 3.342e-23 1.064e-17 1.84e-13 + 0.1 7.024e-13 8.082e-20 3.342e-23 1.064e-17 1.84e-13 ------------------------------------------------------------------------------- - z HNCO NCO N2 AR C3H7 + z HNCO NCO N2 AR C3H7 ------------------------------------------------------------------------------- - 0 0 0 0.7288 0 0 - 0.02 0 0 0.7288 0 0 - 0.04 1.988e-11 8.476e-13 0.7284 0 9.35e-53 - 0.06 5.963e-11 2.543e-12 0.7276 0 2.805e-52 - 0.08 7.95e-11 3.391e-12 0.7272 0 3.74e-52 - 0.1 7.95e-11 3.391e-12 0.7272 0 3.74e-52 + 0 0 0 0.7288 0 0 + 0.02 0 0 0.7288 0 0 + 0.04 1.988e-11 8.476e-13 0.7284 0 9.35e-53 + 0.06 5.963e-11 2.543e-12 0.7276 0 2.805e-52 + 0.08 7.95e-11 3.391e-12 0.7272 0 3.74e-52 + 0.1 7.95e-11 3.391e-12 0.7272 0 3.74e-52 ------------------------------------------------------------------------------- - z C3H8 CH2CHO CH3CHO + z C3H8 CH2CHO CH3CHO ------------------------------------------------------------------------------- - 0 0 0 0 - 0.02 0 0 0 - 0.04 5.365e-54 2.809e-28 4.525e-29 - 0.06 1.61e-53 8.426e-28 1.358e-28 - 0.08 2.146e-53 1.123e-27 1.81e-28 - 0.1 2.146e-53 1.123e-27 1.81e-28 + 0 0 0 0 + 0.02 0 0 0 + 0.04 5.365e-54 2.809e-28 4.525e-29 + 0.06 1.61e-53 8.426e-28 1.358e-28 + 
0.08 2.146e-53 1.123e-27 1.81e-28 + 0.1 2.146e-53 1.123e-27 1.81e-28 ->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> domain 2 <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> outlet <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< ------------------------------------------------------------------------------- - z + z ------------------------------------------------------------------------------- - 0 + 0 Flame speed with mixture-averaged transport: 0.3158995218368368 m/s Flame speed with multicomponent transport: 0.318579363934684 m/s Flame speed with multicomponent transport + Soret: 0.3183709757821478 m/s -z (m) T (K) U (m/s) Y(CO) +z (m) T (K) U (m/s) Y(CO) 0.000000 300.000 0.318 0.00000 0.020000 300.070 0.318 0.00000 0.040000 319.350 0.339 0.00074 From d629432cbd1b0d90bf899962a43347d4b7e2eb3c Mon Sep 17 00:00:00 2001 From: Ingmar Schoegl Date: Fri, 6 Jan 2023 09:27:43 +0100 Subject: [PATCH 86/93] [platform] Update Makefile and cantera.pc --- platform/posix/Cantera.mak.in | 18 +++++++++++++----- platform/posix/SConscript | 8 ++++++++ 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/platform/posix/Cantera.mak.in b/platform/posix/Cantera.mak.in index 7665e3a21e4..12ce59c599d 100644 --- a/platform/posix/Cantera.mak.in +++ b/platform/posix/Cantera.mak.in @@ -64,12 +64,20 @@ CANTERA_SUNDIALS_LIBS=@mak_sundials_libdir@ @mak_sundials_libs@ CANTERA_BLAS_LAPACK_LIBS=@mak_blas_lapack_libs@ +############################################################################### +# HDF5 SUPPORT +############################################################################### + +CANTERA_HDF5_INCLUDES=@mak_hdf_include@ +CANTERA_HDF5_LIBS=@mak_hdf_libs@ + ############################################################################### # COMBINATIONS OF INCLUDES AND LIBS ############################################################################### CANTERA_INCLUDES=$(CANTERA_CORE_INCLUDES) $(CANTERA_SUNDIALS_INCLUDE) \ - $(CANTERA_BOOST_INCLUDES) $(CANTERA_EXTRA_INCLUDES) + 
$(CANTERA_BOOST_INCLUDES) $(CANTERA_HDF5_INCLUDES) \ + $(CANTERA_EXTRA_INCLUDES) CANTERA_TOTAL_INCLUDES = $(CANTERA_INCLUDES) @@ -78,16 +86,16 @@ CANTERA_DEFINES = -DCANTERA_VERSION=@cantera_version@ CANTERA_LIBS=$(CANTERA_CORE_LIBS) \ $(CANTERA_EXTRA_LIBDIRS) $(CANTERA_SUNDIALS_LIBS) \ - $(CANTERA_BLAS_LAPACK_LIBS) + $(CANTERA_BLAS_LAPACK_LIBS) $(CANTERA_HDF5_LIBS) CANTERA_TOTAL_LIBS=$(CANTERA_LIBS) -CANTERA_TOTAL_LIBS_DEP= $(CANTERA_CORE_LIBS_DEP) \ - $(CANTERA_SUNDIALS_LIBS_DEP) +CANTERA_TOTAL_LIBS_DEP= $(CANTERA_CORE_LIBS_DEP) CANTERA_FORTRAN_LIBS=$(CANTERA_CORE_FTN) \ $(CANTERA_EXTRA_LIBDIRS) $(CANTERA_SUNDIALS_LIBS) \ - $(CANTERA_BLAS_LAPACK_LIBS) $(CANTERA_FORTRAN_SYSLIBS) + $(CANTERA_BLAS_LAPACK_LIBS) $(CANTERA_FORTRAN_SYSLIBS) \ + $(CANTERA_HDF5_LIBS) ############################################################################### # END diff --git a/platform/posix/SConscript b/platform/posix/SConscript index 90cc8454568..9f090768ad1 100644 --- a/platform/posix/SConscript +++ b/platform/posix/SConscript @@ -58,6 +58,14 @@ if localenv["boost_inc_dir"] and not localenv["package_build"]: else: localenv['mak_boost_include'] = '' +if localenv["use_hdf5"] and not localenv["package_build"]: + localenv["mak_hdf_include"] = f"-I{localenv['hdf_include']}" + pc_incdirs.append(localenv["hdf_include"]) + localenv["mak_hdf_libs"] = f"-L{localenv['hdf_libdir']} -lhdf5" +else: + localenv["mak_hdf_include"] = "" + localenv["mak_hdf_libs"] = "-lhdf5" + # Handle BLAS/LAPACK linkage blas_lapack_libs = " ".join(f"-l{lib}" for lib in localenv["blas_lapack_libs"]) if localenv["blas_lapack_dir"] and not localenv["package_build"]: From 8a1bba987ac9a4f0e539dc98632c86ca818e4882 Mon Sep 17 00:00:00 2001 From: Bang-Shiuh Chen Date: Mon, 6 Jun 2022 23:39:57 -0400 Subject: [PATCH 87/93] [thermo] fix plasma thermo properties --- include/cantera/thermo/PlasmaPhase.h | 67 ++++++++++++++++++++++++++++ src/thermo/IdealGasPhase.cpp | 8 +--- src/thermo/PlasmaPhase.cpp | 66 
+++++++++++++++++++++++++++ 3 files changed, 135 insertions(+), 6 deletions(-) diff --git a/include/cantera/thermo/PlasmaPhase.h b/include/cantera/thermo/PlasmaPhase.h index 60630a07916..2f02ef9f396 100644 --- a/include/cantera/thermo/PlasmaPhase.h +++ b/include/cantera/thermo/PlasmaPhase.h @@ -187,11 +187,78 @@ class PlasmaPhase: public IdealGasPhase return m_electronTemp; } + //! Return the Gas Constant multiplied by the current electron temperature + /*! + * The units are Joules kmol-1 + */ + double RTe() const { + return electronTemperature() * GasConstant; + } + + //! Pressure + //! Units: Pa. For an ideal gas mixture with additional electrons, + //! \f[ + //! P = \sum_{k \neq k_e} n_k R T. + //! \f] + virtual double pressure() const { + double sum = 0.0; + for (size_t k = 0; k < m_kk; k++) { + if (k != m_electronSpeciesIndex) { + sum += GasConstant * concentration(k) * temperature(); + } + } + return sum; + } + + /** + * Electron pressure. Units: Pa. + * \f[P = n_{k_e} R T_e\f] + */ + virtual double electronPressure() const { + return GasConstant * concentration(m_electronSpeciesIndex) * + electronTemperature(); + } + //! Number of electron levels size_t nElectronEnergyLevels() const { return m_nPoints; } + //! Electron Species Index + size_t electronSpeciesIndex() const { + return m_electronSpeciesIndex; + } + + //! Return the Molar enthalpy. Units: J/kmol. + /*! + * For an ideal gas mixture with additional electron, + * \f[ + * \hat h(T) = \sum_{k \neq k_e} X_k \hat h^0_k(T) + X_{k_e} \hat h^0_{k_e}(T_e), + * \f] + * and is a function only of temperature. The standard-state pure-species + * enthalpies \f$ \hat h^0_k(T) \f$ are computed by the species + * thermodynamic property manager. 
+ * + * \see MultiSpeciesThermo + */ + virtual double enthalpy_mole() const; + + virtual double entropy_mole() const; + + virtual double gibbs_mole() const; + + virtual void getGibbs_ref(double* g) const; + + virtual void getStandardVolumes_ref(double* vol) const; + + virtual void getStandardChemPotentials(double* mu) const; + + virtual void getChemPotentials(double* mu) const; + + virtual void getPartialMolarEnthalpies(double* hbar) const; + + virtual void getPartialMolarIntEnergies(double* ubar) const; + virtual void getParameters(AnyMap& phaseNode) const; virtual void setParameters(const AnyMap& phaseNode, diff --git a/src/thermo/IdealGasPhase.cpp b/src/thermo/IdealGasPhase.cpp index f0860945638..9dae1f0f719 100644 --- a/src/thermo/IdealGasPhase.cpp +++ b/src/thermo/IdealGasPhase.cpp @@ -54,8 +54,7 @@ void IdealGasPhase::getActivityCoefficients(doublereal* ac) const void IdealGasPhase::getStandardChemPotentials(doublereal* muStar) const { - const vector_fp& gibbsrt = gibbs_RT_ref(); - scale(gibbsrt.begin(), gibbsrt.end(), muStar, RT()); + getGibbs_ref(muStar); double tmp = log(pressure() / refPressure()) * RT(); for (size_t k = 0; k < m_kk; k++) { muStar[k] += tmp; // add RT*ln(P/P_0) @@ -152,10 +151,7 @@ void IdealGasPhase::getPureGibbs(doublereal* gpure) const void IdealGasPhase::getIntEnergy_RT(doublereal* urt) const { - const vector_fp& _h = enthalpy_RT_ref(); - for (size_t k = 0; k < m_kk; k++) { - urt[k] = _h[k] - 1.0; - } + getIntEnergy_RT_ref(urt); } void IdealGasPhase::getCp_R(doublereal* cpr) const diff --git a/src/thermo/PlasmaPhase.cpp b/src/thermo/PlasmaPhase.cpp index 7e075d71ac2..6658b830577 100644 --- a/src/thermo/PlasmaPhase.cpp +++ b/src/thermo/PlasmaPhase.cpp @@ -268,4 +268,70 @@ void PlasmaPhase::updateThermo() const m_g0_RT[k] = m_h0_RT[k] - m_s0_R[k]; } +double PlasmaPhase::enthalpy_mole() const { + double value = IdealGasPhase::enthalpy_mole(); + value += GasConstant * (electronTemperature() - temperature()) * + 
moleFraction(m_electronSpeciesIndex) * + m_h0_RT[m_electronSpeciesIndex]; + return value; +} + +double PlasmaPhase::entropy_mole() const { + warn_user("PlasmaPhase::entropy_mole", + "Use the same equation of IdealGasPhase::entropy_mole " + "which is not correct for plasma."); + return IdealGasPhase::entropy_mole(); +} + +double PlasmaPhase::gibbs_mole() const { + warn_user("PlasmaPhase::gibbs_mole", + "Use the same equation of IdealGasPhase::gibbs_mole " + "which is not correct for plasma."); + return IdealGasPhase::gibbs_mole(); +} + +void PlasmaPhase::getGibbs_ref(double* g) const +{ + IdealGasPhase::getGibbs_ref(g); + g[m_electronSpeciesIndex] *= electronTemperature() / temperature(); +} + +void PlasmaPhase::getStandardVolumes_ref(double* vol) const +{ + IdealGasPhase::getStandardVolumes_ref(vol); + vol[m_electronSpeciesIndex] *= electronTemperature() / temperature(); +} + +void PlasmaPhase::getStandardChemPotentials(double* muStar) const +{ + warn_user("PlasmaPhase::getStandardChemPotentials", + "Use the same equation of IdealGasPhase::getStandardChemPotentials " + "which is not correct for plasma."); + IdealGasPhase::getStandardChemPotentials(muStar); +} + +void PlasmaPhase::getChemPotentials(double* mu) const +{ + warn_user("PlasmaPhase::getChemPotentials", + "Use the same equation of IdealGasPhase::getChemPotentials " + "which is not correct for plasma."); + IdealGasPhase::getChemPotentials(mu); +} + +void PlasmaPhase::getPartialMolarEnthalpies(double* hbar) const +{ + IdealGasPhase::getPartialMolarEnthalpies(hbar); + hbar[m_electronSpeciesIndex] *= electronTemperature() / temperature(); +} + +void PlasmaPhase::getPartialMolarIntEnergies(double* ubar) const +{ + const vector_fp& _h = enthalpy_RT_ref(); + for (size_t k = 0; k < m_kk; k++) { + ubar[k] = RT() * (_h[k] - 1.0); + } + size_t k = m_electronSpeciesIndex; + ubar[k] = RTe() * (_h[k] - 1.0); +} + } From d38782db0867912241a00eea1e5390b8c7e8a246 Mon Sep 17 00:00:00 2001 From: bangshiuh Date: Fri, 17 Jun 
2022 11:36:53 -0400 Subject: [PATCH 88/93] [thermo, test] fix the consistency calculation for plasma --- test/thermo/consistency.cpp | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/test/thermo/consistency.cpp b/test/thermo/consistency.cpp index 0004d242120..d3143a2deeb 100644 --- a/test/thermo/consistency.cpp +++ b/test/thermo/consistency.cpp @@ -1,5 +1,6 @@ #include "gtest/gtest.h" #include "cantera/thermo/ThermoPhase.h" +#include "cantera/thermo/PlasmaPhase.h" #include "cantera/thermo/ThermoFactory.h" #include "cantera/thermo/MolalityVPSSTP.h" #include "cantera/base/Solution.h" @@ -80,7 +81,14 @@ class TestConsistency : public testing::TestWithParam nsp = phase->nSpecies(); p = phase->pressure(); T = phase->temperature(); + Te = phase->electronTemperature(); + RTe = Te * GasConstant; RT = T * GasConstant; + if (phase->type() == "plasma") { + ke = dynamic_cast(*phase).electronSpeciesIndex(); + } else { + ke = npos; + } } void SetUp() { @@ -101,7 +109,8 @@ class TestConsistency : public testing::TestWithParam AnyMap setup; shared_ptr phase; size_t nsp; - double T, p, RT; + double T, p, RT, RTe, Te; + size_t ke; double atol; // absolute tolerance for molar energy comparisons double atol_v; // absolute tolerance for molar volume comparisons double rtol_fd; // relative tolerance for finite difference comparisons @@ -136,7 +145,11 @@ TEST_P(TestConsistency, hk_eq_uk_plus_P_vk) GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } for (size_t k = 0; k < nsp; k++) { - EXPECT_NEAR(hk[k], uk[k] + p * vk[k], atol) << "k = " << k; + if (k == ke) { + EXPECT_NEAR(hk[k], uk[k] + RTe, atol) << "k = " << k; + } else { + EXPECT_NEAR(hk[k], uk[k] + p * vk[k], atol) << "k = " << k; + } } } From 5f40aac79fece95f945fb0c2c9fd89683479d147 Mon Sep 17 00:00:00 2001 From: bangshiuh Date: Fri, 17 Jun 2022 16:25:10 -0400 Subject: [PATCH 89/93] [interface] add electron pressure --- include/cantera/thermo/PlasmaPhase.h | 15 --------------- 
interfaces/cython/cantera/thermo.pxd | 1 + interfaces/cython/cantera/thermo.pyx | 7 +++++++ 3 files changed, 8 insertions(+), 15 deletions(-) diff --git a/include/cantera/thermo/PlasmaPhase.h b/include/cantera/thermo/PlasmaPhase.h index 2f02ef9f396..adee8b9309a 100644 --- a/include/cantera/thermo/PlasmaPhase.h +++ b/include/cantera/thermo/PlasmaPhase.h @@ -195,21 +195,6 @@ class PlasmaPhase: public IdealGasPhase return electronTemperature() * GasConstant; } - //! Pressure - //! Units: Pa. For an ideal gas mixture with additional electrons, - //! \f[ - //! P = \sum_{k \neq k_e} n_k R T. - //! \f] - virtual double pressure() const { - double sum = 0.0; - for (size_t k = 0; k < m_kk; k++) { - if (k != m_electronSpeciesIndex) { - sum += GasConstant * concentration(k) * temperature(); - } - } - return sum; - } - /** * Electron pressure. Units: Pa. * \f[P = n_{k_e} R T_e\f] diff --git a/interfaces/cython/cantera/thermo.pxd b/interfaces/cython/cantera/thermo.pxd index a8e1102a0f6..6645bd14f49 100644 --- a/interfaces/cython/cantera/thermo.pxd +++ b/interfaces/cython/cantera/thermo.pxd @@ -205,6 +205,7 @@ cdef extern from "cantera/thermo/PlasmaPhase.h": double isotropicShapeFactor() double meanElectronEnergy() size_t nElectronEnergyLevels() except +translate_exception + double electronPressure() except +translate_exception cdef extern from "cantera/cython/thermo_utils.h": diff --git a/interfaces/cython/cantera/thermo.pyx b/interfaces/cython/cantera/thermo.pyx index efb3a890f50..4c1c3b2c5c3 100644 --- a/interfaces/cython/cantera/thermo.pyx +++ b/interfaces/cython/cantera/thermo.pyx @@ -1747,6 +1747,13 @@ cdef class ThermoPhase(_SolutionBase): raise ThermoModelMethodError(self.thermo_model) self.plasma.setElectronTemperature(value) + property Pe: + """Get electron Pressure [Pa].""" + def __get__(self): + if not self._enable_plasma: + raise ThermoModelMethodError(self.thermo_model) + return self.plasma.electronPressure() + def set_discretized_electron_energy_distribution(self, 
levels, distribution): """ Set electron energy distribution. When this method is used, electron From a1d393087b4dfffa616f2e0d26246d155616972f Mon Sep 17 00:00:00 2001 From: bangshiuh Date: Sun, 2 Oct 2022 17:13:33 -0400 Subject: [PATCH 90/93] delete unused except +translate_exception --- interfaces/cython/cantera/thermo.pxd | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/interfaces/cython/cantera/thermo.pxd b/interfaces/cython/cantera/thermo.pxd index 6645bd14f49..5e4a1aa401b 100644 --- a/interfaces/cython/cantera/thermo.pxd +++ b/interfaces/cython/cantera/thermo.pxd @@ -199,13 +199,13 @@ cdef extern from "cantera/thermo/PlasmaPhase.h": string electronEnergyDistributionType() void setQuadratureMethod(const string&) except +translate_exception string quadratureMethod() - void enableNormalizeElectronEnergyDist(cbool) except +translate_exception + void enableNormalizeElectronEnergyDist(cbool) cbool normalizeElectronEnergyDistEnabled() void setMeanElectronEnergy(double) except +translate_exception double isotropicShapeFactor() double meanElectronEnergy() - size_t nElectronEnergyLevels() except +translate_exception - double electronPressure() except +translate_exception + size_t nElectronEnergyLevels() + double electronPressure() cdef extern from "cantera/cython/thermo_utils.h": From 2f9abcfa735e24021f4f3f686c7f19ac15c0c301 Mon Sep 17 00:00:00 2001 From: bangshiuh Date: Tue, 11 Oct 2022 22:15:04 -0400 Subject: [PATCH 91/93] make more functions NotImplemented for PlasmaPhase --- include/cantera/thermo/PlasmaPhase.h | 26 +++++-- src/thermo/PlasmaPhase.cpp | 82 +++++++++++++-------- test/thermo/consistency.cpp | 104 +++++++++++++++++++-------- 3 files changed, 150 insertions(+), 62 deletions(-) diff --git a/include/cantera/thermo/PlasmaPhase.h b/include/cantera/thermo/PlasmaPhase.h index adee8b9309a..9f1d28c2451 100644 --- a/include/cantera/thermo/PlasmaPhase.h +++ b/include/cantera/thermo/PlasmaPhase.h @@ -228,20 +228,38 @@ class PlasmaPhase: 
public IdealGasPhase */ virtual double enthalpy_mole() const; - virtual double entropy_mole() const; + virtual double cp_mole() const { + throw NotImplementedError("PlasmaPhase::cp_mole"); + } + + virtual double entropy_mole() const { + throw NotImplementedError("PlasmaPhase::entropy_mole"); + } - virtual double gibbs_mole() const; + virtual double gibbs_mole() const { + throw NotImplementedError("PlasmaPhase::gibbs_mole"); + } + + virtual double intEnergy_mole() const { + throw NotImplementedError("PlasmaPhase::intEnergy_mole"); + } + + virtual void getEntropy_R(double* sr) const; + + virtual void getGibbs_RT(double* grt) const; virtual void getGibbs_ref(double* g) const; virtual void getStandardVolumes_ref(double* vol) const; - virtual void getStandardChemPotentials(double* mu) const; - virtual void getChemPotentials(double* mu) const; + virtual void getStandardChemPotentials(double* muStar) const; + virtual void getPartialMolarEnthalpies(double* hbar) const; + virtual void getPartialMolarEntropies(double* sbar) const; + virtual void getPartialMolarIntEnergies(double* ubar) const; virtual void getParameters(AnyMap& phaseNode) const; diff --git a/src/thermo/PlasmaPhase.cpp b/src/thermo/PlasmaPhase.cpp index 6658b830577..c50e3e38d21 100644 --- a/src/thermo/PlasmaPhase.cpp +++ b/src/thermo/PlasmaPhase.cpp @@ -276,20 +276,6 @@ double PlasmaPhase::enthalpy_mole() const { return value; } -double PlasmaPhase::entropy_mole() const { - warn_user("PlasmaPhase::entropy_mole", - "Use the same equation of IdealGasPhase::entropy_mole " - "which is not correct for plasma."); - return IdealGasPhase::entropy_mole(); -} - -double PlasmaPhase::gibbs_mole() const { - warn_user("PlasmaPhase::gibbs_mole", - "Use the same equation of IdealGasPhase::gibbs_mole " - "which is not correct for plasma."); - return IdealGasPhase::gibbs_mole(); -} - void PlasmaPhase::getGibbs_ref(double* g) const { IdealGasPhase::getGibbs_ref(g); @@ -302,28 +288,20 @@ void 
PlasmaPhase::getStandardVolumes_ref(double* vol) const vol[m_electronSpeciesIndex] *= electronTemperature() / temperature(); } -void PlasmaPhase::getStandardChemPotentials(double* muStar) const -{ - warn_user("PlasmaPhase::getStandardChemPotentials", - "Use the same equation of IdealGasPhase::getStandardChemPotentials " - "which is not correct for plasma."); - IdealGasPhase::getStandardChemPotentials(muStar); -} - -void PlasmaPhase::getChemPotentials(double* mu) const -{ - warn_user("PlasmaPhase::getChemPotentials", - "Use the same equation of IdealGasPhase::getChemPotentials " - "which is not correct for plasma."); - IdealGasPhase::getChemPotentials(mu); -} - void PlasmaPhase::getPartialMolarEnthalpies(double* hbar) const { IdealGasPhase::getPartialMolarEnthalpies(hbar); hbar[m_electronSpeciesIndex] *= electronTemperature() / temperature(); } +void PlasmaPhase::getPartialMolarEntropies(double* sbar) const +{ + IdealGasPhase::getPartialMolarEntropies(sbar); + double logp = log(pressure()); + double logpe = log(electronPressure()); + sbar[m_electronSpeciesIndex] += GasConstant * (logp - logpe); +} + void PlasmaPhase::getPartialMolarIntEnergies(double* ubar) const { const vector_fp& _h = enthalpy_RT_ref(); @@ -334,4 +312,48 @@ void PlasmaPhase::getPartialMolarIntEnergies(double* ubar) const ubar[k] = RTe() * (_h[k] - 1.0); } +void PlasmaPhase::getChemPotentials(double* mu) const +{ + IdealGasPhase::getChemPotentials(mu); + size_t k = m_electronSpeciesIndex; + double xx = std::max(SmallNumber, moleFraction(k)); + mu[k] += (RTe() - RT()) * log(xx); +} + +void PlasmaPhase::getStandardChemPotentials(double* muStar) const +{ + IdealGasPhase::getStandardChemPotentials(muStar); + size_t k = m_electronSpeciesIndex; + muStar[k] -= log(pressure() / refPressure()) * RT(); + muStar[k] += log(electronPressure() / refPressure()) * RTe(); +} + +void PlasmaPhase::getEntropy_R(double* sr) const +{ + const vector_fp& _s = entropy_R_ref(); + copy(_s.begin(), _s.end(), sr); + double tmp 
= log(pressure() / refPressure()); + for (size_t k = 0; k < m_kk; k++) { + if (k != m_electronSpeciesIndex) { + sr[k] -= tmp; + } else { + sr[k] -= log(electronPressure() / refPressure()); + } + } +} + +void PlasmaPhase::getGibbs_RT(double* grt) const +{ + const vector_fp& gibbsrt = gibbs_RT_ref(); + copy(gibbsrt.begin(), gibbsrt.end(), grt); + double tmp = log(pressure() / refPressure()); + for (size_t k = 0; k < m_kk; k++) { + if (k != m_electronSpeciesIndex) { + grt[k] += tmp; + } else { + grt[k] += log(electronPressure() / refPressure()); + } + } +} + } diff --git a/test/thermo/consistency.cpp b/test/thermo/consistency.cpp index d3143a2deeb..bade573bf90 100644 --- a/test/thermo/consistency.cpp +++ b/test/thermo/consistency.cpp @@ -121,16 +121,26 @@ map, shared_ptr> TestConsistency::cache = {}; // --------------- Definitions for individual consistency tests --------------- TEST_P(TestConsistency, h_eq_u_plus_Pv) { - double h = phase->enthalpy_mole(); - double u = phase->intEnergy_mole(); - double v = phase->molarVolume(); + double h, u, v; + try { + h = phase->enthalpy_mole(); + u = phase->intEnergy_mole(); + v = phase->molarVolume(); + } catch (NotImplementedError& err) { + GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; + } EXPECT_NEAR(h, u + p * v, atol); } TEST_P(TestConsistency, g_eq_h_minus_Ts) { - double g = phase->gibbs_mole(); - double h = phase->enthalpy_mole(); - double s = phase->entropy_mole(); + double g, h, s; + try { + g = phase->gibbs_mole(); + h = phase->enthalpy_mole(); + s = phase->entropy_mole(); + } catch (NotImplementedError& err) { + GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; + } EXPECT_NEAR(g, h - T * s, atol); } @@ -145,11 +155,9 @@ TEST_P(TestConsistency, hk_eq_uk_plus_P_vk) GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } for (size_t k = 0; k < nsp; k++) { - if (k == ke) { - EXPECT_NEAR(hk[k], uk[k] + RTe, atol) << "k = " << k; - } else { + if (k != ke) { EXPECT_NEAR(hk[k], 
uk[k] + p * vk[k], atol) << "k = " << k; - } + } // not applicable for electron } } @@ -164,7 +172,9 @@ TEST_P(TestConsistency, gk_eq_hk_minus_T_sk) GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } for (size_t k = 0; k < nsp; k++) { - EXPECT_NEAR(gk[k], hk[k] - T * sk[k], atol) << "k = " << k; + if (k != ke) { + EXPECT_NEAR(gk[k], hk[k] - T * sk[k], atol) << "k = " << k; + } // not applicable for electron } } @@ -182,34 +192,40 @@ TEST_P(TestConsistency, h_eq_sum_hk_Xk) TEST_P(TestConsistency, u_eq_sum_uk_Xk) { vector_fp uk(nsp); + double u; try { phase->getPartialMolarIntEnergies(uk.data()); + u = phase->intEnergy_mole(); } catch (NotImplementedError& err) { GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } - EXPECT_NEAR(phase->intEnergy_mole(), phase->mean_X(uk), atol); + EXPECT_NEAR(u, phase->mean_X(uk), atol); } TEST_P(TestConsistency, g_eq_sum_gk_Xk) { vector_fp gk(nsp); + double g; try { phase->getChemPotentials(gk.data()); + g = phase->gibbs_mole(); } catch (NotImplementedError& err) { GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } - EXPECT_NEAR(phase->gibbs_mole(), phase->mean_X(gk), atol); + EXPECT_NEAR(g, phase->mean_X(gk), atol); } TEST_P(TestConsistency, s_eq_sum_sk_Xk) { vector_fp sk(nsp); + double s; try { phase->getPartialMolarEntropies(sk.data()); + s = phase->entropy_mole(); } catch (NotImplementedError& err) { GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } - EXPECT_NEAR(phase->entropy_mole(), phase->mean_X(sk), atol); + EXPECT_NEAR(s, phase->mean_X(sk), atol); } TEST_P(TestConsistency, v_eq_sum_vk_Xk) @@ -226,12 +242,14 @@ TEST_P(TestConsistency, v_eq_sum_vk_Xk) TEST_P(TestConsistency, cp_eq_sum_cpk_Xk) { vector_fp cpk(nsp); + double cp; try { phase->getPartialMolarCp(cpk.data()); + cp = phase->cp_mole(); } catch (NotImplementedError& err) { GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } - EXPECT_NEAR(phase->cp_mole(), phase->mean_X(cpk), atol); + 
EXPECT_NEAR(cp, phase->mean_X(cpk), atol); } TEST_P(TestConsistency, cp_eq_dhdT) @@ -320,10 +338,15 @@ TEST_P(TestConsistency, cv_eq_dsdT_const_v_times_T) TEST_P(TestConsistency, dsdP_const_T_eq_minus_dV_dT_const_P) { - double s1 = phase->entropy_mole(); - double P1 = phase->pressure(); - double T1 = phase->temperature(); - double v1 = phase->molarVolume(); + double s1, P1, T1, v1; + try { + s1 = phase->entropy_mole(); + P1 = phase->pressure(); + T1 = phase->temperature(); + v1 = phase->molarVolume(); + } catch (NotImplementedError& err) { + GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; + } double P2 = P1 * (1 + 1e-6); phase->setState_TP(T1, P2); double s2 = phase->entropy_mole(); @@ -339,7 +362,12 @@ TEST_P(TestConsistency, dsdP_const_T_eq_minus_dV_dT_const_P) TEST_P(TestConsistency, dSdv_const_T_eq_dPdT_const_V) { if (phase->isCompressible()) { - double s1 = phase->entropy_mass(); + double s1; + try { + s1 = phase->entropy_mass(); + } catch (NotImplementedError& err) { + GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; + } double v1 = 1 / phase->density(); double P1 = phase->pressure(); double v2 = v1 * (1 + 1e-7); @@ -423,7 +451,11 @@ TEST_P(TestConsistency, standard_gibbs_nondim) GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } for (size_t k = 0; k < nsp; k++) { - EXPECT_NEAR(g0_RT[k] * RT , mu0[k], atol) << "k = " << k; + if (k != ke) { + EXPECT_NEAR(g0_RT[k] * RT , mu0[k], atol) << "k = " << k; + } else { + EXPECT_NEAR(g0_RT[k] * RTe , mu0[k], atol) << "k = " << k; + } } } @@ -437,9 +469,15 @@ TEST_P(TestConsistency, chem_potentials_to_activities) { GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } for (size_t k = 0; k < nsp; k++) { - double a_from_mu = exp((mu[k] - mu0[k]) / RT); - double scale = std::max(std::abs(a[k]), std::abs(a_from_mu)); - EXPECT_NEAR(a_from_mu, a[k], 1e-9 * scale + 1e-14) << "k = " << k; + if (k != ke) { + double a_from_mu = exp((mu[k] - mu0[k]) / RT); + double scale 
= std::max(std::abs(a[k]), std::abs(a_from_mu)); + EXPECT_NEAR(a_from_mu, a[k], 1e-9 * scale + 1e-14) << "k = " << k; + } else { + double a_from_mu = exp((mu[k] - mu0[k]) / RTe); + double scale = std::max(std::abs(a[k]), std::abs(a_from_mu)); + EXPECT_NEAR(a_from_mu, a[k], 1e-9 * scale + 1e-14) << "k = " << k; + } } } @@ -505,7 +543,11 @@ TEST_P(TestConsistency, hRef_eq_uRef_plus_P_vRef) GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } for (size_t k = 0; k < nsp; k++) { - EXPECT_NEAR(hRef[k] * RT, uRef[k] * RT + OneAtm * vRef[k], atol) << "k = " << k; + if (k != ke) { + EXPECT_NEAR(hRef[k] * RT, uRef[k] * RT + OneAtm * vRef[k], atol) << "k = " << k; + } else { + EXPECT_NEAR(hRef[k] * RTe, uRef[k] * RTe + OneAtm * vRef[k], atol) << "k = " << k; + } } } @@ -521,9 +563,15 @@ TEST_P(TestConsistency, gRef_eq_hRef_minus_T_sRef) GTEST_SKIP() << err.getMethod() << " threw NotImplementedError"; } for (size_t k = 0; k < nsp; k++) { - EXPECT_NEAR(gRef[k], gRef_RT[k] * RT, atol) << "k = " << k; - EXPECT_NEAR(gRef[k], hRef[k] * RT - T * sRef[k] * GasConstant, - atol) << "k = " << k; + if (k != ke) { + EXPECT_NEAR(gRef[k], gRef_RT[k] * RT, atol) << "k = " << k; + EXPECT_NEAR(gRef[k], hRef[k] * RT - T * sRef[k] * GasConstant, + atol) << "k = " << k; + } else { + EXPECT_NEAR(gRef[k], gRef_RT[k] * RTe, atol) << "k = " << k; + EXPECT_NEAR(gRef[k], hRef[k] * RTe - Te * sRef[k] * GasConstant, + atol) << "k = " << k; + } } } From cb514cdcd5984fa300925ffd8aaeb17bf54cc231 Mon Sep 17 00:00:00 2001 From: Ray Speth Date: Tue, 10 Jan 2023 17:46:04 -0500 Subject: [PATCH 92/93] Fix compareThermo test for PlasmaPhase --- test/thermo/thermoToYaml.cpp | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/test/thermo/thermoToYaml.cpp b/test/thermo/thermoToYaml.cpp index dd158d87791..ca697a379c6 100644 --- a/test/thermo/thermoToYaml.cpp +++ b/test/thermo/thermoToYaml.cpp @@ -342,6 +342,7 @@ class ThermoYamlRoundTrip : public testing::Test input1); skip_cp 
= false; skip_activities = false; + skip_entropy = false; rtol = 1e-14; } @@ -367,8 +368,10 @@ class ThermoYamlRoundTrip : public testing::Test EXPECT_NEAR(original->cp_mass(), duplicate->cp_mass(), rtol * original->cp_mass()); } - EXPECT_NEAR(original->entropy_mass(), duplicate->entropy_mass(), - rtol * fabs(original->entropy_mass())); + if (!skip_entropy) { + EXPECT_NEAR(original->entropy_mass(), duplicate->entropy_mass(), + rtol * fabs(original->entropy_mass())); + } EXPECT_NEAR(original->enthalpy_mole(), duplicate->enthalpy_mole(), rtol * fabs(original->enthalpy_mole())); @@ -403,6 +406,7 @@ class ThermoYamlRoundTrip : public testing::Test shared_ptr duplicate; bool skip_cp; bool skip_activities; + bool skip_entropy; double rtol; }; @@ -517,6 +521,8 @@ TEST_F(ThermoYamlRoundTrip, Surface) TEST_F(ThermoYamlRoundTrip, IsotropicElectronEnergyPlasma) { roundtrip("oxygen-plasma.yaml", "isotropic-electron-energy-plasma"); + skip_cp = true; // Not implemented for PlasmaPhase + skip_entropy = true; // Not implemented for PlasmaPhase compareThermo(800, 2*OneAtm); auto origPlasma = std::dynamic_pointer_cast(original); auto duplPlasma = std::dynamic_pointer_cast(duplicate); @@ -530,6 +536,8 @@ TEST_F(ThermoYamlRoundTrip, IsotropicElectronEnergyPlasma) TEST_F(ThermoYamlRoundTrip, DiscretizedElectronEnergyPlasma) { roundtrip("oxygen-plasma.yaml", "discretized-electron-energy-plasma"); + skip_cp = true; // Not implemented for PlasmaPhase + skip_entropy = true; // Not implemented for PlasmaPhase compareThermo(800, 2*OneAtm); EXPECT_DOUBLE_EQ(original->electronTemperature(), duplicate->electronTemperature()); } From 1fa20b7b753f5a25c2bbbd7021ff3618eb88385b Mon Sep 17 00:00:00 2001 From: bangshiuh Date: Wed, 4 Jan 2023 19:05:12 -0500 Subject: [PATCH 93/93] [interface, thermo] fix PlasmaPhase by adding the inheritance --- interfaces/cython/cantera/thermo.pxd | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/interfaces/cython/cantera/thermo.pxd 
b/interfaces/cython/cantera/thermo.pxd index 5e4a1aa401b..9a08b86bef6 100644 --- a/interfaces/cython/cantera/thermo.pxd +++ b/interfaces/cython/cantera/thermo.pxd @@ -186,9 +186,8 @@ cdef extern from "cantera/thermo/SurfPhase.h": cdef extern from "cantera/thermo/PlasmaPhase.h": - cdef cppclass CxxPlasmaPhase "Cantera::PlasmaPhase": + cdef cppclass CxxPlasmaPhase "Cantera::PlasmaPhase" (CxxThermoPhase): CxxPlasmaPhase() - double electronTemperature() except +translate_exception void setElectronTemperature(double) except +translate_exception void setElectronEnergyLevels(double*, size_t) except +translate_exception void getElectronEnergyLevels(double*)