diff --git a/MeshLib/IO/XDMF/HdfData.cpp b/MeshLib/IO/XDMF/HdfData.cpp
index b32c114b36e6330c812be9604153c242409d6119..76b3ce2da9a83ebb5fec5a0b5361317ede20ae23 100644
--- a/MeshLib/IO/XDMF/HdfData.cpp
+++ b/MeshLib/IO/XDMF/HdfData.cpp
@@ -72,5 +72,5 @@ HdfData::HdfData(void const* data_start, std::size_t const size_partitioned_dim,
         "{:d}, file_space: {:d}, tuples: {:d}",
         name, partition_info.local_offset, data_space[0], chunk_space[0],
         file_space[0], size_tuple);
-    }
+}
 }  // namespace MeshLib::IO
\ No newline at end of file
diff --git a/MeshLib/IO/XDMF/HdfWriter.cpp b/MeshLib/IO/XDMF/HdfWriter.cpp
index 9f3ee82fe9be56200b2b3b5c4b44288e503971a5..76b1f2812731ce5eb7d8423814d9ab48a361c8b9 100644
--- a/MeshLib/IO/XDMF/HdfWriter.cpp
+++ b/MeshLib/IO/XDMF/HdfWriter.cpp
@@ -18,7 +18,6 @@
 #include "BaseLib/Error.h"
 #include "BaseLib/Logging.h"
 #include "fileIO.h"
-
 template <typename... Args>
 void checkHdfStatus(const hid_t status, std::string const& formatting,
                     Args&&... args)
@@ -35,11 +34,6 @@ using namespace MeshLib::IO;
 
 using namespace std::string_literals;
 
-static std::string getTimeSection(int const step)
-{
-    return "t_"s + std::to_string(step);
-}
-
 static bool checkCompression()
 {
     // Check if gzip compression is available and can be used for both
@@ -60,83 +54,109 @@ static bool checkCompression()
     return true;
 }
 
-static hid_t createStepGroup(hid_t const& file, int const step)
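+// Returns a copy of dimensions with prepend_value inserted at the front;
+// used to add the leading time dimension to the spaces passed to HDF5.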
+static std::vector<Hdf5DimType> prependDimension(
+    Hdf5DimType const prepend_value, std::vector<Hdf5DimType> const& dimensions)
 {
-    std::string const& time_section = getTimeSection(step);
-
-    // Open or create Group
-    if (H5Lexists(file, time_section.c_str(), H5P_DEFAULT) > 0)
-    {
-        return H5Gopen2(file, time_section.c_str(), H5P_DEFAULT);
-    }
-    return H5Gcreate2(file, time_section.c_str(), H5P_DEFAULT, H5P_DEFAULT,
-                      H5P_DEFAULT);
+    std::vector<Hdf5DimType> dims = {prepend_value};
+    dims.insert(dims.end(), dimensions.begin(), dimensions.end());
+    return dims;
 }
 
-static hid_t writeDataSet(
-    void const* nodes_data,  // what
-    hid_t const data_type,
-    std::vector<Hdf5DimType> const& data_dims,  // how ...
-    std::vector<Hdf5DimType> const& dim_offsets,
-    std::vector<Hdf5DimType> const& dim_maxs,
-    [[maybe_unused]] std::vector<Hdf5DimType> const& chunk_dim,
-    bool use_compression,
-    hid_t const section,
-    std::string const& dataset_name)  // where
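+// Creates an extensible data set whose leading dimension is time
+// (H5S_UNLIMITED); writeDataSet() appends one time slab per step.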
+static hid_t createDataSet(
+    hid_t const data_type, std::vector<Hdf5DimType> const& data_dims,
+    std::vector<Hdf5DimType> const& max_dims,
+    [[maybe_unused]] std::vector<Hdf5DimType> const& chunk_dims,
+    bool const use_compression, hid_t const section,
+    std::string const& dataset_name)
 {
-    int const dim_size = data_dims.size();
-    hid_t const memspace =
-        H5Screate_simple(dim_size, data_dims.data(), nullptr);
-    hid_t const filespace =
-        H5Screate_simple(dim_size, dim_maxs.data(), nullptr);
+    int const time_dim_local_size = data_dims.size() + 1;
+
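+    // The data set is created with room for a single time slab; the leading
+    // time dimension is H5S_UNLIMITED so it can grow with each step.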
+    std::vector<Hdf5DimType> time_max_dims =
+        prependDimension(H5S_UNLIMITED, max_dims);
+    std::vector<Hdf5DimType> time_data_global_dims =
+        prependDimension(1, max_dims);
+
+    hid_t fspace =
+        H5Screate_simple(time_dim_local_size, time_data_global_dims.data(),
+                         time_max_dims.data());
+    assert(fspace >= 0);
 
-    hid_t dataset_property = H5Pcreate(H5P_DATASET_CREATE);
+    hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    assert(dcpl >= 0);
 
-    if (use_compression )
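+    // Chunked layout is required for data sets with an unlimited dimension;
+    // one chunk spans one full time slab of the global data.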
+    hid_t status =
+        H5Pset_chunk(dcpl, time_dim_local_size, time_data_global_dims.data());
+    if (status < 0)
     {
-        hid_t status =
-            H5Pset_chunk(dataset_property, dim_size, dim_maxs.data());
-        if (status != 0)
-        {
-            ERR("H5Pset_layout failed for data set: {:s}.", dataset_name);
-        }
-        H5Pset_deflate(dataset_property, default_compression_factor);
+        OGS_FATAL("H5Pset_layout failed for data set: {:s}.", dataset_name);
     }
 
-    hid_t const dataset =
-        H5Dcreate2(section, dataset_name.c_str(), data_type, filespace,
-                   H5P_DEFAULT, dataset_property, H5P_DEFAULT);
+    if (use_compression)
+    {
+        H5Pset_deflate(dcpl, default_compression_factor);
+    }
 
-    H5Pclose(dataset_property);
-    H5Sclose(filespace);
+    hid_t dataset = H5Dcreate2(section, dataset_name.c_str(), data_type, fspace,
+                               H5P_DEFAULT, dcpl, H5P_DEFAULT);
 
-    hid_t const dataset_filespace = H5Dget_space(dataset);
+    assert(dataset >= 0);
+    H5Pclose(dcpl);
+    H5Sclose(fspace);
 
-    std::vector<hsize_t> const stride(dim_size, 1);
-    std::vector<hsize_t> const count(dim_size, 1);
-    std::vector<hsize_t> const block = data_dims;
+    return dataset;
+}
+
+/**
+ * \brief Writes a single time step into a dataset that has already been
+ *        created by createDataSet.
+ * \details Defines what (nodes_data, data_type) will be written, how (data
+ *          subsections: data_dims, offset_dims, max_dims, chunk_dims, step)
+ *          and where (dataset, dataset_name).
+ */
+static void writeDataSet(
+    void const* nodes_data,  // what
+    hid_t const data_type,
+    std::vector<Hdf5DimType> const& data_dims,  // how ...
+    std::vector<Hdf5DimType> const& offset_dims,
+    std::vector<Hdf5DimType> const& max_dims,
+    [[maybe_unused]] std::vector<Hdf5DimType> const& chunk_dims,
+    std::string const& dataset_name, int step, hid_t dataset)  // where
+{
+    Hdf5DimType const hdf_step = step;
+    Hdf5DimType const time_steps = hdf_step + 1;
 
-    hid_t status = H5Sselect_hyperslab(dataset_filespace, H5S_SELECT_SET,
-                                 dim_offsets.data(), stride.data(),
-                                 count.data(), block.data());
-    if (status != 0)
+    std::vector<Hdf5DimType> time_data_local_dims = data_dims;
+    std::vector<Hdf5DimType> time_max_dims =
+        prependDimension(time_steps, max_dims);
+    std::vector<Hdf5DimType> time_offsets =
+        prependDimension(hdf_step, offset_dims);
+    std::vector<hsize_t> count = prependDimension(1, time_data_local_dims);
+
+    hid_t const io_transfer_property = createHDF5TransferPolicy();
+
+    hid_t const mspace = H5Screate_simple(
+        time_data_local_dims.size(), time_data_local_dims.data(), nullptr);
+    H5Sselect_all(mspace);
+
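+    // Grow the data set so it holds step + 1 time slabs, then select the
+    // hyperslab of the current step in the file space for writing.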
+    hid_t status = H5Dset_extent(dataset, time_max_dims.data());
+    if (status < 0)
     {
-        ERR("H5Sselect_hyperslab failed in dataset '{:s}'.", dataset_name);
+        OGS_FATAL("H5D set extent failed dataset '{:s}'.", dataset_name);
     }
+    hid_t fspace = H5Dget_space(dataset);
 
-    hid_t const io_transfer_property = createHDF5TransferPolicy();
-    status = H5Dwrite(dataset, data_type, memspace, dataset_filespace,
-                      io_transfer_property, nodes_data);
-    if (status != 0)
+    H5Sselect_hyperslab(fspace, H5S_SELECT_SET, time_offsets.data(), nullptr,
+                        count.data(), nullptr);
+
+    status = H5Dwrite(dataset, data_type, mspace, fspace, io_transfer_property,
+                      nodes_data);
+    if (status < 0)
     {
-        ERR("H5Dwrite failed in dataset '{:s}'.", dataset_name);
+        OGS_FATAL("H5Dwrite failed in dataset '{:s}'.", dataset_name);
     }
 
-    H5Dclose(dataset);
-    status = H5Sclose(memspace);
+    H5Sclose(fspace);
+    H5Sclose(mspace);
     H5Pclose(io_transfer_property);
 
-    return (status >= 0 ? 1 : 0);
 }
 namespace MeshLib::IO
 {
@@ -151,49 +171,62 @@ HdfWriter::HdfWriter(std::vector<HdfData> constant_attributes,
       _use_compression(checkCompression() && use_compression),
       _file(createFile(filepath))
 {
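+    // All data sets are collected in a single "data" group; time-dependent
+    // attributes grow along their leading time dimension instead of being
+    // written into one group per time step.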
+    _group = H5Gcreate2(_file, "data", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+
+    auto createAndWriteDataSet = [&](auto const& attribute) -> hid_t {
+        hid_t dataset = createDataSet(
+            attribute.data_type, attribute.data_space, attribute.file_space,
+            attribute.chunk_space, _use_compression, _group, attribute.name);
 
-    std::string const& time_section = getTimeSection(step);
-    hid_t const group_id = H5Gcreate2(_file, time_section.c_str(), H5P_DEFAULT,
-                                      H5P_DEFAULT, H5P_DEFAULT);
+        checkHdfStatus(dataset, "Creating HDF5 Dataset: {:s} failed.",
+                       attribute.name);
+        writeDataSet(attribute.data_start, attribute.data_type,
+                     attribute.data_space, attribute.offsets,
+                     attribute.file_space, attribute.chunk_space,
+                     attribute.name, step, dataset);
+        return dataset;
+    };
 
     for (auto const& attribute : constant_attributes)
     {
-        hid_t status = writeDataSet(attribute.data_start, attribute.data_type,
-                                    attribute.data_space, attribute.offsets,
-                                    attribute.file_space, attribute.chunk_space,
-                                    _use_compression, group_id, attribute.name);
-
-        checkHdfStatus(status, "Writing HDF5 Dataset: {:s} failed.",
-                       attribute.name);
+        hid_t dataset = createAndWriteDataSet(attribute);
+        H5Dclose(dataset);
     }
 
-    hid_t status = H5Gclose(group_id);
-
-    checkHdfStatus(status, "HDF group could not be created!");
+    for (auto const& attribute : _variable_attributes)
+    {
+        hid_t dataset = createAndWriteDataSet(attribute);
+        // datasets are kept open
+        _datasets.insert({attribute.name, dataset});
+    }
 }
 
 HdfWriter::~HdfWriter()
 {
+    for (auto& dataset : _datasets)
+    {
+        H5Dclose(dataset.second);
+    }
+    H5Gclose(_group);
     H5Fclose(_file);
 }
 
 bool HdfWriter::writeStep(int const step) const
 {
-    hid_t const group = createStepGroup(_file, step);
-
-    hid_t status = 0;
     for (auto const& attribute : _variable_attributes)
     {
-        status = writeDataSet(attribute.data_start, attribute.data_type,
-                              attribute.data_space, attribute.offsets,
-                              attribute.file_space, attribute.chunk_space,
-                              _use_compression, group, attribute.name);
+        auto const dataset_hid = _datasets.find(attribute.name);
+        if (dataset_hid == _datasets.end())
+        {
+            OGS_FATAL("Writing HDF5 Dataset: {:s} failed.", attribute.name);
+        }
 
-        checkHdfStatus(status, "Writing HDF5 Dataset: {:s} failed.",
-                       attribute.name);
+        writeDataSet(attribute.data_start, attribute.data_type,
+                     attribute.data_space, attribute.offsets,
+                     attribute.file_space, attribute.chunk_space,
+                     attribute.name, step, dataset_hid->second);
     }
 
-    H5Gclose(group);
-    return (status >= 0);
+    return true;
 }
 }  // namespace MeshLib::IO
diff --git a/MeshLib/IO/XDMF/HdfWriter.h b/MeshLib/IO/XDMF/HdfWriter.h
index ec060b93f9336b55e0a39da41c17d28d722a0bc3..6f62beba496c103716e7b4273c9cef5b56146237 100644
--- a/MeshLib/IO/XDMF/HdfWriter.h
+++ b/MeshLib/IO/XDMF/HdfWriter.h
@@ -14,6 +14,7 @@
 #include <filesystem.h>
 #include <hdf5.h>
 
+#include <map>
 #include <vector>
 
 #include "HdfData.h"
@@ -58,6 +59,8 @@ private:
     std::vector<HdfData> const _variable_attributes;
     std::filesystem::path const _hdf5_filepath;
     bool const _use_compression;
-    hid_t _file;
+    hid_t const _file;
+    hid_t _group;
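+    // Open HDF5 datasets of time-dependent attributes, keyed by their name.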
+    std::map<std::string, hid_t> _datasets;
 };
 }  // namespace MeshLib::IO
\ No newline at end of file
diff --git a/MeshLib/IO/XDMF/Xdmf3Writer.cpp b/MeshLib/IO/XDMF/Xdmf3Writer.cpp
index 43fc7eb9cd6fdee24b7e8cc89f781ed94303633d..91556c378c9757f8173be3d8792c5e10f19e8510 100644
--- a/MeshLib/IO/XDMF/Xdmf3Writer.cpp
+++ b/MeshLib/IO/XDMF/Xdmf3Writer.cpp
@@ -44,19 +44,27 @@ boost::shared_ptr<const XdmfAttributeCenter> elemTypeOGS2XDMF(
     return mesh_item_type_ogs2xdmf.at(elem_type);
 }
 
-static std::string getTimeSection(int const step, std::string const& name)
+static std::string getDataSection(std::string const& name)
 {
-    return "t_"s + std::to_string(step) + "/"s + name;
+    return "data/"s + name;
+}
+
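+// Prepends the time dimension to the per-attribute dimension vectors used by
+// the XdmfHDF5Controller selections.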
+static std::vector<XdmfDimType> prependDimension(
+    XdmfDimType const prepend_value, std::vector<XdmfDimType> const& dimensions)
+{
+    std::vector<XdmfDimType> dims = {prepend_value};
+    dims.insert(dims.end(), dimensions.begin(), dimensions.end());
+    return dims;
 }
 
 static boost::shared_ptr<XdmfGeometry> getLightGeometry(
-    std::string const& hdf5filename, int const step, XdmfData const& geometry)
+    std::string const& hdf5filename, XdmfData const& geometry)
 {
     auto xdmf_geometry = XdmfGeometry::New();
     xdmf_geometry->setType(XdmfGeometryType::XYZ());
     boost::shared_ptr<XdmfHDF5Controller> geometry_controller =
         XdmfHDF5Controller::New(hdf5filename,
-                                getTimeSection(step, "geometry"),
+                                getDataSection("geometry"),
                                 XdmfArrayType::Float64(),
                                 geometry.starts,
                                 geometry.strides,
@@ -67,13 +75,13 @@ static boost::shared_ptr<XdmfGeometry> getLightGeometry(
 }
 
 static boost::shared_ptr<XdmfTopology> getLightTopology(
-    std::string const& hdf5filename, int const step, XdmfData const& topology)
+    std::string const& hdf5filename, XdmfData const& topology)
 {
     auto xdmf_topology = XdmfTopology::New();
     xdmf_topology->setType(XdmfTopologyType::Mixed());
     auto topology_controller =
         XdmfHDF5Controller::New(hdf5filename,
-                                getTimeSection(step, "topology"),
+                                getDataSection("topology"),
                                 XdmfArrayType::Int32(),
                                 topology.starts,
                                 topology.strides,
@@ -86,14 +94,21 @@ static boost::shared_ptr<XdmfTopology> getLightTopology(
 static boost::shared_ptr<XdmfAttribute> getLightAttribute(
     std::string const& hdf5filename, int const step, XdmfData const& attribute)
 {
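+    // The heavy data carries a leading time dimension: select the single slab
+    // written at this step out of the step + 1 slabs in the HDF5 data set.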
+    std::vector<XdmfDimType> starts = prependDimension(step, attribute.starts);
+    std::vector<XdmfDimType> strides = prependDimension(1, attribute.strides);
+    std::vector<XdmfDimType> global_block_dims =
+        prependDimension(1, attribute.global_block_dims);
+    std::vector<XdmfDimType> all_global_block_dims =
+        prependDimension(step + 1, attribute.global_block_dims);
+
     auto const attribute_controller =
         XdmfHDF5Controller::New(hdf5filename,
-                                getTimeSection(step, attribute.name),
+                                getDataSection(attribute.name),
                                 attribute.data_type,
-                                attribute.starts,
-                                attribute.strides,
-                                attribute.global_block_dims,
-                                attribute.global_block_dims);
+                                starts,
+                                strides,
+                                global_block_dims,
+                                all_global_block_dims);
 
     auto const xdmf_attribute = XdmfAttribute::New();
     auto const center = elemTypeOGS2XDMF(*(attribute.attribute_center));
@@ -113,8 +128,8 @@ Xdmf3Writer::Xdmf3Writer(XdmfData const& geometry, XdmfData const& topology,
     : _variable_attributes(std::move(variable_attributes)),
       _hdf5filename(filepath.stem().string() + ".h5")
 {
-    _initial_geometry = getLightGeometry(_hdf5filename, time_step, geometry);
-    _initial_topology = getLightTopology(_hdf5filename, time_step, topology);
+    _initial_geometry = getLightGeometry(_hdf5filename, geometry);
+    _initial_topology = getLightTopology(_hdf5filename, topology);
 
     std::transform(
         constant_attributes.begin(), constant_attributes.end(),
diff --git a/MeshLib/IO/XDMF/XdmfData.cpp b/MeshLib/IO/XDMF/XdmfData.cpp
index 5d734997321edeafda2ae8d291fcec71a2db2825..a2e0462e48e4caeb339fa2363a15ba03eff9c443 100644
--- a/MeshLib/IO/XDMF/XdmfData.cpp
+++ b/MeshLib/IO/XDMF/XdmfData.cpp
@@ -47,13 +47,31 @@ static boost::shared_ptr<XdmfArrayType const> MeshPropertyDataType2XdmfType(
     }
 }
 
-XdmfData::XdmfData(std::size_t size_partitioned_dim,
+XdmfData::XdmfData(std::size_t const size_partitioned_dim,
                    std::size_t const size_tuple,
                    MeshPropertyDataType const mesh_property_data_type,
                    std::string const& name,
                    std::optional<MeshLib::MeshItemType> const attribute_center)
-    : starts{0, 0, 0},
-      strides{1, 1, 1},
+    : starts([&size_tuple]() {
+          if (size_tuple > 1)
+          {
+              return std::vector<XdmfDimType>{0, 0};
+          }
+          else
+          {
+              return std::vector<XdmfDimType>{0};
+          }
+      }()),
+      strides([&size_tuple]() {
+          if (size_tuple > 1)
+          {
+              return std::vector<XdmfDimType>{1};
+          }
+          else
+          {
+              return std::vector<XdmfDimType>{1, 1};
+          }
+      }()),
       name(name),
       attribute_center(attribute_center)
 {
@@ -64,12 +82,20 @@ XdmfData::XdmfData(std::size_t size_partitioned_dim,
     auto const ui_global_components =
         static_cast<unsigned int>(partition_info.global_length);
     auto const ui_tuple_size = static_cast<unsigned int>(size_tuple);
-    global_block_dims = {ui_global_components, ui_tuple_size};
+
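+    // Scalar properties (size_tuple == 1) are written as 1D arrays, vector or
+    // tensor properties as 2D arrays; starts and strides use the same rank.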
+    if (ui_tuple_size == 1)
+    {
+        global_block_dims = {ui_global_components};
+    }
+    else
+    {
+        global_block_dims = {ui_global_components, ui_tuple_size};
+    }
+
     data_type = MeshPropertyDataType2XdmfType(mesh_property_data_type);
     DBUG(
         "XDMF: dataset name: {:s}, offset: {:d} "
         "global_blocks: {:d}, tuples: {:d}",
-        name, partition_info.local_offset,
-        global_block_dims[0], ui_tuple_size);
+        name, partition_info.local_offset, global_block_dims[0], ui_tuple_size);
 }
 }  // namespace MeshLib::IO
\ No newline at end of file
diff --git a/MeshLib/IO/XDMF/XdmfHdfWriter.cpp b/MeshLib/IO/XDMF/XdmfHdfWriter.cpp
index de17a4261435bb7a55ea03843593025478db7777..a33354b118fca1c5ca1b66b321d9ff352319f295 100644
--- a/MeshLib/IO/XDMF/XdmfHdfWriter.cpp
+++ b/MeshLib/IO/XDMF/XdmfHdfWriter.cpp
@@ -21,7 +21,7 @@ XdmfHdfWriter::XdmfHdfWriter(MeshLib::Mesh const& mesh,
                              std::filesystem::path const& filepath,
                              int const time_step,
                              std::set<std::string>
-                             variable_output_names,
+                                 variable_output_names,
                              bool const use_compression)
 {
     // transform Data into contiguous data and collect meta data
diff --git a/ProcessLib/LiquidFlow/Tests.cmake b/ProcessLib/LiquidFlow/Tests.cmake
index f7464e29de7ff87cab148a87cc13dc1c118b96a5..e00f3a14fda68217faf20b5ad32b9aa817a718a7 100644
--- a/ProcessLib/LiquidFlow/Tests.cmake
+++ b/ProcessLib/LiquidFlow/Tests.cmake
@@ -475,7 +475,7 @@ AddTest(
     WRAPPER time
     TESTER xdmfdiff
     # See https://gitlab.opengeosys.org/ogs/ogs/-/merge_requests/3184#note_85104
-    REQUIREMENTS NOT OGS_USE_MPI AND NOT COMPILER_IS_APPLE_CLANG
+    REQUIREMENTS NOT OGS_USE_MPI AND OGS_USE_XDMF AND NOT COMPILER_IS_APPLE_CLANG
     DIFF_DATA
     square_5x5_tris_32.xdmf square_5x5_tris_32.xdmf pressure pressure 1e-7 1e-13
     square_5x5_tris_32.xdmf square_5x5_tris_32.xdmf HydraulicFlow HydraulicFlow 1e-7 1e-13
diff --git a/Tests/Data/EllipticPETSc/cube_1e3_XDMF.prj b/Tests/Data/EllipticPETSc/cube_1e3_XDMF.prj
index a7cc316d1bce564ad2bbeab61c777e76e4d2726e..ca8fd5dc1a466536c960173f1f27cd0819424cc0 100644
--- a/Tests/Data/EllipticPETSc/cube_1e3_XDMF.prj
+++ b/Tests/Data/EllipticPETSc/cube_1e3_XDMF.prj
@@ -45,7 +45,15 @@
                     <type>BackwardEuler</type>
                 </time_discretization>
                 <time_stepping>
-                    <type>SingleStep</type>
+                    <type>FixedTimeStepping</type>
+                    <t_initial>0.0</t_initial>
+                    <t_end>1e-1</t_end>
+                    <timesteps>
+                        <pair>
+                            <repeat>1</repeat>
+                            <delta_t>1e-1</delta_t>
+                        </pair>
+                    </timesteps>
                 </time_stepping>
             </process>
         </processes>
diff --git a/Tests/Data/Parabolic/LiquidFlow/SimpleSynthetics/XDMF/FunctionParameterTest_XDMF.prj b/Tests/Data/Parabolic/LiquidFlow/SimpleSynthetics/XDMF/FunctionParameterTest_XDMF.prj
index 2eed785076be2f34dd4dcf08428b187615345019..285a2e1d36e5641e943d6a1b203d4c682d14f5e6 100644
--- a/Tests/Data/Parabolic/LiquidFlow/SimpleSynthetics/XDMF/FunctionParameterTest_XDMF.prj
+++ b/Tests/Data/Parabolic/LiquidFlow/SimpleSynthetics/XDMF/FunctionParameterTest_XDMF.prj
@@ -132,8 +132,6 @@ boundary meshes will be written as well -->
             <suffix>_ts_{:timestep}_t_{:time}</suffix>
             <meshes>
                 <mesh>square_5x5_tris_32</mesh>
-                <mesh>square_5x5_tris_32_left_boundary</mesh>
-                <mesh>square_5x5_tris_32_right_boundary</mesh>
             </meshes>
             <timesteps>
                 <pair>