diff --git a/Documentation/.vale/Vocab/ogs/accept.txt b/Documentation/.vale/Vocab/ogs/accept.txt
index f5010cf78f731ad8185efb23de7877ad9736dad4..e3fe091e5d3926e3006f5c0e72557c4832aaef79 100644
--- a/Documentation/.vale/Vocab/ogs/accept.txt
+++ b/Documentation/.vale/Vocab/ogs/accept.txt
@@ -125,6 +125,7 @@ pacman
 Palandri
 parameteri[zs]ation
 ParaView
+Pardiso
 Parisio
 Parkhurst
 partmesh
diff --git a/scripts/ci/jobs/build-guix.yml b/scripts/ci/jobs/build-guix.yml
index 415aa3d4b984776fb69e6ee810bcb10da0a98ef1..6cadbd66a98badb8856ef0a97cfd30ec6e470000 100644
--- a/scripts/ci/jobs/build-guix.yml
+++ b/scripts/ci/jobs/build-guix.yml
@@ -25,9 +25,11 @@ build guix:
       if [[ "$CI_MERGE_REQUEST_LABELS" =~ .*ci::guix\ only.* ]]; then
         guix time-machine -C scripts/guix/channels.scm -- build ogs-serial --with-source=ogs-serial=$PWD
         guix time-machine -C scripts/guix/channels.scm -- build ogs-petsc --with-source=ogs-petsc=$PWD
+        guix time-machine -C scripts/guix/channels.scm -- build ogs-petsc-mkl --with-source=ogs-petsc-mkl=$PWD --with-commit=eigen=9000b3767770f6dd0f4cfb12f4e19c71921885a4 --without-tests=eigen --with-configure-flag=vtk=-DVTK_MODULE_USE_EXTERNAL_VTK_eigen=OFF --with-source=vtk=https://www.vtk.org/files/release/9.3/VTK-9.3.1.tar.gz
       else
         guix time-machine -C scripts/guix/channels.scm -- build ogs-ssd --with-source=ogs-ssd=$PWD
         guix time-machine -C scripts/guix/channels.scm -- build ogs-petsc-ssd --with-source=ogs-petsc-ssd=$PWD
+        guix time-machine -C scripts/guix/channels.scm -- build ogs-petsc-mkl-ssd --with-source=ogs-petsc-mkl-ssd=$PWD --with-commit=eigen=9000b3767770f6dd0f4cfb12f4e19c71921885a4 --without-tests=eigen --with-configure-flag=vtk=-DVTK_MODULE_USE_EXTERNAL_VTK_eigen=OFF --with-source=vtk=https://www.vtk.org/files/release/9.3/VTK-9.3.1.tar.gz
       fi
 
 deploy container:
@@ -51,7 +53,7 @@ deploy container:
         # keep vtk source parameter: default vtk 9.3.0 download misses
         # ThirdParty/eigen/vtkeigen subdirectory. Use explicit download
         # from GitHub-tag.
-        GUIX_ARGS="--with-commit=eigen=9000b3767770f6dd0f4cfb12f4e19c71921885a4 --without-tests=eigen --with-configure-flag=vtk=-DVTK_MODULE_USE_EXTERNAL_VTK_eigen=OFF --with-source=vtk=https://github.com/Kitware/VTK/archive/refs/tags/v9.3.0.tar.gz"
+        GUIX_ARGS="--with-commit=eigen=9000b3767770f6dd0f4cfb12f4e19c71921885a4 --without-tests=eigen --with-configure-flag=vtk=-DVTK_MODULE_USE_EXTERNAL_VTK_eigen=OFF --with-source=vtk=https://www.vtk.org/files/release/9.3/VTK-9.3.1.tar.gz"
       fi
       # Add openmpi package for mpirun
       if [[ "$GUIX_PACKAGE" =~ .*petsc.* ]]; then
diff --git a/scripts/guix/channels.scm b/scripts/guix/channels.scm
index 03be40786cd69cb07f427c2a9944fc5a65aa531b..355541bc3c853640e55f3d5ff60e0c8388eb2b2d 100644
--- a/scripts/guix/channels.scm
+++ b/scripts/guix/channels.scm
@@ -2,7 +2,7 @@
         (name 'guix-ogs)
         (url "https://gitlab.opengeosys.org/ogs/inf/guix-ogs.git")
         (branch "master")
-        (commit "fd7a63510cac0ab7b93b174098e77e118de524cb"))
+        (commit "901d7522bfdb710814f26f7858fb55f57769371f"))
       (channel
         (name 'guix)
         (url "https://git.savannah.gnu.org/git/guix.git")
diff --git a/web/content/docs/userguide/basics/container/index.md b/web/content/docs/userguide/basics/container/index.md
index 81781c4277fa8ba0da286d290771fd23ae9365af..2c872c9dccee7817aa3c9ac9872fddee3c93f66f 100644
--- a/web/content/docs/userguide/basics/container/index.md
+++ b/web/content/docs/userguide/basics/container/index.md
@@ -41,7 +41,9 @@ Simply download an image from the latest master-branch build:
 
 <!-- vale off -->
 * [ogs-serial.squashfs](https://minio.ufz.de/ogs/public/container/ogs/master/ogs-serial.squashfs)
+* [ogs-mkl.squashfs](https://minio.ufz.de/ogs/public/container/ogs/master/ogs-mkl.squashfs) (with MKL Pardiso-support)
 * [ogs-petsc.squashfs](https://minio.ufz.de/ogs/public/container/ogs/master/ogs-petsc.squashfs) (with PETSC-support)
+* [ogs-petsc-mkl.squashfs](https://minio.ufz.de/ogs/public/container/ogs/master/ogs-petsc-mkl.squashfs) (with PETSC- and MKL Pardiso-support)
 <!-- vale on -->
 
 Please note that these images have `.squashfs` as the file ending. Usage is the same as with `.sif`-files.
diff --git a/web/content/docs/userguide/blocks/linear_solvers.md b/web/content/docs/userguide/blocks/linear_solvers.md
index 637de4fda695ce40d2b007511260f50dd3e92474..49e22b0097191c407aff05a1f9d5e197c8a66a53 100644
--- a/web/content/docs/userguide/blocks/linear_solvers.md
+++ b/web/content/docs/userguide/blocks/linear_solvers.md
@@ -33,3 +33,7 @@ For most cases value below $10^{-10}$ is recommended.
 ## Eigen
 
 <!-- TODO: Add description of Eigen -->
+
+## PETSc
+
+See the [Running OGS with MPI]({{< ref "parallel_computing_mpi">}}) page for details.
diff --git a/web/content/docs/userguide/features/parallel_computing_mpi.md b/web/content/docs/userguide/features/parallel_computing_mpi.md
index 5a55b08ba08cec4a2ad2022b866be5844fb0b465..879c9827fffe425508491e8a99a00d20f86cd376 100644
--- a/web/content/docs/userguide/features/parallel_computing_mpi.md
+++ b/web/content/docs/userguide/features/parallel_computing_mpi.md
@@ -99,6 +99,24 @@ An introduction and a list of PETSc KSP solvers and preconditioners can be found
 
 <!-- TODO: At best explain the example above in more detail. This can be done by comments in the `xml`-code snippet. -->
 
+<div class="note">
+
+#### PETSc with Pardiso solver
+
+If you have configured OGS (`OGS_USE_MKL=ON`) and PETSc (`--with-mkl_pardiso-dir=... --with-mkl_cpardiso-dir=...`) with MKL support, then you can run the parallel Pardiso solver, e.g.:
+
+```xml
+<petsc>
+     <parameters>-mat_type mpiaij
+                 -pc_type lu
+                 -pc_factor_mat_solver_type mkl_cpardiso</parameters>
+</petsc>
+```
+
+See the [PETSc docs](https://petsc.org/release/overview/linear_solve_table/#direct-solvers) for more info on the solver settings. The [prebuilt containers]({{< relref "container" >}}#get-a-container-image) support this configuration.
+
+</div>
+
 ### 2. Launch MPI OGS
 
 For MPI launcher, either `mpiexec` or `mpirun` can be used to run OGS.