diff --git a/dev/runtime-measurement/ctests/README.md b/dev/runtime-measurement/ctests/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..bfb9e7bfc89e89a95b04cc21156864bc86c5e2bc
--- /dev/null
+++ b/dev/runtime-measurement/ctests/README.md
@@ -0,0 +1,37 @@
+# Runtime measurements for OGS's ctests
+
+The run times of OGS's ctests are measured and compared for two builds: one
+using fixed-size Eigen matrices and one using dynamic-size Eigen matrices. The
+scripts in this directory sum up the `[time]` entries from the ctest logs and
+aggregate them into CSV tables.
+
+The final results are collected in the LibreOffice spreadsheet
+`timings_results.ods`.
+
+## Shell commands
+
+```sh
+# pin OpenMP to a single thread so that the timings are comparable
+export OMP_NUM_THREADS=1
+
+# run the ctests in every build directory
+for d in build*/; do ( cd "$d" && ctest; ); done
+
+# sum up the '[time]' entries of each ctest log and write them to
+# per-log '*_total_timings.json' files
+find build/Tests/Data/ build-fixed/Tests/Data/ -type f -name '*.log' -exec ../compute-time-shares.py --quiet --json-out {} \;
+
+# aggregate the JSON files of each build into one CSV table per build
+../aggregate_timings.py build-fixed/Tests/Data/ - | sort >timings_fixed.csv
+../aggregate_timings.py build/Tests/Data/ - | sort >timings_dynamic.csv
+```
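+
+For illustration, each `*_total_timings.json` file maps a timing category to a
+`[total seconds, count]` pair; the category names in this example are made up:
+
+```json
+{
+  "Assembly": [12.34, 100],
+  "Linear solver": [5.67, 100]
+}
+```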
diff --git a/dev/runtime-measurement/ctests/aggregate_timings.py b/dev/runtime-measurement/ctests/aggregate_timings.py
new file mode 100755
index 0000000000000000000000000000000000000000..f1c8a5d3b83e001e1648a2f370b4dc6a3dcd5f5a
--- /dev/null
+++ b/dev/runtime-measurement/ctests/aggregate_timings.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+
+"""Aggregate all '*_total_timings.json' files found beneath a root directory
+into a single CSV table with one row per test case."""
+
+import argparse
+import os
+import json
+
+csv_sep = '\t'
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument("root", type=str)
+parser.add_argument("csv_output", type=argparse.FileType("w"))
+
+args = parser.parse_args()
+
+columns = set()
+table = []
+
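+# collect one CSV row per '*_total_timings.json' file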
+for dirpath, dirnames, filenames in os.walk(args.root):
+    for filename in filenames:
+        if not filename.endswith("_total_timings.json"):
+            continue
+
+        filepath = os.path.join(dirpath, filename)
+        with open(filepath) as fh:
+            values = json.load(fh)
+
+        # the leading space makes this key sort before all timing
+        # categories, so the test case ends up in the first CSV column
+        values[" test_case"] = os.path.relpath(
+                filepath[:-len("_total_timings.json")], args.root)
+        table.append(values)
+        columns.update(values)
+
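+# order the rows by test case name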
+table.sort(key=lambda row: row[" test_case"])
+
+columns = sorted(columns)
+args.csv_output.write('#' + csv_sep.join(columns) + '\n')
+
+for row in table:
+    full_row = [''] * len(columns)
+    for i, col in enumerate(columns):
+        if col in row:
+            value = row[col]
+            if type(value) is list:
+                # keep only the cumulative time, drop the occurrence count
+                value = "{:.16g}".format(value[0])
+            full_row[i] = value
+
+    args.csv_output.write(csv_sep.join(full_row) + '\n')
+
diff --git a/dev/runtime-measurement/ctests/compute-time-shares.py b/dev/runtime-measurement/ctests/compute-time-shares.py
new file mode 100755
index 0000000000000000000000000000000000000000..5d36385965a2b8e5b855befdbed81869e0c06fed
--- /dev/null
+++ b/dev/runtime-measurement/ctests/compute-time-shares.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python3
+
+"""Sum up the '[time] ... took ... s' entries of a single OGS log file per
+category and optionally write the totals to a '*_total_timings.json' file."""
+
+import subprocess
+import shlex
+import argparse
+import json
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument("in_file", type=str)
+parser.add_argument("--json-out", action="store_true")
+parser.add_argument("--quiet", action="store_true")
+
+args = parser.parse_args()
+
+
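+# maps category -> (cumulative time in s, number of occurrences)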
+cumulative_values = {}
+max_cat_length = 0
+
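+# extract all '[time] <category> ... took <t> s' entries from the log and
+# print them as '<t> <category>', e.g., '1.23 Assembly' (made-up category)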
+proc = subprocess.Popen(
+        r'''grep -F '[time]' {} | sed -e 's_^.*[[]time[]] \([^0-9#]*\) .*took \([^ ]*\) s.*$_\2 \1_' '''
+                .format(shlex.quote(args.in_file)),
+        shell=True, stdout=subprocess.PIPE)
+
+for line in proc.stdout:
+    t, cat = line.decode("utf-8").strip().split(maxsplit=1)
+    t = float(t)
+
+    total, count = cumulative_values.get(cat, (0.0, 0))
+    cumulative_values[cat] = (total + t, count + 1)
+
+    max_cat_length = max(max_cat_length, len(cat))
+
+if not args.quiet:
+    # print the categories ordered by cumulative time, largest first
+    fmt = "{:6} times {:" + str(max_cat_length) + "} took in total {} s"
+    for cat, (t, count) in sorted(cumulative_values.items(),
+                                  key=lambda p: p[1][0], reverse=True):
+        print(fmt.format(count, cat, t))
+
+if args.json_out:
+    # tuples serialize as JSON arrays: category -> [total time, count]
+    out_file = args.in_file + "_total_timings.json"
+    with open(out_file, "w") as fh:
+        json.dump(cumulative_values, fh, indent=2)
+
diff --git a/dev/runtime-measurement/ctests/timings_results.ods b/dev/runtime-measurement/ctests/timings_results.ods
new file mode 100644
index 0000000000000000000000000000000000000000..7390a3bed4d78d8bea2fa528741a8ab5753c9184
Binary files /dev/null and b/dev/runtime-measurement/ctests/timings_results.ods differ