diff --git a/scripts/cmake/DocumentationProjectFile.cmake b/scripts/cmake/DocumentationProjectFile.cmake
index 775838331939ea855a8e8a60861e3df92fa6e8a7..593551250cb474a72edefeb6016ffdfee4830940 100644
--- a/scripts/cmake/DocumentationProjectFile.cmake
+++ b/scripts/cmake/DocumentationProjectFile.cmake
@@ -1,3 +1,6 @@
+# Moves an *.md file from the source directory to the DocAux directory in the
+# build tree, augmenting it with basic extra information such as the list of
+# child pages and the page title.
 function(documentationProjectFilePutIntoPlace p)
     file(RELATIVE_PATH relative_path ${DocumentationProjectFileInputDir} ${p})
     get_filename_component(dir_name ${relative_path} DIRECTORY)
@@ -119,6 +122,7 @@ if (IS_DIRECTORY ${DocumentationProjectFileBuildDir})
     file(REMOVE_RECURSE ${DocumentationProjectFileBuildDir})
 endif()
 
+# traverse input file hierarchy
 file(GLOB_RECURSE input_paths ${DocumentationProjectFileInputDir}/c_* ${DocumentationProjectFileInputDir}/i_*)
 
 foreach(p ${input_paths})
diff --git a/scripts/cmake/DocumentationSetup.cmake b/scripts/cmake/DocumentationSetup.cmake
index 835017459fe2e55c8226410425126292bb7c0323..8504e304650e47369c289e7e41ba7be8ea5729bd 100644
--- a/scripts/cmake/DocumentationSetup.cmake
+++ b/scripts/cmake/DocumentationSetup.cmake
@@ -57,10 +57,9 @@ if(DOXYGEN_FOUND)
         COMMENT "Generating project file documentation hierarchy." VERBATIM)
     add_dependencies(doc internal_pre_doc)
 
-    # TODO also check python
     if (doc_use_external_tools)
         set(data_dir "${PROJECT_SOURCE_DIR}/Tests/Data")
-        add_custom_target(internal_pre_doc2
+        add_custom_target(internal_pre_doc_qa_page
             ${BASH_TOOL_PATH}
             "${PROJECT_SOURCE_DIR}/scripts/doc/generate-project-file-doc-qa.sh"
             ${PROJECT_SOURCE_DIR}
@@ -68,7 +67,7 @@ if(DOXYGEN_FOUND)
             ${data_dir}
             WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
             COMMENT "Generating project file documentation quality assurance pages." VERBATIM)
-        add_dependencies(doc internal_pre_doc2)
-        add_dependencies(internal_pre_doc2 internal_pre_doc)
+        add_dependencies(doc internal_pre_doc_qa_page)
+        add_dependencies(internal_pre_doc_qa_page internal_pre_doc)
     endif()
 endif()
diff --git a/scripts/doc/append-xml-tags.py b/scripts/doc/append-xml-tags.py
index 2b242e3ced4ddc88c55daf9a953d820b66e34c18..8baf534513fda245ebaa0c9f1d9cb885a1ccda9d 100755
--- a/scripts/doc/append-xml-tags.py
+++ b/scripts/doc/append-xml-tags.py
@@ -1,5 +1,10 @@
 #!/usr/bin/python
 
+# This script augments the parameter documentation pages with information
+# such as whether a parameter is required or optional, its data type, and
+# in which end-to-end tests it is used.
+# It uses the cache file generated by normalize-param-cache.py.
+
 # prevent broken pipe error
 from signal import signal, SIGPIPE, SIG_DFL
 signal(SIGPIPE,SIG_DFL)
@@ -23,6 +28,8 @@ datadir   = os.path.abspath(args.datadir)
 docauxdir = os.path.abspath(args.docauxdir)
 docdir    = os.path.join(docauxdir, "dox", "ProjectFile")
 
+# Maps documentation entry points to the full XML tag paths that are
+# used in the prj file.
 tag_path_expansion_table = {
     "initial_condition":  "process_variables.process_variable.initial_condition",
     "boundary_condition": "process_variables.process_variable.boundary_conditions.boundary_condition",
@@ -89,7 +96,7 @@ if False:
         for f in sorted(files):
             print("   ", f)
 
-# read parameter cache
+# read parameter cache (generated by normalize-param-cache.py)
 with open(os.path.join(docauxdir, "documented-parameters-cache.txt")) as fh:
     for line in fh:
         line = line.strip().split("@@@")
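
The cache file read above is a plain-text table whose columns are joined with the literal separator "@@@" (see write_out in normalize-param-cache.py below). A minimal sketch of that round trip, assuming a hypothetical sample record since the exact field layout is not visible in this diff:

# Minimal sketch of the "@@@"-separated cache format. Only the separator
# is taken from the scripts; the sample record below is hypothetical.
def write_out(*args):
    # producer side (normalize-param-cache.py): join all columns
    print("@@@".join(str(a) for a in args))

def parse_cache_line(line):
    # consumer side (append-xml-tags.py): split the columns again
    return line.strip().split("@@@")

sample = "OK@@@SomeFile.cpp@@@123@@@some__parameter__path"  # hypothetical record
print(parse_cache_line(sample))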
diff --git a/scripts/doc/check-project-params.py b/scripts/doc/check-project-params.py
index e7e7781bd1cbdd5e851bcd495eb75fc33f53e886..f54a0969219f330878cc1c350de2bd3257b5a338 100755
--- a/scripts/doc/check-project-params.py
+++ b/scripts/doc/check-project-params.py
@@ -1,5 +1,8 @@
 #!/usr/bin/python
 
+# This script does the actual generation of the QA page.
+# For its usage see generate-project-file-doc-qa.sh.
+
 import sys
 import re
 import os.path
diff --git a/scripts/doc/create-docu-file-stubs.sh b/scripts/doc/create-docu-file-stubs.sh
index 5a38fada92ee53de94ed880d8b1f408e397083ba..2cb2867cebcef823d72c421edc79581d5fff05fa 100755
--- a/scripts/doc/create-docu-file-stubs.sh
+++ b/scripts/doc/create-docu-file-stubs.sh
@@ -1,6 +1,12 @@
 #!/bin/sh
 
-# expect input from get-project-params.sh
+# This script creates missing files in the OGS input file documentation source tree.
+# It expects input from get-project-params.sh.
+#
+# The working directory of this script must be the root of the OGS sources.
+#
+# Example:
+# scripts/doc/get-project-params.sh . | scripts/doc/create-docu-file-stubs.sh
 
 base="Documentation/ProjectFile"
 
diff --git a/scripts/doc/generate-project-file-doc-qa.sh b/scripts/doc/generate-project-file-doc-qa.sh
index 77c3a226346a0c865af4e0fd0c5b49370d3596e7..bcae60efd9c9670f02a5cb5dcbf4e1548a4b7bc2 100644
--- a/scripts/doc/generate-project-file-doc-qa.sh
+++ b/scripts/doc/generate-project-file-doc-qa.sh
@@ -1,6 +1,8 @@
 #!/bin/bash
 
-echo "======== $@"
+# This script creates the quality assurance page and augments the
+# input file parameter documentation with information about the
+# end-to-end tests in which the respective parameters are used.
 
 if [ $# -ne 3 ]; then
     echo "USAGE: $0 SRCDIR BUILDDIR DATADIR" >&2
diff --git a/scripts/doc/get-project-params.sh b/scripts/doc/get-project-params.sh
index 09cd9e99e90102c056f5da5bbff296f1c40f8cc4..754b21adc92d4aa23f9d6c783fcbdf4cb50517ad 100755
--- a/scripts/doc/get-project-params.sh
+++ b/scripts/doc/get-project-params.sh
@@ -1,5 +1,10 @@
 #!/bin/bash
 
+# This script traverses the OGS source tree (*.h and *.cpp files) and
+# makes a list of all input file parameter related Doxygen commands,
+# i.e., those beginning with \ogs, and of all ConfigTree related C++
+# code.
+
 if [ $# -ne 1 ]; then
     echo "USAGE: ${0##*/} SRCDIR" >&2
     exit 1
@@ -11,7 +16,7 @@ srcdir="`readlink -f "$1"`"
 color=""
 
 cat <<"EOF" \
-| grep -r $srcdir \
+| grep -r "$srcdir" \
     --include '*.h' \
     --include '*.cpp' \
     --exclude-dir '.git' \
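
For illustration, a rough Python equivalent of what this traversal collects: it walks the source tree's *.h and *.cpp files and keeps lines containing \ogs_file_ Doxygen commands or ConfigTree getter calls. The real script is a grep pipeline whose full pattern is not visible in this hunk, so the regular expressions below (reusing the getter pattern from normalize-param-cache.py) are only an approximation:

# Approximate sketch of the traversal; not the actual grep pattern.
import os
import re
import sys

OGS_DOC = re.compile(r"\\ogs_file_")  # Doxygen input file documentation commands
GETTER = re.compile(r"(get|check|ignore|peek)Conf(Param|Attribute|Subtree)")  # ConfigTree calls

def collect_param_lines(srcdir):
    for dirpath, _, filenames in os.walk(srcdir):
        if ".git" in dirpath.split(os.sep):
            continue  # mirrors --exclude-dir '.git'
        for name in filenames:
            if not name.endswith((".h", ".cpp")):
                continue  # mirrors --include '*.h' --include '*.cpp'
            path = os.path.join(dirpath, name)
            with open(path, errors="replace") as fh:
                for lineno, line in enumerate(fh, start=1):
                    if OGS_DOC.search(line) or GETTER.search(line):
                        yield path, lineno, line.rstrip()

if __name__ == "__main__":
    for path, lineno, line in collect_param_lines(sys.argv[1]):
        print(path, lineno, line)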
diff --git a/scripts/doc/get-undocumented-project-params.py b/scripts/doc/get-undocumented-project-params.py
deleted file mode 100755
index 238b0bbddccd4c70ef26da41b48ba0976189db1f..0000000000000000000000000000000000000000
--- a/scripts/doc/get-undocumented-project-params.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/python
-
-# expect input from get-project-params.sh
-
-import sys
-import subprocess
-
-print_next = True
-
-old_fn = None
-undoc_lnos = []
-
-def add_doc_stubs(fn, lnos):
-    if not lnos: return
-
-    print(fn, lnos)
-    cmd = ["sed", "-i"]
-    for lno in lnos:
-        cmd.append("-e")
-        cmd.append(str(lno) + r""" i \
-//! \\ogs_file_param{todo_document_parameter} \\todo project_file_docu
-//! \\ogs_file_param{todo_document_parameter} \\todo project_file_docu
-""")
-    cmd.append(fn)
-    subprocess.run(cmd)
-    del lnos[:]
-
-
-for line in sys.stdin:
-    fn, l, content = line.split(maxsplit=2)
-    if fn != old_fn:
-        add_doc_stubs(old_fn, undoc_lnos)
-        old_fn = fn
-
-    if content.startswith("//!"):
-        print_next = False
-    elif print_next:
-        # print(line.rstrip())
-        undoc_lnos.append(l)
-    else:
-        print_next = True
-
-add_doc_stubs(old_fn, undoc_lnos)
diff --git a/scripts/doc/normalize-param-cache.py b/scripts/doc/normalize-param-cache.py
index acd524bdabd60ff9ce3c07f8fdf46aec44f7c341..76b084a4f72ed3b21172c589ecaa90ac332dab18 100755
--- a/scripts/doc/normalize-param-cache.py
+++ b/scripts/doc/normalize-param-cache.py
@@ -1,5 +1,9 @@
 #!/usr/bin/python
 
+# This script takes the output of get-project-params.sh on stdin
+# and transforms it into a tabular representation for further
+# processing.
+
 import sys
 import re
 import os.path
@@ -10,10 +14,9 @@ def debug(msg):
 def write_out(*args):
     print("@@@".join([str(a) for a in args]))
 
-# capture #1 is the parameter path
+# capture #2 is the parameter path
 comment = re.compile(r"^//! \\ogs_file_(param|attr)\{([A-Za-z_0-9]+)\}( \\todo .*)?$")
 comment_special = re.compile(r"^//! \\ogs_file(_param|_attr)?_special(\{[A-Za-z_0-9]+\})?( \\todo .*)?$")
-#comment_special = re.compile(r"^//! \\ogs_file_special$")
 
 # capture #5 is the parameter name
 getter = re.compile(r'^(get|check|ignore|peek)Conf(Param|Attribute|Subtree)(List|Optional|All)?'
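
A quick check of the corrected comment above: for the comment regex, capture #1 is the param/attr discriminator and capture #2 is the parameter path. A minimal demonstration, with a made-up parameter path:

# Shows that capture #2 of the `comment` regex is the parameter path.
# The concrete path used here is hypothetical.
import re

comment = re.compile(
    r"^//! \\ogs_file_(param|attr)\{([A-Za-z_0-9]+)\}( \\todo .*)?$")

line = r"//! \ogs_file_param{prj__processes__process}"  # hypothetical source line
m = comment.match(line)
print(m.group(1))  # "param" -- the param/attr discriminator (capture #1)
print(m.group(2))  # "prj__processes__process" -- the parameter path (capture #2)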
diff --git a/scripts/doc/print-xml-tags.py b/scripts/doc/print-xml-tags.py
deleted file mode 100755
index a4831ea0a37a0de2375a6bb09ad58f68f18ef854..0000000000000000000000000000000000000000
--- a/scripts/doc/print-xml-tags.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/python
-
-# prevent broken pipe error
-from signal import signal, SIGPIPE, SIG_DFL
-signal(SIGPIPE,SIG_DFL)
-
-import os
-
-import xml.etree.cElementTree as ET
-
-import argparse
-
-parser = argparse.ArgumentParser(description="Print XML tags")
-
-parser.add_argument("ext",  help="Extension of files to consider")
-parser.add_argument("path", help="Top level directory of traversal")
-
-args = parser.parse_args()
-rootdir = os.path.abspath(args.path)
-extension = '.' + args.ext
-
-# maps tags to the set of xml files they appear in
-dict_tag_files = dict()
-
-def dict_of_set_append(dict_, key, value):
-    if key in dict_:
-        dict_[key].add(value)
-    else:
-        dict_[key] = set((value,))
-
-
-def print_tags(node, path, level, filepath):
-    global dict_tag_files
-
-    tag = node.tag
-    if level>1: # skip root node
-        tagpath = path + "." + tag
-    else:
-        tagpath = tag
-
-    if level>0: # skip root node
-        dict_of_set_append(dict_tag_files, "T | " + tagpath, filepath)
-        for k in node.attrib:
-            dict_of_set_append(dict_tag_files, "A | " + tagpath + "." + k, filepath)
-
-    for child in node:
-        print_tags(child, tagpath, level + 1, filepath)
-
-
-for (dirpath, _, filenames) in os.walk(rootdir):
-    for f in filenames:
-        if not f.endswith(extension): continue
-
-        filepath = os.path.join(dirpath, f)
-        xmlroot = ET.parse(filepath).getroot()
-        print_tags(xmlroot, "", 0, filepath[len(rootdir)+1:])
-
-first = True
-for (tag, files) in sorted(dict_tag_files.items()):
-    if first:
-        first = False
-    else:
-        print()
-
-    print(tag)
-    for f in sorted(files):
-        print("   ", f)