Commit afebb7be authored by Christoph Lehmann

[scr] documented doc scripts

parent 046947c4
+# Moves an *.md file from the src directory to the DocAux directory in the
+# build tree, augmenting it with basic extra information, such as the list
+# of child pages and the page title.
 function(documentationProjectFilePutIntoPlace p)
     file(RELATIVE_PATH relative_path ${DocumentationProjectFileInputDir} ${p})
     get_filename_component(dir_name ${relative_path} DIRECTORY)
@@ -119,6 +122,7 @@ if (IS_DIRECTORY ${DocumentationProjectFileBuildDir})
     file(REMOVE_RECURSE ${DocumentationProjectFileBuildDir})
 endif()
 
+# traverse input file hierarchy
 file(GLOB_RECURSE input_paths ${DocumentationProjectFileInputDir}/c_* ${DocumentationProjectFileInputDir}/i_*)
 foreach(p ${input_paths})
......
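For illustration, the path handling in the function above corresponds roughly to the following Python sketch. It is not part of the commit; the directory and file names are invented, and it only mirrors the RELATIVE_PATH and get_filename_component(... DIRECTORY) steps shown in the CMake code.

    import os.path

    # hypothetical input directory and input file (names made up)
    input_dir = "/src/Documentation/ProjectFile"
    p = "/src/Documentation/ProjectFile/prj/processes/i_processes.md"

    # file(RELATIVE_PATH relative_path ${DocumentationProjectFileInputDir} ${p})
    relative_path = os.path.relpath(p, input_dir)
    # get_filename_component(dir_name ${relative_path} DIRECTORY)
    dir_name = os.path.dirname(relative_path)

    print(relative_path)  # prj/processes/i_processes.md
    print(dir_name)       # prj/processes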
@@ -57,10 +57,9 @@ if(DOXYGEN_FOUND)
         COMMENT "Generating project file documentation hierarchy." VERBATIM)
     add_dependencies(doc internal_pre_doc)
 
+    # TODO also check python
     if (doc_use_external_tools)
         set(data_dir "${PROJECT_SOURCE_DIR}/Tests/Data")
-        add_custom_target(internal_pre_doc2
+        add_custom_target(internal_pre_doc_qa_page
             ${BASH_TOOL_PATH}
             "${PROJECT_SOURCE_DIR}/scripts/doc/generate-project-file-doc-qa.sh"
             ${PROJECT_SOURCE_DIR}
@@ -68,7 +67,7 @@ if(DOXYGEN_FOUND)
             ${data_dir}
             WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
             COMMENT "Generating project file documentation quality assurance pages." VERBATIM)
-        add_dependencies(doc internal_pre_doc2)
-        add_dependencies(internal_pre_doc2 internal_pre_doc)
+        add_dependencies(doc internal_pre_doc_qa_page)
+        add_dependencies(internal_pre_doc_qa_page internal_pre_doc)
     endif()
 endif()
 #!/usr/bin/python
+# This script augments the parameter documentation pages with information
+# such as whether they are required or optional, their data type, and in
+# which end-to-end tests they are used.
+# It uses the cache file generated by normalize-param-cache.py.
 # prevent broken pipe error
 from signal import signal, SIGPIPE, SIG_DFL
 signal(SIGPIPE,SIG_DFL)
@@ -23,6 +28,8 @@ datadir = os.path.abspath(args.datadir)
 docauxdir = os.path.abspath(args.docauxdir)
 docdir = os.path.join(docauxdir, "dox", "ProjectFile")
 
+# used to expand documentation entry points to full xml tag paths
+# that are used in the prj file
 tag_path_expansion_table = {
     "initial_condition": "process_variables.process_variable.initial_condition",
     "boundary_condition": "process_variables.process_variable.boundary_conditions.boundary_condition",
@@ -89,7 +96,7 @@ if False:
     for f in sorted(files):
         print(" ", f)
 
-# read parameter cache
+# read parameter cache (generated by normalize-param-cache.py)
 with open(os.path.join(docauxdir, "documented-parameters-cache.txt")) as fh:
     for line in fh:
         line = line.strip().split("@@@")
......
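To make the role of tag_path_expansion_table more concrete, here is a minimal sketch of how such a table expands a documentation entry point into the full tag path used in the prj file. The helper function and the fallback for unlisted entry points are assumptions, not code from the script; the two table entries are copied from the hunk above.

    # minimal sketch, not part of the script shown above
    tag_path_expansion_table = {
        "initial_condition":
            "process_variables.process_variable.initial_condition",
        "boundary_condition":
            "process_variables.process_variable.boundary_conditions.boundary_condition",
    }

    def expand_tag_path(entry_point):
        # assumption: unknown entry points are kept unchanged
        return tag_path_expansion_table.get(entry_point, entry_point)

    print(expand_tag_path("boundary_condition"))
    # process_variables.process_variable.boundary_conditions.boundary_condition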
 #!/usr/bin/python
+# This script actually generates the QA page.
+# For its usage see generate-project-file-doc-qa.sh.
 import sys
 import re
 import os.path
......
 #!/bin/sh
-# expect input from get-project-params.sh
+# This script creates missing files in the OGS input file documentation source tree.
+# It expects input from get-project-params.sh.
+#
+# The working directory of this script must be the root of the OGS sources.
+#
+# Example:
+#   scripts/doc/get-project-params.sh . | scripts/doc/create-docu-file-stubs.sh
 base="Documentation/ProjectFile"
......
 #!/bin/bash
-echo "======== $@"
+# This script creates the quality assurance page and augments the
+# input file parameter documentation with information about the
+# end-to-end tests in which the respective parameters are used.
 if [ $# -ne 3 ]; then
     echo "USAGE: $0 SRCDIR BUILDDIR DATADIR" >&2
......
 #!/bin/bash
+# This script traverses the OGS sources making a list of all input file
+# parameter related Doxygen commands, i.e., those beginning with \ogs,
+# and of all ConfigTree related C++ code.
 if [ $# -ne 1 ]; then
     echo "USAGE: ${0##*/} SRCDIR" >&2
     exit 1
@@ -11,7 +16,7 @@ srcdir="`readlink -f "$1"`"
 color=""
 
 cat <<"EOF" \
-    | grep -r $srcdir \
+    | grep -r "$srcdir" \
     --include '*.h' \
     --include '*.cpp' \
     --exclude-dir '.git' \
......
#!/usr/bin/python
# Expects input from get-project-params.sh, i.e., lines consisting of a file
# name, a line number and the matched source line, separated by whitespace.
# For every matched source line that is not preceded by a "//!" documentation
# comment, a documentation stub comment is inserted into the respective file
# using sed.
import sys
import subprocess

print_next = True   # whether the next matched line lacks a preceding doc comment
old_fn = None       # file currently being processed
undoc_lnos = []     # line numbers of undocumented parameter accesses

def add_doc_stubs(fn, lnos):
    # insert a doc stub comment before each collected line number of file fn
    if not lnos: return
    print(fn, lnos)
    cmd = ["sed", "-i"]
    for lno in lnos:
        cmd.append("-e")
        cmd.append(str(lno) + r""" i \
//! \\ogs_file_param{todo_document_parameter} \\todo project_file_docu
""")
    cmd.append(fn)
    subprocess.run(cmd)
    del lnos[:]

for line in sys.stdin:
    fn, l, content = line.split(maxsplit=2)
    if fn != old_fn:
        # moving on to the next file: flush the stubs for the previous one
        add_doc_stubs(old_fn, undoc_lnos)
        old_fn = fn
    if content.startswith("//!"):
        # a doc comment; the immediately following matched line is documented
        print_next = False
    elif print_next:
        # undocumented parameter access: remember its line number
        # print(line.rstrip())
        undoc_lnos.append(l)
    else:
        print_next = True

add_doc_stubs(old_fn, undoc_lnos)
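For reference, the sed command assembled by add_doc_stubs() can be previewed without touching any file. The sketch below rebuilds the argument list for a made-up file name and made-up line numbers and only prints it; sed's "i" command inserts the stub text before the addressed line.

    # hypothetical preview of the sed command built by add_doc_stubs() above;
    # the file name and the line numbers are made up
    fn = "SomeProcess.cpp"
    lnos = [10, 25]
    stub = "//! \\\\ogs_file_param{todo_document_parameter} \\\\todo project_file_docu"

    cmd = ["sed", "-i"]
    for lno in lnos:
        # sed's "i" command inserts the stub text before the addressed line
        cmd.append("-e")
        cmd.append("{} i \\\n{}\n".format(lno, stub))
    cmd.append(fn)

    print(cmd)  # printed here instead of being passed to subprocess.run()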
 #!/usr/bin/python
+# This script takes the output of get-project-params.sh on stdin
+# and transforms it into a tabular representation for further
+# processing.
 import sys
 import re
 import os.path
@@ -10,10 +14,9 @@ def debug(msg):
 def write_out(*args):
     print("@@@".join([str(a) for a in args]))
 
-# capture #1 is the parameter path
+# capture #2 is the parameter path
 comment = re.compile(r"^//! \\ogs_file_(param|attr)\{([A-Za-z_0-9]+)\}( \\todo .*)?$")
 comment_special = re.compile(r"^//! \\ogs_file(_param|_attr)?_special(\{[A-Za-z_0-9]+\})?( \\todo .*)?$")
-#comment_special = re.compile(r"^//! \\ogs_file_special$")
 
 # capture #5 is the parameter name
 getter = re.compile(r'^(get|check|ignore|peek)Conf(Param|Attribute|Subtree)(List|Optional|All)?'
......
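The corrected comment ("capture #2 is the parameter path") can be checked against the comment pattern directly; in the following sketch the regex is copied from the hunk above and the example input line is invented.

    import re

    # pattern copied from the script above; the input line below is made up
    comment = re.compile(
        r"^//! \\ogs_file_(param|attr)\{([A-Za-z_0-9]+)\}( \\todo .*)?$")

    line = r"//! \ogs_file_param{prj__time_loop}"
    m = comment.match(line)
    if m:
        print(m.group(1))  # "param" or "attr", here: param
        print(m.group(2))  # the parameter path, here: prj__time_loop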
#!/usr/bin/python
# Prints all XML tags and attributes that occur in files with the given
# extension below the given directory, together with the files they occur in.
# Tag entries are prefixed with "T | ", attribute entries with "A | ".

# prevent broken pipe error
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE,SIG_DFL)

import os
import xml.etree.cElementTree as ET
import argparse

parser = argparse.ArgumentParser(description="Print XML tags")
parser.add_argument("ext", help="Extension of files to consider")
parser.add_argument("path", help="Top level directory of traversal")
args = parser.parse_args()

rootdir = os.path.abspath(args.path)
extension = '.' + args.ext

# maps tags to the set of xml files they appear in
dict_tag_files = dict()

def dict_of_set_append(dict_, key, value):
    if key in dict_:
        dict_[key].add(value)
    else:
        dict_[key] = set((value,))

def print_tags(node, path, level, filepath):
    global dict_tag_files
    tag = node.tag
    if level>1: # skip root node
        tagpath = path + "." + tag
    else:
        tagpath = tag
    if level>0: # skip root node
        dict_of_set_append(dict_tag_files, "T | " + tagpath, filepath)
        for k in node.attrib:
            dict_of_set_append(dict_tag_files, "A | " + tagpath + "." + k, filepath)
    for child in node:
        print_tags(child, tagpath, level + 1, filepath)

# traverse all files below rootdir and collect their tags and attributes
for (dirpath, _, filenames) in os.walk(rootdir):
    for f in filenames:
        if not f.endswith(extension): continue
        filepath = os.path.join(dirpath, f)
        xmlroot = ET.parse(filepath).getroot()
        print_tags(xmlroot, "", 0, filepath[len(rootdir)+1:])

# print each tag/attribute path together with the files it appears in
first = True
for (tag, files) in sorted(dict_tag_files.items()):
    if first:
        first = False
    else:
        print()
    print(tag)
    for f in sorted(files):
        print(" ", f)