HPC Container Study / Commits

Commit 6202dd56 (unverified), authored 4 months ago by Lars Bilke
Final run.
Parent: b64c7c3c
Showing 3 changed files with 29 additions and 21 deletions:

  workflow/config.ini        +2 −2
  workflow/requirements.txt  +2 −2
  workflow/run.py            +25 −17
workflow/config.ini  (+2 −2)

 [container]
 ogs_commit: cdb32989b54b47f059c697cc12fef62338c9702c
-guix_channel_ogs_commit: 901d7522bfdb710814f26f7858fb55f57769371f
+guix_channel_ogs_commit: f13a74511a5d9cf245940b71226c2f4e5ec81ab7
-guix_channel_commit: ebb715aabfc4e59ed14b964ac7fdd2a9f9f44526
+guix_channel_commit: 522732d5c15e44fc9e061f36a41f7129edfee66f
 guix_tune_option: --tune=skylake-avx512
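Note: the [container] section pins the exact OGS source commit, the two Guix channel commits, and the CPU tuning flag for the container builds; this commit bumps both channel pins. A minimal sketch of reading these pins with Python's standard-library configparser (which accepts the "key: value" syntax above); how workflow/run.py actually consumes config.ini is not shown in this diff:

    # Sketch: read the pinned commits from workflow/config.ini.
    # Assumes "key: value" pairs under a [container] section, as in the diff above.
    from configparser import ConfigParser

    config = ConfigParser()
    config.read("workflow/config.ini")
    container = config["container"]

    ogs_commit = container["ogs_commit"]
    channel_ogs_commit = container["guix_channel_ogs_commit"]
    channel_commit = container["guix_channel_commit"]
    tune_option = container["guix_tune_option"]  # "--tune=skylake-avx512"
    print(ogs_commit, channel_ogs_commit, channel_commit, tune_option)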
workflow/requirements.txt  (+2 −2)

-aiida-core~=2.6
+aiida-core[rest]~=2.6
 #git+https://github.com/aiidateam/aiida-core.git@main#egg=aiida-core
 aiida-shell>=0.8.0
 pandas
-Jinja2
+Jinja2
\ No newline at end of file
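Note: switching to aiida-core[rest] additionally installs aiida-core's optional REST API dependencies (the exact contents of that extra are defined by aiida-core, not by this repository). A small self-contained sketch for checking that an environment satisfies the two version pins above, assuming the packaging library (a dependency of pip itself) is importable:

    # Sketch: verify installed versions against the pins in requirements.txt.
    # "packaging" implements PEP 440 specifiers such as "~=2.6".
    from importlib.metadata import version
    from packaging.specifiers import SpecifierSet

    pins = {"aiida-core": "~=2.6", "aiida-shell": ">=0.8.0"}
    for name, spec in pins.items():
        installed = version(name)
        print(f"{name} {installed} satisfies {spec}: {installed in SpecifierSet(spec)}")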
workflow/run.py  (+25 −17)
@@ -3,14 +3,13 @@ import warnings
 from pathlib import Path
 import aiida
-from aiida.engine import run, submit, await_processes, run_get_node
+from aiida.engine import run, submit, await_processes
 from aiida.orm import load_computer, Bool
 from aiida.orm.utils.loaders import load_node
 from aiida.tools.visualization.graph import Graph
 from create_container import create_container
 from run_mpi_tests import run_mpi_tests
-# from create_container_workchain import CreateContainerWorkChain
 from helper import JobSettings, launch_batch_job, compare_for_equality
 from aiida.plugins.factories import DataFactory
@@ -30,17 +29,19 @@ warnings.filterwarnings(action='ignore',module='.*paramiko.*')
 def run_arehs(container_file, hpc_envs, graph_nodes=None):  # build_envs, hpc_envs, graph_nodes
     nodes = []
-    job_settings = Dict(dict=JobSettings(ntasks=1, cpus_per_task=48, time=120, mem_per_cpu="2GB")._asdict())  # 8, 10
     for hpc_env in hpc_envs.get_list():
+        computer = load_computer(hpc_env)
+        tasks = computer.get_default_mpiprocs_per_machine()
+        job_settings = Dict(dict=JobSettings(ntasks=tasks, cpus_per_task=1, time=120, mem_per_cpu="2GB")._asdict())  # 8, 10
         node = submit(
             RunWorkflowWorkChain,
             container_file=container_file,
             computer_name=Str(hpc_env),
             input_repo="https://gitlab.opengeosys.org/bilke/dgr.git",
-            workflow_command="snakemake --cores 48 --profile profiles/developer \
-                results/plot/3D/Salz-Kissen_prism-id_1-xres_400/glacialcycle/simTH/{{temporal,spatial}}/report.pdf \
-                results/plot/2D/{{Ton-Nord,Ton-Sued}}_prism-id_{{1,2}}-xres_200/glacialcycle/simTHM/{{temporal,spatial}}/report.pdf \
+            # TODO: 3D coupled, run the following
+            workflow_command=f"snakemake --cores {tasks} --profile profiles/developer " +
+                "results/plot/3D/Salz-Kissen_prism-id_1-xres_400/glacialcycle/simTH/parts_8/{{temporal,spatial}}/report.pdf \
+                results/plot/2D/{{Ton-Nord,Ton-Sued}}_prism-id_{{1,2}}-xres_200/glacialcycle/simTHM/parts_8/{{temporal,spatial}}/report.pdf \
                 results/plot/2D/{{Salz-Kissen,Salz-flach,Ton-Nord,Ton-Sued}}_prism-id_{{1,2}}-xres_200/glacialcycle/simT{{,H}}/{{temporal,spatial}}/report.pdf",
             # parts_8
             job_settings=job_settings,
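Note: run_arehs() now sizes the job per target machine: ntasks is taken from each computer's default MPI procs per machine with cpus_per_task=1, instead of a fixed 1 task with 48 CPUs, and the snakemake --cores value follows it. JobSettings lives in helper.py, which is not part of this commit; the NamedTuple below is a hypothetical reconstruction with only the fields used here, showing why ._asdict() is applied before wrapping the settings in an AiiDA Dict node:

    # Hypothetical stand-in for helper.JobSettings (helper.py is not in this diff);
    # field names and example values are taken from the calls in run.py,
    # the defaults and the minutes unit for "time" are assumptions.
    from typing import NamedTuple
    import aiida
    from aiida.orm import Dict

    class JobSettings(NamedTuple):
        ntasks: int = 1
        cpus_per_task: int = 1
        time: int = 60            # walltime, assumed to be in minutes
        mem_per_cpu: str = "2GB"

    aiida.load_profile()  # creating AiiDA nodes requires a configured profile
    # Dict stores a plain dictionary, hence the ._asdict() conversion:
    job_settings = Dict(dict=JobSettings(ntasks=48, cpus_per_task=1,
                                         time=120, mem_per_cpu="2GB")._asdict())
    print(job_settings.get_dict())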
@@ -96,7 +97,7 @@ def run_ogs_sim(container_file, hpc_envs, graph_nodes=None):
     job_settings = Dict(
         dict=JobSettings(
-            ntasks=partitions.value, mem_per_cpu="2G", time=5
+            ntasks=partitions.value, mem_per_cpu="2G", time=20
         )._asdict()
     )
     input_folder = FolderData(tree=Path("../tests/LiquidFlowOnCube").absolute())
@@ -133,14 +134,19 @@ if __name__ == "__main__":
     aiida.load_profile()
     graph_nodes = []  # collect interesting nodes for graph generation
-    build_envs = List(list=["envinf3", "envinf4"])  # , "envinf5", "nuc"
-    # ogs_container_file = create_container("ogs-template.scm", build_envs)
-    ogs_container_file = load_node(pk=50289)
-    arehs_container_file = load_node(pk=51510)
-    # arehs_container_file = create_container("arehs-template.scm", build_envs, "ogs-petsc-mkl", compare_hashes=False)
+    # build_envs = List(list=["envinf3", "envinf4"]) # , "envinf5", "nuc"
+    # if False:
+    build_envs = List(list=["envinf4", "envinf6"])  # "envinf3"
+    ogs_container_file = create_container("ogs-template.scm", build_envs)
+    # arehs_container_file = create_container("arehs-template.scm", build_envs, "ogs-petsc-mkl", compare_hashes=False)
+    # guix hash -S nar ...
+    # tar.gz hash: 1dw5kqd4qww2bjcxkafimfbx23a9slzsdrkm1dm8kxwiiiscq1gi
+    # squashfs hash: 1rl7dychp2rr88g1f2p29mlrpp7gjh1jmi5jz4ww5dgi7g999qm5
+    # squashfs size: 734.2 MiB
+    #ogs_container_file = load_node(pk=53247)
+    # squashfs size: 2182.7 MiB
+    # arehs_container_file = load_node(pk=53317)
+    arehs_container_file = create_container("arehs-template.scm", build_envs, "ogs-petsc-mkl", compare_hashes=False)
     hpc_envs = List(
         list=[
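Note: the pk values refer to container-image file nodes stored in this AiiDA database by earlier builds. Since the Guix channel pins in config.ini changed, the script now calls create_container() for both images again and keeps the load_node() shortcuts only as comments. A minimal sketch of inspecting such a stored node by pk (the pk is database-specific and taken from the diff above; the printed fields are generic AiiDA node properties):

    # Sketch: look up a previously stored container file node by its primary key.
    import aiida
    from aiida.orm import load_node

    aiida.load_profile()
    node = load_node(pk=50289)  # pk taken from the diff above; only valid in that database
    print(type(node).__name__, node.pk, node.ctime, node.label)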
@@ -150,14 +156,16 @@ if __name__ == "__main__":
         ]
     )
-    ### AREHS workflow
-    run_arehs(arehs_container_file, hpc_envs, graph_nodes)
     ### mpi bandwidth benchmark and simple petsc sim
     run_mpi_tests(ogs_container_file, hpc_envs, graph_nodes)
     #### liquid_flow.prj with structured mesh ####
     run_ogs_sim(ogs_container_file, hpc_envs, graph_nodes)
+    ### AREHS workflow
+    #graph_nodes = [load_node(pk=53226), load_node(pk=53296), load_node(pk=54696), load_node(pk=54394), load_node(pk=54452), load_node(pk=54369)]
+    run_arehs(arehs_container_file, hpc_envs, graph_nodes)
     # Render diagram
     graph = Graph(node_sublabel_fn=custom_node_sublabels)
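Note: the AREHS workflow is now launched last, after the MPI tests and the liquid-flow simulation. A sketch of the typical tail of such a script: block on the submitted workchains with await_processes (which run.py imports from aiida.engine), then render the provenance graph. The recurse_* calls, the empty placeholder lists, and the output file name are assumptions, since the corresponding lines lie outside this diff:

    # Sketch: wait for submitted workchains, then render a provenance graph.
    # In run.py the two lists are filled by run_arehs(), run_mpi_tests() and
    # run_ogs_sim(); empty lists keep this sketch self-contained.
    import aiida
    from aiida.engine import await_processes
    from aiida.tools.visualization.graph import Graph

    aiida.load_profile()
    nodes = []        # ProcessNodes returned by submit(...)
    graph_nodes = []  # nodes selected for the provenance diagram

    if nodes:
        await_processes(nodes)  # blocks until all submitted processes terminate

    graph = Graph()  # run.py additionally passes node_sublabel_fn=custom_node_sublabels
    for node in graph_nodes:
        graph.recurse_ancestors(node)
        graph.recurse_descendants(node)
    graph.graphviz.render("provenance", format="pdf", cleanup=True)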