"""Run an OpenFOAM parameter study."""

from pathlib import Path
import os
from remote_run import (
    Executor,
    SlurmSchedulingEngine,
    GuixEnvironment,
    remote,
    SshMachine,
)
from pyopenfoam.type_conversion import OFMultiValue
from pyopenfoam.tutorials import generate_pitz_daily_case
from pyopenfoam.cli.commands import run_parallel

# Define execution variables.

# Number of parallel ranks used both to decompose the case and to run the solver.
num_cpus: int = 5

# Create an OpenFOAM case study locally; we are going to do a parameter study of the inlet velocity.

study_path = Path("study_pitz_daily")  # local root directory for all generated cases
velocities_x = range(8, 12)  # inlet x-velocities to sweep: 8, 9, 10, 11
array_task_ids = range(len(velocities_x))  # one SLURM array task id per velocity

# Define a case path for each array task id.

# Map each SLURM array task id to the directory that will hold its case.
case_paths_lookup = {task_id: study_path / str(task_id) for task_id in array_task_ids}

# Define an executor with a scheduling engine. For SLURM you can pass SLURM parameters directly here.

# Remote executor: connects to the HPC cluster over SSH, provisions a Guix
# environment on the remote side, and submits work through SLURM as a job
# array with one task per generated case.
executor = Executor(
    machine=SshMachine(
        machine="shpc0003.ost.ch",
        working_directory=Path("/cluster/raid/home/reza.housseini"),
    ),
    environment=[
        # Guix manifest listing the software needed remotely; channels.scm
        # (read here, at module import time) pins the channel revisions so
        # the remote environment is reproducible.
        GuixEnvironment(
            manifest="""
(specifications->manifest
        (list "python"
              "python-pyvista"
              "python-pint"
              "python-pyopenfoam"
              "openfoam-org"
              "openmpi"))
""",
            channels=Path("channels.scm").read_text(),
        )
    ],
    scheduling_engine=SlurmSchedulingEngine(
        job_name="openfoam_sim",
        mail_type="ALL",
        mail_user="reza.housseini@ost.ch",
        array=array_task_ids,  # submit as a SLURM job array, one task per case
    ),
)

# Define simulation variables.

# Presumably the simulated physical end time passed to the case generator —
# confirm against generate_pitz_daily_case.
duration: float = 0.05  # seconds

# Then adjust the inlet velocity and generate the cases.

def _zero_velocity():
    """Return a fresh OpenFOAM 'uniform (0 0 0)' vector value."""
    return OFMultiValue(("uniform", [0, 0, 0]))


# Generate one case per velocity: patch the inlet of the "U" field, keep the
# remaining boundaries fixed, and write each case into the folder assigned to
# its array task id.
for vx, task_id in zip(velocities_x, array_task_ids):
    case = generate_pitz_daily_case(duration=duration, num_cpus=num_cpus)
    boundary_field = {
        "inlet": {
            "type": "mappedInternalValue",
            "interpolationScheme": "cell",
            "average": [vx, 0, 0],
            "value": OFMultiValue(("uniform", [vx, 0, 0])),
        },
        "outlet": {
            "type": "inletOutlet",
            "inletValue": _zero_velocity(),
            "value": _zero_velocity(),
        },
        "upperWall": {"type": "fixedValue", "value": _zero_velocity()},
        "lowerWall": {"type": "fixedValue", "value": _zero_velocity()},
        "frontAndBack": {"type": "empty"},
    }
    case.files["U"].contents.update({"boundaryField": boundary_field})
    case.write(folder=case_paths_lookup[task_id])

# Decorate the functions you want to run in an executor. Here we execute our OpenFOAM case in parallel and reconstruct the "U" and "p" fields.

@remote
def run_study(study_path: Path):
    """Run one case of the parameter study on the remote machine.

    Selects the case directory assigned to this SLURM array task, then runs
    the solver in parallel on ``num_cpus`` ranks and reconstructs the "U"
    and "p" fields.

    Raises:
        KeyError: if ``SLURM_ARRAY_TASK_ID`` is not set (i.e. the function
            is not running inside a SLURM array task).
    """
    # os.environ[...] fails fast with a KeyError naming the missing variable;
    # os.getenv would return None and int(None) would raise a cryptic TypeError.
    array_task_id = int(os.environ["SLURM_ARRAY_TASK_ID"])
    case_path = study_path / str(array_task_id)
    run_parallel(
        case_path,
        num_cpus=num_cpus,
        reconstruct_fields=["U", "p"],
    )

# This call runs on the remote machine specified in the executor. Due to the asynchronous nature of scheduling engines it does not return the result directly; instead you get a future to retrieve the result later.

# Submits asynchronously and returns a future-like job handle, not the result.
job = run_study(study_path, executor=executor)

# Now we wait for the remote execution to finish before retrieving the result.

# Block until the job finishes, then fetch its outcome.
# NOTE(review): timeout=10 looks very short for an HPC run if the unit is
# seconds — confirm the unit/semantics of ``timeout`` in remote_run's
# job.result() before relying on it.
result = job.result(timeout=10)

# Gallery generated by Sphinx-Gallery