Skip to content
Draft
24 changes: 24 additions & 0 deletions tests/0090B316/golden.txt

Large diffs are not rendered by default.

12 changes: 12 additions & 0 deletions tests/05D092C5/golden.txt

Large diffs are not rendered by default.

48 changes: 48 additions & 0 deletions tests/0DDE8A87/golden.txt

Large diffs are not rendered by default.

16 changes: 16 additions & 0 deletions tests/1A379909/golden.txt

Large diffs are not rendered by default.

12 changes: 12 additions & 0 deletions tests/59C3F366/golden.txt

Large diffs are not rendered by default.

32 changes: 32 additions & 0 deletions tests/75D7CC39/golden.txt

Large diffs are not rendered by default.

12 changes: 12 additions & 0 deletions tests/B49877F3/golden.txt

Large diffs are not rendered by default.

12 changes: 12 additions & 0 deletions tests/C3A4DBAE/golden.txt

Large diffs are not rendered by default.

58 changes: 53 additions & 5 deletions toolchain/mfc/test/case.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from .. import case, common
from ..state import ARG
from ..run import input
from ..build import MFCTarget, get_target
from ..build import MFCTarget, SIMULATION, get_target

Tend = 0.25
Nt = 50
Expand Down Expand Up @@ -108,11 +108,14 @@ class TestCase(case.Case):
ppn: int
trace: str
override_tol: Optional[float] = None
restart_check: bool = False

def __init__(self, trace: str, mods: dict, ppn: int = None, override_tol: float = None) -> None:
# pylint: disable=too-many-arguments, too-many-positional-arguments
def __init__(self, trace: str, mods: dict, ppn: int = None, override_tol: float = None, restart_check: bool = False) -> None:
    """Build a test case whose parameters are BASE_CFG overridden by *mods*.

    :param trace: human-readable test name path (e.g. "3D -> Bubbles").
    :param mods: case-parameter overrides layered on top of BASE_CFG.
    :param ppn: MPI processes per node; falsy values default to 1.
    :param override_tol: optional tolerance replacing the computed one.
    :param restart_check: flag enabling the save/restart roundtrip check.
    """
    self.trace = trace
    self.override_tol = override_tol
    self.restart_check = restart_check
    self.ppn = 1 if not ppn else ppn
    super().__init__({**BASE_CFG.copy(), **mods})

def run(self, targets: List[Union[str, MFCTarget]], gpus: Set[int]) -> subprocess.CompletedProcess:
Expand Down Expand Up @@ -140,6 +143,49 @@ def run(self, targets: List[Union[str, MFCTarget]], gpus: Set[int]) -> subproces

return common.system(command, print_cmd=False, text=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

def run_restart(self, targets, gpus):
    """Run a restart roundtrip: simulate to the midpoint, then restart.

    Phase 1 runs the given targets up to the midpoint step so restart
    data is written; phase 2 runs only the simulation target starting
    from the midpoint. Intermediate midpoint files are pruned from D/
    so the final contents match a straight (non-restart) run.

    :param targets: targets for the phase-1 run (same form as run()).
    :param gpus: GPU id set forwarded to run().
    :return: the CompletedProcess of the first failing phase, or of
             phase 2 on success.

    The original case parameters are always restored (and the case
    directory regenerated on a best-effort basis), even on failure.
    """
    # Use .get(..., 0) consistently with the phase-1 t_step_save math
    # below so a case without an explicit t_step_start does not raise.
    t_start = self.params.get('t_step_start', 0)
    mid_step = (t_start + self.params['t_step_stop']) // 2
    orig = dict(self.params)

    try:
        self.delete_output()

        # Phase 1: Run to midpoint (generates restart data)
        self.params = {**orig, 't_step_stop': mid_step,
                       't_step_save': mid_step - t_start}
        self.create_directory()
        result1 = self.run(targets, gpus)
        if result1.returncode != 0:
            return result1

        # Keep D/ (has steps 0 and mid_step) and p_all/ (restart data).
        dirpath = self.get_dirpath()
        common.delete_directory(os.path.join(dirpath, "silo_hdf5"))

        # Phase 2: Restart simulation from midpoint. Only the simulation
        # is run — it reads grid + IC directly from p_all/p0/<mid_step>/.
        self.params = {**orig, 't_step_start': mid_step,
                       't_step_save': orig['t_step_stop'] - mid_step}
        self.create_directory()
        result2 = self.run([SIMULATION], gpus)

        # Remove intermediate step files from D/ so only step 0 and
        # t_step_stop remain, matching the straight run's output.
        if result2.returncode == 0:
            d_dir = os.path.join(dirpath, "D")
            mid_tag = f"{mid_step:06d}"
            for f in glob.glob(os.path.join(d_dir, f"*.{mid_tag}.dat")):
                common.delete_file(f)

        return result2
    finally:
        # Always restore pristine parameters so later comparisons and
        # reruns see the unmodified case.
        self.params = orig
        try:
            self.create_directory()
        except Exception as exc:
            # Best-effort restore: report, but do not mask the result
            # (or exception) propagating out of the try block.
            print(f"Warning: failed to restore test directory: {exc}")

def get_trace(self) -> str:
    """Return the human-readable trace (test name path) for this case."""
    return self.trace

Expand Down Expand Up @@ -269,6 +315,7 @@ def compute_tolerance(self) -> float:

return 1e8 * tolerance if single else tolerance

# pylint: disable=too-many-instance-attributes
@dataclasses.dataclass
class TestCaseBuilder:
trace: str
Expand All @@ -278,6 +325,7 @@ class TestCaseBuilder:
ppn: int
functor: Optional[Callable]
override_tol: Optional[float] = None
restart_check: bool = False

def get_uuid(self) -> str:
    """Return the deterministic UUID derived from this builder's trace."""
    return trace_to_uuid(self.trace)
Expand All @@ -302,7 +350,7 @@ def to_case(self) -> TestCase:
if self.functor:
self.functor(dictionary)

return TestCase(self.trace, dictionary, self.ppn, self.override_tol)
return TestCase(self.trace, dictionary, self.ppn, self.override_tol, self.restart_check)


@dataclasses.dataclass
Expand Down Expand Up @@ -330,7 +378,7 @@ def define_case_f(trace: str, path: str, args: List[str] = None, ppn: int = None


# pylint: disable=too-many-arguments, too-many-positional-arguments
def define_case_d(stack: CaseGeneratorStack, newTrace: str, newMods: dict, ppn: int = None, functor: Callable = None, override_tol: float = None) -> TestCaseBuilder:
def define_case_d(stack: CaseGeneratorStack, newTrace: str, newMods: dict, ppn: int = None, functor: Callable = None, override_tol: float = None, restart_check: bool = False) -> TestCaseBuilder:
mods: dict = {}

for mod in stack.mods:
Expand All @@ -346,7 +394,7 @@ def define_case_d(stack: CaseGeneratorStack, newTrace: str, newMods: dict, ppn:
if not common.isspace(trace):
traces.append(trace)

return TestCaseBuilder(' -> '.join(traces), mods, None, None, ppn or 1, functor, override_tol)
return TestCaseBuilder(' -> '.join(traces), mods, None, None, ppn or 1, functor, override_tol, restart_check)

def input_bubbles_lagrange(self):
if "lagrange_bubblescreen" in self.trace:
Expand Down
238 changes: 238 additions & 0 deletions toolchain/mfc/test/cases.py
Original file line number Diff line number Diff line change
Expand Up @@ -1109,6 +1109,244 @@ def chemistry_cases():

chemistry_cases()

def direction_symmetry_tests():
    """3D tests with shock propagating in x and y directions.

    Default 3D tests have the shock along z. These test x and y
    code paths to catch direction-specific bugs in reconstruction,
    Riemann solvers, and gradient calculations.
    """
    axes = ['x', 'y', 'z']
    # Slab layout along the shock axis: centroid ± length/2 gives
    # [0, 0.1], [0.1, 0.8], and [0.8, 1.0].
    shock_centroids = [0.05, 0.45, 0.9]
    shock_lengths = [0.1, 0.7, 0.2]

    for direction in ('x', 'y'):
        mods = {'m': 24, 'n': 24, 'p': 24}
        # Unit cube domain with extrapolation BCs on every face.
        for ax in axes:
            mods[f'{ax}_domain%beg'] = 0.E+00
            mods[f'{ax}_domain%end'] = 1.E+00
        for ax in axes:
            mods[f'bc_{ax}%beg'] = -3
            mods[f'bc_{ax}%end'] = -3

        transverse = [ax for ax in axes if ax != direction]
        for idx, (centroid, length) in enumerate(zip(shock_centroids, shock_lengths), start=1):
            mods[f'patch_icpp({idx})%geometry'] = 9
            mods[f'patch_icpp({idx})%vel(1)'] = 0.0
            mods[f'patch_icpp({idx})%vel(2)'] = 0.0
            mods[f'patch_icpp({idx})%vel(3)'] = 0.0
            mods[f'patch_icpp({idx})%{direction}_centroid'] = centroid
            mods[f'patch_icpp({idx})%length_{direction}'] = length
            # Patches span the full extent of both transverse axes.
            for ax in transverse:
                mods[f'patch_icpp({idx})%{ax}_centroid'] = 0.5
                mods[f'patch_icpp({idx})%length_{ax}'] = 1.0

        stack.push(f'3D Direction Symmetry -> Shock in {direction.upper()}', mods)
        cases.append(define_case_d(stack, '', {}))
        stack.pop()

direction_symmetry_tests()

def mpi_consistency_tests():
    """ppn=2 tests for physics sensitive to MPI decomposition.

    Exercises bubble dynamics, viscous flows, and hypoelasticity
    with 2 MPI ranks to catch broadcast/reduction bugs.
    """
    # Shared 3D domain/BC configuration for all three cases below.
    base_3d = {
        'm': 29, 'n': 29, 'p': 49,
        'x_domain%beg': 0.E+00, 'x_domain%end': 1.E+00,
        'y_domain%beg': 0.E+00, 'y_domain%end': 1.E+00,
        'z_domain%beg': 0.E+00, 'z_domain%end': 1.E+00,
        'bc_x%beg': -3, 'bc_x%end': -3,
        'bc_y%beg': -3, 'bc_y%end': -3,
        'bc_z%beg': -3, 'bc_z%end': -3,
    }

    # Three cuboid patches stacked along z; each spans the full x/y extent.
    for patchID in range(1, 4):
        base_3d[f'patch_icpp({patchID})%geometry'] = 9
        base_3d[f'patch_icpp({patchID})%vel(1)'] = 0.0
        base_3d[f'patch_icpp({patchID})%vel(2)'] = 0.0
        base_3d[f'patch_icpp({patchID})%vel(3)'] = 0.0
        base_3d[f'patch_icpp({patchID})%x_centroid'] = 0.5
        base_3d[f'patch_icpp({patchID})%length_x'] = 1
        base_3d[f'patch_icpp({patchID})%y_centroid'] = 0.5
        base_3d[f'patch_icpp({patchID})%length_y'] = 1
    base_3d.update({
        'patch_icpp(1)%z_centroid': 0.05, 'patch_icpp(1)%length_z': 0.1,
        'patch_icpp(2)%z_centroid': 0.45, 'patch_icpp(2)%length_z': 0.7,
        'patch_icpp(3)%z_centroid': 0.9, 'patch_icpp(3)%length_z': 0.2,
    })

    # Bubbles with 2 MPI ranks
    stack.push('MPI Consistency -> 3D -> Bubbles', {**base_3d,
        'dt': 1e-06,
        'bubbles_euler': 'T', 'nb': 1, 'polytropic': 'T', 'bubble_model': 2,
        'fluid_pp(1)%gamma': 0.16, 'fluid_pp(1)%pi_inf': 3515.0,
        'bub_pp%R0ref': 1.0, 'bub_pp%p0ref': 1.0, 'bub_pp%rho0ref': 1.0, 'bub_pp%T0ref': 1.0,
        'bub_pp%ss': 0.07179866765358993, 'bub_pp%pv': 0.02308216136195411,
        'bub_pp%vd': 0.2404125083932959,
        'bub_pp%mu_l': 0.009954269975623244, 'bub_pp%mu_v': 8.758168074360729e-05,
        'bub_pp%mu_g': 0.00017881922111898042, 'bub_pp%gam_v': 1.33, 'bub_pp%gam_g': 1.4,
        'bub_pp%M_v': 18.02, 'bub_pp%M_g': 28.97, 'bub_pp%k_v': 0.5583395141263873,
        'bub_pp%k_g': 0.7346421281308791, 'bub_pp%R_v': 1334.8378710170155,
        'bub_pp%R_g': 830.2995663005393,
        'patch_icpp(1)%alpha_rho(1)': 0.96, 'patch_icpp(1)%alpha(1)': 4e-02,
        'patch_icpp(2)%alpha_rho(1)': 0.96, 'patch_icpp(2)%alpha(1)': 4e-02,
        'patch_icpp(3)%alpha_rho(1)': 0.96, 'patch_icpp(3)%alpha(1)': 4e-02,
        'patch_icpp(1)%pres': 1.0, 'patch_icpp(2)%pres': 1.0, 'patch_icpp(3)%pres': 1.0,
    })
    cases.append(define_case_d(stack, '', {}, ppn=2))
    stack.pop()

    # Viscous with 2 MPI ranks
    stack.push('MPI Consistency -> 3D -> Viscous', {**base_3d,
        'dt': 1e-11,
        'fluid_pp(1)%Re(1)': 0.0001, 'viscous': 'T',
        'patch_icpp(1)%vel(1)': 1.0,
        'patch_icpp(2)%vel(1)': 1.0,
        'patch_icpp(3)%vel(1)': 1.0,
    })
    cases.append(define_case_d(stack, '', {}, ppn=2))
    stack.pop()

    # Hypoelasticity with 2 MPI ranks
    stack.push('MPI Consistency -> 3D -> Hypoelasticity', {**base_3d,
        'dt': 1e-06,
        'hypoelasticity': 'T', 'riemann_solver': 1, 'fd_order': 4,
        'fluid_pp(1)%gamma': 0.3, 'fluid_pp(1)%pi_inf': 7.8E+05,
        'fluid_pp(1)%G': 1.E+05,
        'patch_icpp(1)%pres': 1.E+06, 'patch_icpp(1)%alpha_rho(1)': 1000.E+00,
        'patch_icpp(2)%pres': 1.E+05, 'patch_icpp(2)%alpha_rho(1)': 1000.E+00,
        'patch_icpp(3)%pres': 5.E+05, 'patch_icpp(3)%alpha_rho(1)': 1000.E+00,
        # All six elastic stress components start at zero in every patch.
        'patch_icpp(1)%tau_e(1)': 0.E+00, 'patch_icpp(2)%tau_e(1)': 0.E+00,
        'patch_icpp(3)%tau_e(1)': 0.E+00,
        'patch_icpp(1)%tau_e(2)': 0.E+00, 'patch_icpp(1)%tau_e(3)': 0.E+00,
        'patch_icpp(2)%tau_e(2)': 0.E+00, 'patch_icpp(2)%tau_e(3)': 0.E+00,
        'patch_icpp(3)%tau_e(2)': 0.E+00, 'patch_icpp(3)%tau_e(3)': 0.E+00,
        'patch_icpp(1)%tau_e(4)': 0.E+00, 'patch_icpp(1)%tau_e(5)': 0.E+00,
        'patch_icpp(1)%tau_e(6)': 0.E+00,
        'patch_icpp(2)%tau_e(4)': 0.E+00, 'patch_icpp(2)%tau_e(5)': 0.E+00,
        'patch_icpp(2)%tau_e(6)': 0.E+00,
        'patch_icpp(3)%tau_e(4)': 0.E+00, 'patch_icpp(3)%tau_e(5)': 0.E+00,
        'patch_icpp(3)%tau_e(6)': 0.E+00,
    })
    cases.append(define_case_d(stack, '', {}, ppn=2))
    stack.pop()

mpi_consistency_tests()

def restart_roundtrip_tests():
    """Tests that verify save-restart roundtrip fidelity.

    Each test runs a straight simulation, then a restart from the
    midpoint. The restarted output is compared against the straight
    run output to verify restart I/O doesn't introduce drift.
    """
    # 1D restart
    stack.push('Restart Roundtrip -> 1D', {
        'm': 299, 'n': 0, 'p': 0,
        'x_domain%beg': 0.E+00, 'x_domain%end': 1.E+00,
        'bc_x%beg': -3, 'bc_x%end': -3,
        'patch_icpp(1)%geometry': 1, 'patch_icpp(2)%geometry': 1,
        'patch_icpp(3)%geometry': 1,
        'patch_icpp(1)%x_centroid': 0.05, 'patch_icpp(1)%length_x': 0.1,
        'patch_icpp(2)%x_centroid': 0.45, 'patch_icpp(2)%length_x': 0.7,
        'patch_icpp(3)%x_centroid': 0.9, 'patch_icpp(3)%length_x': 0.2,
        'patch_icpp(1)%vel(1)': 0.0, 'patch_icpp(2)%vel(1)': 0.0,
        'patch_icpp(3)%vel(1)': 0.0,
    })
    # restart_check=True makes the test harness run the midpoint-restart
    # roundtrip for this case instead of a single straight run.
    cases.append(define_case_d(stack, '', {}, restart_check=True))
    stack.pop()

    # 3D restart
    base_3d = {
        'm': 24, 'n': 24, 'p': 24,
        'x_domain%beg': 0.E+00, 'x_domain%end': 1.E+00,
        'y_domain%beg': 0.E+00, 'y_domain%end': 1.E+00,
        'z_domain%beg': 0.E+00, 'z_domain%end': 1.E+00,
        'bc_x%beg': -3, 'bc_x%end': -3,
        'bc_y%beg': -3, 'bc_y%end': -3,
        'bc_z%beg': -3, 'bc_z%end': -3,
    }
    # Three cuboid patches stacked along z; each spans the full x/y extent.
    for patchID in range(1, 4):
        base_3d[f'patch_icpp({patchID})%geometry'] = 9
        base_3d[f'patch_icpp({patchID})%vel(1)'] = 0.0
        base_3d[f'patch_icpp({patchID})%vel(2)'] = 0.0
        base_3d[f'patch_icpp({patchID})%vel(3)'] = 0.0
        base_3d[f'patch_icpp({patchID})%x_centroid'] = 0.5
        base_3d[f'patch_icpp({patchID})%length_x'] = 1
        base_3d[f'patch_icpp({patchID})%y_centroid'] = 0.5
        base_3d[f'patch_icpp({patchID})%length_y'] = 1
    base_3d.update({
        'patch_icpp(1)%z_centroid': 0.05, 'patch_icpp(1)%length_z': 0.1,
        'patch_icpp(2)%z_centroid': 0.45, 'patch_icpp(2)%length_z': 0.7,
        'patch_icpp(3)%z_centroid': 0.9, 'patch_icpp(3)%length_z': 0.2,
    })
    stack.push('Restart Roundtrip -> 3D', base_3d)
    cases.append(define_case_d(stack, '', {}, restart_check=True))
    stack.pop()

restart_roundtrip_tests()

def kernel_golden_tests():
    """Focused golden-value tests for specific physics kernels.

    Grid stretching in 3D: exercises non-uniform grid spacing in all
    three directions. Stretching interacts with WENO reconstruction
    and gradient calculations in direction-specific ways. Not covered
    by any dynamic test (only via examples at reduced resolution).
    """
    # Shared 3D domain/BC configuration (unit cube, extrapolation BCs).
    base_3d = {
        'm': 24, 'n': 24, 'p': 24,
        'x_domain%beg': 0.E+00, 'x_domain%end': 1.E+00,
        'y_domain%beg': 0.E+00, 'y_domain%end': 1.E+00,
        'z_domain%beg': 0.E+00, 'z_domain%end': 1.E+00,
        'bc_x%beg': -3, 'bc_x%end': -3,
        'bc_y%beg': -3, 'bc_y%end': -3,
        'bc_z%beg': -3, 'bc_z%end': -3,
    }
    # Three cuboid patches stacked along z; each spans the full x/y extent.
    for patchID in range(1, 4):
        base_3d[f'patch_icpp({patchID})%geometry'] = 9
        base_3d[f'patch_icpp({patchID})%vel(1)'] = 0.0
        base_3d[f'patch_icpp({patchID})%vel(2)'] = 0.0
        base_3d[f'patch_icpp({patchID})%vel(3)'] = 0.0
        base_3d[f'patch_icpp({patchID})%x_centroid'] = 0.5
        base_3d[f'patch_icpp({patchID})%length_x'] = 1
        base_3d[f'patch_icpp({patchID})%y_centroid'] = 0.5
        base_3d[f'patch_icpp({patchID})%length_y'] = 1
    base_3d.update({
        'patch_icpp(1)%z_centroid': 0.05, 'patch_icpp(1)%length_z': 0.1,
        'patch_icpp(2)%z_centroid': 0.45, 'patch_icpp(2)%length_z': 0.7,
        'patch_icpp(3)%z_centroid': 0.9, 'patch_icpp(3)%length_z': 0.2,
    })

    # 3D grid stretching in all directions.
    # The cosh-based stretching expands the domain beyond the original
    # bounds (e.g., [0,1] → ~[0,1.39] with a=2, x_a=0.3, x_b=0.7).
    # Patches must be enlarged to cover the stretched domain, otherwise
    # cells beyond the original bounds are uninitialized (zero density),
    # causing ICFL blowup.
    stack.push('Kernel -> 3D -> Grid Stretching', {**base_3d,
        'stretch_x': 'T', 'a_x': 2.0, 'x_a': 0.3, 'x_b': 0.7, 'loops_x': 1,
        'stretch_y': 'T', 'a_y': 2.0, 'y_a': 0.3, 'y_b': 0.7, 'loops_y': 1,
        'stretch_z': 'T', 'a_z': 2.0, 'z_a': 0.3, 'z_b': 0.7, 'loops_z': 1,
        # Enlarge x/y coverage for all patches (stretched domain reaches ~1.39)
        'patch_icpp(1)%x_centroid': 0.75, 'patch_icpp(1)%length_x': 1.5,
        'patch_icpp(1)%y_centroid': 0.75, 'patch_icpp(1)%length_y': 1.5,
        'patch_icpp(2)%x_centroid': 0.75, 'patch_icpp(2)%length_x': 1.5,
        'patch_icpp(2)%y_centroid': 0.75, 'patch_icpp(2)%length_y': 1.5,
        'patch_icpp(3)%x_centroid': 0.75, 'patch_icpp(3)%length_x': 1.5,
        'patch_icpp(3)%y_centroid': 0.75, 'patch_icpp(3)%length_y': 1.5,
        # Extend last z-patch to cover stretched z range
        'patch_icpp(3)%z_centroid': 1.15, 'patch_icpp(3)%length_z': 0.7,
    })
    cases.append(define_case_d(stack, '', {}))
    stack.pop()

kernel_golden_tests()

# Sanity Check 1: every stack.push() above must have been matched by a
# stack.pop(); a non-empty stack means a case-generation function leaked state.
if stack.size() != 0:
    raise common.MFCException("list_cases: stack isn't fully pop'ed")
Expand Down
Loading
Loading