Commit c7e7ba2

Author: Elwardi

fix: multi-fidelity + robust optimization composition

numeric fidelity, qMFHVKG wiring, constant-outcome crashes/bugs

1 parent: ab5d916

8 files changed: 290 additions, 62 deletions

src/foambo/common.py

Lines changed: 5 additions & 1 deletion
```diff
@@ -323,8 +323,12 @@ def prepare_case(
         param_name = entry['parameter']
         template_path = entry['file_path'].lstrip('/')
         if param_name in parameters:
+            val = str(parameters[param_name])
+            value_map = entry.get('value_map')
+            if value_map:
+                val = value_map.get(val, val)
             shutil.copy2(
-                os.path.join(case.path, template_path + "." + parameters[param_name]),
+                os.path.join(case.path, template_path + "." + val),
                 os.path.join(case.path, template_path)
             )
     # Process parameters with foamlib
```
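To make the new lookup concrete, a minimal sketch of how a numeric fidelity value resolves to a file suffix (the `entry`/`parameters` values are hypothetical; keys are strings because the `value_map` validator in orchestrate.py coerces them):

```python
# Hypothetical entry/parameters, mirroring the hunk above
entry = {"parameter": "fidelity", "file_path": "/Allrun",
         "value_map": {"0": "meanline", "1": "CFD"}}
parameters = {"fidelity": 0}

val = str(parameters[entry["parameter"]])   # "0"
value_map = entry.get("value_map")
if value_map:
    val = value_map.get(val, val)           # "meanline"; falls back to raw "0" if unmapped
# shutil.copy2 then copies "Allrun.meanline" over "Allrun" in the case directory
```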

src/foambo/constraints.py

Lines changed: 4 additions & 0 deletions
```diff
@@ -148,6 +148,10 @@ def _patched_optimize_acqf(*args, **kwargs):
     # --- Inject nonlinear constraints ---
     if _active_nl_constraints and "nonlinear_inequality_constraints" not in kwargs:
         kwargs["nonlinear_inequality_constraints"] = _active_nl_constraints
+        # ic_generator is required when nonlinear constraints are present
+        if "ic_generator" not in kwargs:
+            from botorch.optim.initializers import gen_batch_initial_conditions
+            kwargs["ic_generator"] = gen_batch_initial_conditions
         log.debug("Injected %d nonlinear constraint(s) into optimize_acqf",
                   len(_active_nl_constraints))
 
```
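For context, BoTorch's `optimize_acqf` refuses to run with `nonlinear_inequality_constraints` unless initial conditions come from `batch_initial_conditions` or an `ic_generator`. A self-contained sketch of the call the patch enables (toy GP, and a deliberately trivial constraint so the sketch always runs; the tuple constraint format applies to recent BoTorch versions):

```python
import torch
from botorch.models import SingleTaskGP
from botorch.fit import fit_gpytorch_mll
from gpytorch.mlls import ExactMarginalLogLikelihood
from botorch.acquisition import LogExpectedImprovement
from botorch.optim import optimize_acqf
from botorch.optim.initializers import gen_batch_initial_conditions

train_X = torch.rand(8, 2, dtype=torch.double)
train_Y = (train_X ** 2).sum(dim=-1, keepdim=True)
gp = SingleTaskGP(train_X, train_Y)
fit_gpytorch_mll(ExactMarginalLogLikelihood(gp.likelihood, gp))
acqf = LogExpectedImprovement(gp, best_f=train_Y.max())

# g(x) >= 0 form: x1 + x2 <= 2 (holds everywhere in the unit box);
# True marks an intra-point constraint
nl_constraints = [(lambda x: 2.0 - x.sum(dim=-1), True)]

candidate, value = optimize_acqf(
    acq_function=acqf,
    bounds=torch.tensor([[0.0, 0.0], [1.0, 1.0]], dtype=torch.double),
    q=1,
    num_restarts=4,
    raw_samples=32,
    options={"batch_limit": 1},  # nonlinear constraints are optimized point-by-point
    nonlinear_inequality_constraints=nl_constraints,
    ic_generator=gen_batch_initial_conditions,  # without this, BoTorch raises
)
```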

src/foambo/default_config.py

Lines changed: 6 additions & 5 deletions
````diff
@@ -911,22 +911,23 @@ def __init__(self, **kwargs):
     nodes are only needed to override the default (e.g. pick MOMF).
 
     **Runner dispatch (recommended):** use ``file_substitution`` with
-    a string ``ChoiceParameter`` fidelity. Place ``Allrun.meanline``
-    + ``Allrun.CFD`` in the template case:
+    ``value_map`` to map numeric fidelity to file suffixes. Place
+    ``Allrun.meanline`` + ``Allrun.CFD`` in the template case:
     ```yaml
     experiment:
       parameters:
         - name: fidelity
-          values: ["meanline", "CFD"]
-          parameter_type: str
+          bounds: [0, 1]
+          parameter_type: int
           is_fidelity: true
-          target_value: "CFD"
+          target_value: 1
 
     optimization:
       case_runner:
         file_substitution:
           - parameter: fidelity
             file_path: /Allrun
+            value_map: {0: "meanline", 1: "CFD"}
     ```
 
     Other resolvable names: ``qMultiFidelityKnowledgeGradient``
````

src/foambo/docs_concepts.py

Lines changed: 15 additions & 20 deletions
````diff
@@ -655,15 +655,17 @@
 
 **Setup in foamBO:**
 
-1. Add a fidelity parameter with ``is_fidelity: true`` and ``target_value``:
+1. Add a **numeric** fidelity parameter with ``is_fidelity: true`` and ``target_value``.
+   Ax requires fidelity to be a ``RangeParameter`` (numeric), not a ``ChoiceParameter``.
+   Use ``int`` for discrete levels or ``float`` for continuous:
    ```yaml
    experiment:
      parameters:
        - name: fidelity
-         parameter_type: str
-         values: ["coarse", "fine"]
+         parameter_type: int
+         bounds: [0, 1]  # 0=cheap, 1=expensive
          is_fidelity: true
-         target_value: "fine"
+         target_value: 1
    ```
 
 2. Mark one metric as the cost signal with ``is_cost: true``:
````
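A minimal sketch of step 2, assuming the metric schema visible in the orchestrate.py hunk below (``name`` + ``is_cost``; all other metric fields omitted):

```yaml
metrics:
  - name: runtime     # wall-time per trial, reported by the case runner
    is_cost: true     # feeds the MF cost model; excluded from GP modeling
```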
````diff
@@ -719,29 +721,22 @@
 - foamBO does NOT auto-wire cost for MOMF; pass ``cost_call`` manually
   via ``botorch_acqf_options`` if needed.
 
-**Runner dispatch** via ``file_substitution`` (recommended):
-Use a string ``ChoiceParameter`` for fidelity and foamBO's built-in file
-substitution to swap the runner script (or a portion of it) per fidelity level. Place
-``Allrun.coarse`` and ``Allrun.fine`` in the template case:
+**Runner dispatch** via ``file_substitution`` with ``value_map`` (recommended):
+Since fidelity must be numeric, use ``value_map`` to map numeric values to
+human-readable file suffixes. Place ``Allrun.meanline`` and ``Allrun.CFD``
+in the template case:
 ```yaml
 optimization:
   case_runner:
     file_substitution:
       - parameter: fidelity
         file_path: /Allrun
+        value_map: {0: "meanline", 1: "CFD"}
 ```
-When ``fidelity=coarse``, the runner copies ``Allrun.coarse`` →
-``Allrun`` before execution. When ``fidelity=fine``, copies
-``Allrun.fine`` → ``Allrun``. No if/else branching in scripts needed.
-
-**Alternative** (continuous fidelity via env var):
-```bash
-if [ "$FIDELITY" = "0" ] || [ "$FIDELITY" = "0.0" ]; then
-    ./Allrun.coarse
-else
-    ./Allrun.fine
-fi
-```
+When ``fidelity=0``, copies ``Allrun.meanline`` → ``Allrun``.
+When ``fidelity=1``, copies ``Allrun.CFD`` → ``Allrun``.
+Without ``value_map``, the raw numeric value is used as the suffix
+(e.g. ``Allrun.0``, ``Allrun.1``).
 
 **Cost model evolution:** before any trials complete, uniform cost is
 assumed (cost ratio = 1). As ``is_cost`` data arrives, foamBO recomputes
````
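To illustrate the recomputation using the ``fid_weight`` formula from the optimize.py hunk below (hypothetical wall-times; the per-level mean aggregation shown here is an assumption):

```python
# Observed wall-times per fidelity level (hypothetical)
costs_by_fidelity = {0: [10.0, 12.0], 1: [95.0, 105.0]}

costs = [sum(v) / len(v) for v in costs_by_fidelity.values()]  # [11.0, 100.0]
fid_vals = list(costs_by_fidelity)
fid_range = max(fid_vals) - min(fid_vals)                      # 1
fid_weight = (max(costs) - min(costs)) / fid_range             # 89.0
# cost(fidelity) ≈ cost_intercept + fid_weight * fidelity
```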

src/foambo/optimize.py

Lines changed: 24 additions & 6 deletions
```diff
@@ -85,6 +85,11 @@ def _update_cost_state(client, mf_cost, log=None):
     """
     from ax.core.base_trial import TrialStatus as _TS
     from collections import defaultdict
+
+    def _get_fidelity_feature_index(cl, fid_param_name):
+        """Return the feature index of the fidelity parameter in the search space."""
+        param_names = list(cl._experiment.search_space.parameters.keys())
+        return param_names.index(fid_param_name)
     cost_metric_name = mf_cost["metric"]
     fidelity_param_name = mf_cost["fidelity_param"]
     # Collect observed costs per fidelity level (keyed by raw fidelity value)
@@ -128,12 +133,15 @@ def _update_cost_state(client, mf_cost, log=None):
         fid_weight = (max(costs) - min(costs)) / fid_range
     else:
         fid_weight = 1.0
-    # Write into acqf_opts — Ax passes these to the input constructor
+    # Write into acqf_opts — Ax passes these to the input constructor.
+    # fidelity_weights is keyed by feature index (same as target_fidelities),
+    # NOT by fidelity values. Ax extracts the fidelity feature index from
+    # search_space_digest.fidelity_features.
     opts = mf_cost["acqf_opts_ref"]
     opts["cost_intercept"] = max(cost_intercept, 1e-6)
-    opts["fidelity_weights"] = {
-        int(fv): fid_weight for fv in fid_vals
-    }
+    # Determine the fidelity parameter's feature index
+    fid_feature_idx = _get_fidelity_feature_index(client, mf_cost["fidelity_param"])
+    opts["fidelity_weights"] = {fid_feature_idx: fid_weight}
     if log:
         log.info("Multi-fidelity cost updated: intercept=%.2f, weight=%.2f "
                  "(from %d fidelity levels: %s)", cost_intercept, fid_weight,
@@ -273,12 +281,15 @@ def _patched_object_to_json(obj, **kwargs):
     _fidelity_cfg = getattr(exp_cfg, '_fidelity_params', None) or \
                     getattr(type(exp_cfg), '_fidelity_params', None)
     if _fidelity_cfg:
+        from ax.core.parameter import FixedParameter
         for pname, target_val in _fidelity_cfg.items():
            p = client._experiment.search_space.parameters.get(pname)
-            if p is not None:
+            if p is not None and not isinstance(p, FixedParameter):
                p._is_fidelity = True
                p._target_value = target_val
                log.info("Fidelity parameter: %s (target_value=%s)", pname, target_val)
+            elif isinstance(p, FixedParameter):
+                log.info("Fidelity parameter %s is fixed (specialized) — skipping MF wiring", pname)
 
     ## 2.0 Trial generation
     # Suppress Ax's verbose GenerationStrategy repr log
@@ -321,9 +332,16 @@ def _patched_object_to_json(obj, **kwargs):
     try:
         from botorch.models.gp_regression_fidelity import SingleTaskMultiFidelityGP
         from ax.generators.torch.botorch_modular.surrogate import SurrogateSpec
+        from ax.generators.torch.botorch_modular.utils import ModelConfig
+        from foambo.robustness import MFHVKGAcquisition
         gk["surrogate_spec"] = SurrogateSpec(
-            botorch_model_class=SingleTaskMultiFidelityGP,
+            model_configs=[ModelConfig(
+                botorch_model_class=SingleTaskMultiFidelityGP,
+            )],
+            allow_batched_models=False,
         )
+        # qMFHVKG input constructor doesn't accept X_pending
+        spec.generator_kwargs["acquisition_class"] = MFHVKGAcquisition
         log.info("Multi-fidelity: auto-selected SingleTaskMultiFidelityGP surrogate "
                  "(fidelity param: %s)", _fidelity_params[0].name)
     # Wire cost model for MF acquisition.
```
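Downstream, BoTorch's knowledge-gradient input constructors turn ``cost_intercept`` and ``fidelity_weights`` into an affine cost model wrapped in an inverse-cost utility. A sketch of what the written values imply (feature index 2 and the weights are hypothetical, continuing the numbers from the worked example above):

```python
import torch
from botorch.models.cost import AffineFidelityCostModel
from botorch.acquisition.cost_aware import InverseCostWeightedUtility

# Keyed by the fidelity *feature index* (column 2 of a 3-feature design),
# exactly as _update_cost_state now writes fidelity_weights.
cost_model = AffineFidelityCostModel(fidelity_weights={2: 89.0}, fixed_cost=11.0)
cost_aware_utility = InverseCostWeightedUtility(cost_model=cost_model)

X = torch.tensor([[0.3, 0.7, 0.0],   # fidelity feature = 0 -> cost 11
                  [0.3, 0.7, 1.0]])  # fidelity feature = 1 -> cost 100
print(cost_model(X).squeeze(-1))     # tensor([ 11., 100.])
```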

src/foambo/orchestrate.py

Lines changed: 24 additions & 3 deletions
```diff
@@ -917,11 +917,26 @@ class VariableSubstOptions(FoamBOBaseModel):
 
 
 class FileSubstOptions(FoamBOBaseModel):
-    """Replaces a case file with a variant based on a choice parameter value."""
-    parameter: str = Field(description="Name of the choice parameter whose value selects the file variant",
+    """Replaces a case file with a variant based on a parameter value."""
+    parameter: str = Field(description="Name of the parameter whose value selects the file variant",
                            examples=["y"])
     file_path: str = Field(description="Relative path to the file (e.g. `/system/fvSolution`). Variants must exist as `<file_path>.<value>`",
                            examples=["/constant/y"])
+    value_map: Dict[str, str] | None = Field(default=None,
+        description=(
+            "Optional mapping from parameter values to file suffixes. "
+            "Useful for numeric parameters (e.g. fidelity) where the file "
+            "variant has a human-readable name.\n\n"
+            "Example: ``{0: meanline, 1: CFD}`` maps fidelity=0 to "
+            "``Allrun.meanline`` and fidelity=1 to ``Allrun.CFD``."
+        ))
+
+    @field_validator("value_map", mode="before")
+    @classmethod
+    def coerce_value_map_keys(cls, v):
+        if isinstance(v, dict | DictConfig):
+            return {str(k): str(val) for k, val in v.items()}
+        return v
 
 
 class FoamJobRunnerOptions(FoamBOBaseModel):
@@ -1043,8 +1058,14 @@ def to_objective_metrics_dict(self):
         }
 
     def to_tracking_metrics_dict(self):
+        # Register non-objective, non-cost metrics as tracking so they appear
+        # in experiment.signature_to_metric (required by early stopping).
+        # is_cost metrics are excluded — they inflate objective_weights
+        # (size mismatch with GP model outputs) and their data is fetched
+        # by the runner anyway (just not modeled by the GP).
         return {
-            "metrics": [m.to_metric() for m in self.metrics if not m.name in self.objective]
+            "metrics": [m.to_metric() for m in self.metrics
+                        if m.name not in self.objective and not m.is_cost]
         }
 
     def to_runner_dict(self):
```
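Usage sketch of the new field (assuming ``FileSubstOptions`` is importable as defined above): the ``mode="before"`` validator normalizes keys and values to strings, so YAML ``{0: meanline, 1: CFD}`` and a Python dict with int keys both end up in the string-keyed form that ``prepare_case`` looks up:

```python
from foambo.orchestrate import FileSubstOptions

opt = FileSubstOptions(parameter="fidelity", file_path="/Allrun",
                       value_map={0: "meanline", 1: "CFD"})
assert opt.value_map == {"0": "meanline", "1": "CFD"}
```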

src/foambo/preflight.py

Lines changed: 25 additions & 0 deletions
```diff
@@ -263,6 +263,31 @@ def _check_config_coherence(cfg: DictConfig, r: PreflightResult) -> PreflightResult
             r.failed("MF cost metric",
                      f"multiple is_cost metrics: {cost_metrics}; exactly one required")
 
+    # MF + robust + string choice params: qMFKG requires continuous optimizer,
+    # string categoricals can't be relaxed to continuous.
+    has_robust = cfg.get("robust_optimization") is not None
+    if has_fidelity:
+        choice_params = [p["name"] for p in exp.get("parameters", [])
+                         if "values" in p and p.get("parameter_type") == "str"]
+        if choice_params:
+            if has_robust:
+                r.failed("MF + robust + string params",
+                         f"string choice parameters {choice_params} are incompatible with "
+                         "multi-fidelity + robust optimization (qMFKG requires continuous optimizer). "
+                         "Use numeric encoding with file_substitution.value_map instead")
+            else:
+                r.warned("MF + string params",
+                         f"string choice parameters {choice_params} may cause issues with "
+                         "multi-fidelity acquisition (KG-family requires continuous optimizer). "
+                         "Consider numeric encoding with file_substitution.value_map")
+
+        # Fidelity param itself must be numeric
+        fid_params = [p for p in exp.get("parameters", []) if p.get("is_fidelity")]
+        for fp in fid_params:
+            if "values" in fp:
+                r.failed(f"Fidelity parameter '{fp['name']}'",
+                         "must be numeric (bounds + parameter_type int/float), not a choice parameter. "
+                         "Ax requires numeric fidelity for the GP kernel")
+
     param_names = {p["name"] for p in exp.get("parameters", [])}
     for constraint in exp.get("parameter_constraints", []):
         # Validate with sympy if available, else fall back to token check
```
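Two config fragments showing what the new checks reject and accept (a sketch; only the fields the checks inspect are shown):

```yaml
# Rejected: fidelity declared as a string choice parameter
experiment:
  parameters:
    - name: fidelity
      parameter_type: str
      values: ["meanline", "CFD"]   # preflight fails: fidelity must be numeric
      is_fidelity: true
      target_value: "CFD"
---
# Accepted: numeric fidelity, runner dispatch via value_map
experiment:
  parameters:
    - name: fidelity
      parameter_type: int
      bounds: [0, 1]
      is_fidelity: true
      target_value: 1
```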
