Commit 6528716

Browse files
Author: Elwardi

feat: support bootstrapping from robust optimizations

1 parent dde8083 · commit 6528716

12 files changed · 731 additions & 6 deletions

src/foambo/api_server.py

Lines changed: 65 additions & 0 deletions
```diff
@@ -404,6 +404,71 @@ def config_docs():
     return SafeJSONResponse(content=get_config_docs())
 
 
+@app.get("/api/v1/bootstrap-lineage")
+def bootstrap_lineage():
+    """Return the active run's bootstrap lineage, or empty if none."""
+    raw_cfg = getattr(_state, "raw_cfg", None)
+    if not raw_cfg:
+        return SafeJSONResponse(content={})
+    lineage = raw_cfg.get("bootstrap_lineage") if hasattr(raw_cfg, "get") else None
+    if not lineage:
+        return SafeJSONResponse(content={})
+    return SafeJSONResponse(content=lineage)
+
+
+@app.get("/api/v1/config/bootstrap-preview")
+def config_bootstrap_preview(path: str):
+    """Inspect a candidate bootstrap state file.
+
+    Returns the parent experiment name, search space parameters, and any
+    robust_optimization context groups — so the config builder can populate
+    a specialize: editor with valid parameter choices.
+    """
+    import json, os
+    if not path:
+        return SafeJSONResponse(content={"error": "missing path"}, status_code=400)
+    abs_path = path if os.path.isabs(path) else os.path.abspath(path)
+    if not os.path.isfile(abs_path):
+        return SafeJSONResponse(content={"error": f"not found: {abs_path}"}, status_code=404)
+    try:
+        with open(abs_path) as f:
+            state = json.load(f)
+    except (json.JSONDecodeError, OSError) as e:
+        return SafeJSONResponse(content={"error": f"unreadable: {e}"}, status_code=400)
+    parent_cfg = state.get("foambo_config")
+    if parent_cfg is None:
+        return SafeJSONResponse(
+            content={"error": "state has no embedded foambo_config"},
+            status_code=400,
+        )
+    exp = parent_cfg.get("experiment", {}) or {}
+    robust = parent_cfg.get("robust_optimization") or {}
+    ctx_groups = robust.get("context_groups", []) if isinstance(robust, dict) else []
+    params = []
+    for p in exp.get("parameters", []):
+        if not isinstance(p, dict):
+            continue
+        groups = p.get("groups", []) or []
+        params.append({
+            "name": p.get("name"),
+            "type": p.get("parameter_type"),
+            "bounds": p.get("bounds"),
+            "values": p.get("values"),
+            "groups": groups,
+            "is_context": any(g in ctx_groups for g in groups),
+        })
+    exp_block = state.get("experiment") or {}
+    n_trials = len(state.get("trials") or {})
+    return SafeJSONResponse(content={
+        "parent_name": exp.get("name"),
+        "parent_path": abs_path,
+        "context_groups": list(ctx_groups),
+        "parameters": params,
+        "n_trials": n_trials,
+        "immutable": bool(exp_block.get("immutable_search_space_and_opt_config")),
+    })
+
+
 def _check_etag(endpoint: str, if_none_match: Optional[str]) -> Optional[Response]:
     """Return a 304 response if the ETag matches, else None."""
     current = _state.etag(endpoint)
```
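
For orientation, a minimal client-side sketch of the two endpoints added above. The base URL/port and the state-file path are placeholder assumptions, not values from this repo; the response keys mirror the handlers:

```python
import requests

BASE = "http://localhost:8000/api/v1"  # assumed host/port for the foamBO API server

# Inspect a candidate parent state before writing a child config.
preview = requests.get(
    f"{BASE}/config/bootstrap-preview",
    params={"path": "runs/myCase_client_state.json"},  # hypothetical state file
).json()
print(preview["parent_name"], preview["n_trials"])
for p in preview["parameters"]:
    if p["is_context"]:
        # Context parameters are exactly what a specialize: editor would offer.
        print("context param:", p["name"], p.get("bounds") or p.get("values"))

# Once a bootstrapped run is active, its lineage is served here ({} if none).
lineage = requests.get(f"{BASE}/bootstrap-lineage").json()
print(lineage.get("parent_name"), lineage.get("specialize"))
```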

src/foambo/bootstrap.py

Lines changed: 243 additions & 0 deletions
@@ -0,0 +1,243 @@
```python
"""Bootstrap support for foamBO.

Allows a YAML config to inherit from a previously saved experiment state
(``bootstrap: path/to/<name>_client_state.json``). The embedded ``foambo_config``
from the JSON state is used as the base, with the current YAML merged on top
via OmegaConf. This supports two workflows:

1. **Continue** — load prior GP + trials under a new experiment name / extra
   trials. The YAML typically overrides only ``experiment.name`` and
   ``orchestration_settings.n_trials``.
2. **Specialize** — pin context parameters from a robust run to specific values
   via a top-level ``specialize: {param: value, ...}`` mapping. The search space
   and recorded trial arms are rewritten so only the remaining design variables
   are optimized; prior trial outcomes are retained as training data at the
   clamped context.
"""

from __future__ import annotations

import json
import logging
import os
from typing import Any

from omegaconf import DictConfig, OmegaConf

log = logging.getLogger(__name__)


_BOOTSTRAP_META_KEY = "_bootstrap_meta"


def resolve_bootstrap(cfg: DictConfig, yaml_path: str | None) -> DictConfig:
    """If cfg has ``bootstrap:``, merge the saved parent config with the current YAML.

    Returns the merged config. The bootstrap path and any ``specialize`` map are
    stashed under ``cfg._bootstrap_meta`` for downstream loaders to consume. The
    top-level ``bootstrap`` and ``specialize`` keys are removed from the merged
    result.

    Raises ``FileNotFoundError`` if the referenced JSON does not exist and
    ``ValueError`` if it lacks an embedded ``foambo_config``.
    """
    if "bootstrap" not in cfg or cfg.bootstrap is None:
        return cfg

    raw_path = str(cfg.bootstrap)
    base_dir = os.path.dirname(os.path.abspath(yaml_path)) if yaml_path else os.getcwd()
    abs_path = raw_path if os.path.isabs(raw_path) else os.path.join(base_dir, raw_path)
    if not os.path.isfile(abs_path):
        raise FileNotFoundError(f"bootstrap path not found: {abs_path}")

    with open(abs_path) as f:
        state = json.load(f)
    parent_cfg = state.get("foambo_config")
    if parent_cfg is None:
        raise ValueError(
            f"bootstrap JSON has no embedded foambo_config: {abs_path}. "
            "Re-save the parent experiment with a recent foamBO version."
        )

    # Drop meta keys from the override prior to merge.
    override = OmegaConf.to_container(cfg, resolve=False)
    override.pop("bootstrap", None)
    specialize = override.pop("specialize", None)

    # Warn if the YAML attempts to change the parent's optimization_config —
    # Ax locks objectives/metrics/constraints on the loaded experiment and the
    # override will be silently ignored.
    _warn_optimization_override(parent_cfg.get("optimization", {}) or {},
                                override.get("optimization", {}) or {})

    base = OmegaConf.create(parent_cfg)
    parent_name = None
    try:
        parent_name = str(base.experiment.name)
    except Exception:
        pass
    merged = OmegaConf.merge(base, OmegaConf.create(override))
    # Attach internal meta for optimize() to pick up.
    OmegaConf.update(
        merged,
        _BOOTSTRAP_META_KEY,
        {"client_state_path": abs_path, "specialize": specialize or {}},
        force_add=True,
    )
    # Public lineage record — preserved in the merged cfg, embedded in the
    # saved state JSON, and surfaced on the dashboard.
    lineage = {
        "parent_state_path": abs_path,
        "parent_name": parent_name,
        "specialize": specialize or {},
    }
    OmegaConf.update(merged, "bootstrap_lineage", lineage, force_add=True)
    log.info("Bootstrap: loaded parent config from %s", abs_path)
    if specialize:
        log.info("Bootstrap: specializing %d parameter(s): %s",
                 len(specialize), ", ".join(f"{k}={v}" for k, v in specialize.items()))
    return merged


_LOCKED_OPT_FIELDS = ("objective", "metrics", "outcome_constraints", "objective_thresholds")


def _warn_optimization_override(parent: dict, override: dict) -> None:
    """Log a warning for any locked optimization field that the YAML tries to change."""
    diffs = []
    for key in _LOCKED_OPT_FIELDS:
        if key not in override:
            continue
        new_val = override[key]
        old_val = parent.get(key)
        if _normalize(new_val) != _normalize(old_val):
            diffs.append(f"  - {key}: parent={_fmt(old_val)} YAML={_fmt(new_val)}")
    if not diffs:
        return
    log.warning(
        "Bootstrap: the YAML override tries to change locked optimization fields. "
        "Ax's Client freezes objectives/metrics on the loaded experiment, so these "
        "changes will be IGNORED at runtime:\n%s\n"
        "To use a different optimization config, bootstrap is not the right tool — "
        "start a fresh experiment and seed past trials via "
        "trial_generation.generation_nodes with `file_path:` instead.",
        "\n".join(diffs),
    )


def _normalize(v: Any) -> Any:
    """Canonicalize a value for equality comparison (stable ordering, stripped whitespace)."""
    if isinstance(v, list):
        # Normalize each element; for lists of dicts, sort by a stable key.
        normed = [_normalize(x) for x in v]
        try:
            return sorted(normed, key=lambda x: json.dumps(x, sort_keys=True))
        except TypeError:
            return normed
    if isinstance(v, dict):
        return {k: _normalize(val) for k, val in v.items()}
    if isinstance(v, str):
        return v.strip()
    return v


def _fmt(v: Any) -> str:
    try:
        return json.dumps(v, default=str)
    except Exception:
        return str(v)


def bootstrap_meta(cfg: DictConfig) -> dict[str, Any] | None:
    """Return the bootstrap meta dict if present, else None."""
    meta = cfg.get(_BOOTSTRAP_META_KEY) if hasattr(cfg, "get") else None
    if meta is None:
        return None
    return OmegaConf.to_container(meta, resolve=True)


def strip_bootstrap_meta(cfg: DictConfig) -> DictConfig:
    """Remove the bootstrap meta key from cfg (call before embedding in state)."""
    if _BOOTSTRAP_META_KEY in cfg:
        del cfg[_BOOTSTRAP_META_KEY]
    return cfg


def apply_specialization(client, specialize: dict[str, Any]) -> None:
    """Pin context parameters to fixed values on a loaded client.

    Mutates the experiment's search space (replacing affected parameters with
    ``FixedParameter``) and rewrites recorded arm parameters so prior trials
    remain valid training data under the new space. The GP refits on the next
    generation call.

    Limitations:
    - Information about how those parameters influence outcomes is lost: all
      trials now appear to share the same context value. This is the intended
      behavior for "optimize conditional on this context point".
    - Parent experiments carry ``immutable_search_space_and_opt_config``; the
      flag is cleared here before mutation rather than raising, since the live
      search space is still a plain attribute (see below).
    """
    if not specialize:
        return

    from ax.core.parameter import FixedParameter, RangeParameter, ChoiceParameter
    from ax.core.search_space import SearchSpace

    exp = client._experiment
    # Ax sets immutable_search_space_and_opt_config=True on every experiment
    # produced via Client.configure_experiment. That only means generator runs
    # skipped caching a per-trial search-space copy; the live search space is
    # still a plain attribute. Unset the flag so we can mutate it here.
    if getattr(exp, "immutable_search_space_and_opt_config", False):
        try:
            from ax.core.experiment import Keys
            exp._properties.pop(Keys.IMMUTABLE_SEARCH_SPACE_AND_OPT_CONF, None)
        except Exception:
            exp._properties = {
                k: v for k, v in (exp._properties or {}).items()
                if "immutable" not in str(k).lower()
            }
        log.info("Specialization: cleared parent's immutable_search_space_and_opt_config flag")

    parent_params = exp.search_space.parameters
    missing = [k for k in specialize if k not in parent_params]
    if missing:
        raise ValueError(f"specialize keys not in parent search space: {missing}")

    new_params = []
    for name, p in parent_params.items():
        if name in specialize:
            value = specialize[name]
            # Coerce to the parameter type (YAML may give a str/int/float mix).
            if isinstance(p, RangeParameter):
                value = float(value) if p.parameter_type.name == "FLOAT" else int(value)
            new_params.append(FixedParameter(
                name=name,
                parameter_type=p.parameter_type,
                value=value,
            ))
        else:
            new_params.append(p.clone())
    new_ss = SearchSpace(parameters=new_params)
    exp._search_space = new_ss

    # Clamp arm parameters on every existing trial so past observations are
    # consistent with the new search space.
    for t in exp.trials.values():
        for arm in t.arms:
            for pname, pval in specialize.items():
                if pname in arm._parameters:
                    arm._parameters[pname] = pval

    # Invalidate any cached adapter state so the next gen refits on the clamped
    # data under the new search space.
    gs = getattr(client, "_generation_strategy", None)
    if gs is not None and getattr(gs, "adapter", None) is not None:
        try:
            gs._curr._fitted_adapter = None
        except Exception:
            pass

    log.info("Specialization applied: search space now has %d fixed parameter(s)",
             sum(1 for p in new_params if isinstance(p, FixedParameter)))
```
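
As a usage sketch of the merge path above: file names and the ``inletVelocity`` parameter below are hypothetical; only ``resolve_bootstrap`` and ``bootstrap_meta`` come from this module.

```python
from omegaconf import OmegaConf
from foambo.bootstrap import bootstrap_meta, resolve_bootstrap

# Workflow 1 — Continue: same search space, new name, more trials.
continue_cfg = OmegaConf.create({
    "bootstrap": "myCase_client_state.json",      # hypothetical parent state
    "experiment": {"name": "myCase-round2"},
    "orchestration_settings": {"n_trials": 40},
})

# Workflow 2 — Specialize: clamp a robust run's context parameter.
specialize_cfg = OmegaConf.create({
    "bootstrap": "myCase_client_state.json",
    "experiment": {"name": "myCase-inlet10"},
    "specialize": {"inletVelocity": 10.0},        # hypothetical context parameter
})

# Relative bootstrap paths resolve against the YAML's directory.
merged = resolve_bootstrap(specialize_cfg, yaml_path="child.yaml")
meta = bootstrap_meta(merged)
# meta == {"client_state_path": "/abs/.../myCase_client_state.json",
#          "specialize": {"inletVelocity": 10.0}}
# optimize() later picks meta up and hands meta["specialize"] to
# apply_specialization(client, ...).
```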

src/foambo/config.py

Lines changed: 9 additions & 1 deletion
```diff
@@ -22,12 +22,20 @@ def load_config(config_path: Optional[str] = None) -> DictConfig:
     """
     Load configuration from a YAML file and return as OmegaConf DictConfig.
     If no path is given, use the default config filename.
+
+    If the YAML contains a top-level ``bootstrap:`` pointing at a saved state
+    JSON, the embedded parent ``foambo_config`` is loaded and the current YAML
+    is merged on top.
     """
     if config_path is None:
         config_path = DEFAULT_CONFIG
     with open(config_path, 'r') as f:
         data = yaml.safe_load(f)
-    return OmegaConf.create(data)
+    cfg = OmegaConf.create(data)
+    if isinstance(cfg, DictConfig) and cfg.get("bootstrap"):
+        from .bootstrap import resolve_bootstrap
+        cfg = resolve_bootstrap(cfg, yaml_path=config_path)
+    return cfg
 
 
 def save_default_config(path: Optional[str] = None):
     """
```
