"""Bootstrap support for foamBO.

Allows a YAML config to inherit from a previously saved experiment state
(``bootstrap: path/to/<name>_client_state.json``). The embedded ``foambo_config``
from the JSON state is used as the base, with the current YAML merged on top
via OmegaConf. This supports two workflows:

1. **Continue** — resume with the prior GP and trials under a new experiment
   name and/or an extended trial budget; the YAML typically overrides only
   ``experiment.name`` and ``orchestration_settings.n_trials``.
2. **Specialize** — pin context parameters from a robust run to specific values
   via a top-level ``specialize: {param: value, ...}`` mapping (see the sketch
   below). The search space and recorded trial arms are rewritten so only the
   remaining design variables are optimized; prior trial outcomes are retained
   as training data at the clamped context.
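
Usage sketch (illustrative; the parameter name, values, and file paths below
are hypothetical, not part of foamBO's schema)::

    child = OmegaConf.create({
        "bootstrap": "robust_run_client_state.json",
        "specialize": {"inletVelocity": 2.5},
        "experiment": {"name": "specializedAtV2p5"},
        "orchestration_settings": {"n_trials": 20},
    })
    merged = resolve_bootstrap(child, yaml_path="child.yaml")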
"""

from __future__ import annotations

import json
import logging
import os
from typing import Any

from omegaconf import DictConfig, OmegaConf

log = logging.getLogger(__name__)


_BOOTSTRAP_META_KEY = "_bootstrap_meta"


def resolve_bootstrap(cfg: DictConfig, yaml_path: str | None) -> DictConfig:
    """If cfg has ``bootstrap:``, merge saved parent config with the current YAML.

    Returns the merged config. The bootstrap path and any ``specialize`` map are
    stashed under ``cfg._bootstrap_meta`` for downstream loaders to consume. The
    top-level ``bootstrap`` and ``specialize`` keys are removed from the merged
    result.

    Raises ``FileNotFoundError`` if the referenced JSON does not exist and
    ``ValueError`` if it lacks an embedded ``foambo_config``.
    """
    if "bootstrap" not in cfg or cfg.bootstrap is None:
        return cfg

    raw_path = str(cfg.bootstrap)
    base_dir = os.path.dirname(os.path.abspath(yaml_path)) if yaml_path else os.getcwd()
    abs_path = raw_path if os.path.isabs(raw_path) else os.path.join(base_dir, raw_path)
    if not os.path.isfile(abs_path):
        raise FileNotFoundError(f"bootstrap path not found: {abs_path}")

    with open(abs_path) as f:
        state = json.load(f)
    parent_cfg = state.get("foambo_config")
    if parent_cfg is None:
        raise ValueError(
            f"bootstrap JSON has no embedded foambo_config: {abs_path}. "
            "Re-save the parent experiment with a recent foamBO version."
        )

    # Drop meta keys from the override prior to merge.
    override = OmegaConf.to_container(cfg, resolve=False)
    override.pop("bootstrap", None)
    specialize = override.pop("specialize", None)

    # Warn if the YAML attempts to change the parent's optimization_config —
    # Ax locks objectives/metrics/constraints on the loaded experiment and the
    # override will be silently ignored.
    _warn_optimization_override(parent_cfg.get("optimization", {}) or {},
                                override.get("optimization", {}) or {})

    base = OmegaConf.create(parent_cfg)
    parent_name = None
    try:
        parent_name = str(base.experiment.name)
    except Exception:
        pass
    merged = OmegaConf.merge(base, OmegaConf.create(override))
    # Attach internal meta for optimize() to pick up.
    OmegaConf.update(
        merged,
        _BOOTSTRAP_META_KEY,
        {"client_state_path": abs_path, "specialize": specialize or {}},
        force_add=True,
    )
    # Public lineage record — preserved in the merged cfg, embedded in the
    # saved state JSON, and surfaced on the dashboard.
    lineage = {
        "parent_state_path": abs_path,
        "parent_name": parent_name,
        "specialize": specialize or {},
    }
    OmegaConf.update(merged, "bootstrap_lineage", lineage, force_add=True)
    log.info("Bootstrap: loaded parent config from %s", abs_path)
    if specialize:
        log.info("Bootstrap: specializing %d parameter(s): %s",
                 len(specialize), ", ".join(f"{k}={v}" for k, v in specialize.items()))
    return merged


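# Illustrative shape of the records resolve_bootstrap() stashes (paths and
# values are hypothetical):
#
#   merged._bootstrap_meta   == {"client_state_path": "/abs/path/robust_client_state.json",
#                                "specialize": {"inletVelocity": 2.5}}
#   merged.bootstrap_lineage == {"parent_state_path": "/abs/path/robust_client_state.json",
#                                "parent_name": "robustRun",
#                                "specialize": {"inletVelocity": 2.5}}
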
_LOCKED_OPT_FIELDS = ("objective", "metrics", "outcome_constraints", "objective_thresholds")


def _warn_optimization_override(parent: dict, override: dict) -> None:
    """Log a warning for any locked optimization field that the YAML tries to change."""
    diffs = []
    for key in _LOCKED_OPT_FIELDS:
        if key not in override:
            continue
        new_val = override[key]
        old_val = parent.get(key)
        if _normalize(new_val) != _normalize(old_val):
            diffs.append(f" - {key}: parent={_fmt(old_val)} YAML={_fmt(new_val)}")
    if not diffs:
        return
    log.warning(
        "Bootstrap: the YAML override tries to change locked optimization fields. "
        "Ax's Client freezes objectives/metrics on the loaded experiment, so these "
        "changes will be IGNORED at runtime:\n%s\n"
        "To use a different optimization config, bootstrap is not the right tool — "
        "start a fresh experiment and seed past trials via "
        "trial_generation.generation_nodes with `file_path:` instead.",
        "\n".join(diffs),
    )


def _normalize(v: Any) -> Any:
    """Canonicalize a value for equality comparison (stable ordering, stripped whitespace)."""
    if isinstance(v, list):
        # Normalize each element; for lists of dicts, sort by a stable key.
        normed = [_normalize(x) for x in v]
        try:
            return sorted(normed, key=lambda x: json.dumps(x, sort_keys=True))
        except TypeError:
            return normed
    if isinstance(v, dict):
        return {k: _normalize(val) for k, val in v.items()}
    if isinstance(v, str):
        return v.strip()
    return v
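
# Illustrative (hypothetical metric names): _normalize makes purely cosmetic YAML
# differences compare equal, e.g.
#     _normalize([{"metric": "Cd "}, {"metric": "Cl"}])
#     == _normalize([{"metric": "Cl"}, {"metric": "Cd"}])
# so reordered entries or stray whitespace do not trigger the locked-field warning.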


def _fmt(v: Any) -> str:
    try:
        return json.dumps(v, default=str)
    except Exception:
        return str(v)


def bootstrap_meta(cfg: DictConfig) -> dict[str, Any] | None:
    """Return the bootstrap meta dict if present, else None."""
    meta = cfg.get(_BOOTSTRAP_META_KEY) if hasattr(cfg, "get") else None
    if meta is None:
        return None
    return OmegaConf.to_container(meta, resolve=True)


def strip_bootstrap_meta(cfg: DictConfig) -> DictConfig:
    """Remove the bootstrap meta key from cfg (call before embedding in state)."""
    if _BOOTSTRAP_META_KEY in cfg:
        del cfg[_BOOTSTRAP_META_KEY]
    return cfg
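

# Downstream wiring sketch (hedged; ``load_client`` is a hypothetical placeholder
# for whatever routine restores the Ax client from the saved state file):
#
#     meta = bootstrap_meta(merged_cfg)
#     if meta is not None:
#         client = load_client(meta["client_state_path"])
#         apply_specialization(client, meta["specialize"])
#     strip_bootstrap_meta(merged_cfg)  # before re-embedding cfg in the new state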


def apply_specialization(client, specialize: dict[str, Any]) -> None:
    """Pin context parameters to fixed values on a loaded client.

    Mutates the experiment's search space (replacing affected parameters with
    ``FixedParameter``) and rewrites recorded arm parameters so prior trials
    remain valid training data under the new space. The GP refits on the next
    generation call (see the example below).

    Limitations:
    - Information about how those parameters influence outcomes is lost: all
      trials now appear to share the same context value. This is the intended
      behavior for "optimize conditional on this context point".
    - Experiments saved with Ax's ``immutable_search_space_and_opt_config`` flag
      (set by ``Client.configure_experiment``) have that flag cleared here so
      the search space can be mutated in place.
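
    Illustrative example (hypothetical parameter names)::

        # parent space: x (design) in [0, 1], ctx (context) in [1, 5]
        apply_specialization(client, {"ctx": 2.5})
        # -> ctx becomes FixedParameter(2.5), x stays a RangeParameter, and
        #    every recorded arm now reports ctx=2.5, so the GP refits
        #    conditional on that context value.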
    """
    if not specialize:
        return

    from ax.core.parameter import FixedParameter, RangeParameter
    from ax.core.search_space import SearchSpace

    exp = client._experiment
    # Ax sets immutable_search_space_and_opt_config=True on every experiment
    # produced via Client.configure_experiment. That only means generator runs
    # skipped caching a per-trial search-space copy; the live search space is
    # still a plain attribute. Unset the flag so we can mutate it here.
    if getattr(exp, "immutable_search_space_and_opt_config", False):
        try:
            from ax.core.experiment import Keys
            exp._properties.pop(Keys.IMMUTABLE_SEARCH_SPACE_AND_OPT_CONF, None)
        except Exception:
            exp._properties = {
                k: v for k, v in (exp._properties or {}).items()
                if "immutable" not in str(k).lower()
            }
        log.info("Specialization: cleared parent's immutable_search_space_and_opt_config flag")

    parent_params = exp.search_space.parameters
    missing = [k for k in specialize if k not in parent_params]
    if missing:
        raise ValueError(f"specialize keys not in parent search space: {missing}")

    new_params = []
    clamped_values: dict[str, Any] = {}
    for name, p in parent_params.items():
        if name in specialize:
            value = specialize[name]
            # Coerce to the parameter's type (YAML may give a str/int/float mix).
            if isinstance(p, RangeParameter):
                value = float(value) if p.parameter_type.name == "FLOAT" else int(value)
            clamped_values[name] = value
            new_params.append(FixedParameter(
                name=name,
                parameter_type=p.parameter_type,
                value=value,
            ))
        else:
            new_params.append(p.clone())
    new_ss = SearchSpace(parameters=new_params)
    exp._search_space = new_ss

    # Clamp arm parameters on every existing trial so past observations are
    # consistent with the new search space (use the coerced values so their
    # types match the rewritten parameters).
    for t in exp.trials.values():
        for arm in t.arms:
            for pname, pval in clamped_values.items():
                if pname in arm._parameters:
                    arm._parameters[pname] = pval

    # Invalidate any cached adapter state so the next gen refits on the clamped
    # data under the new search space.
    gs = getattr(client, "_generation_strategy", None)
    if gs is not None and getattr(gs, "adapter", None) is not None:
        try:
            gs._curr._fitted_adapter = None
        except Exception:
            pass

    log.info("Specialization applied: search space now has %d fixed parameter(s)",
             sum(1 for p in new_params if isinstance(p, FixedParameter)))