@@ -149,6 +149,8 @@ def from_adapters(
         ]:
             data = stacked[layer]
             comps, svals, mean = compute_svd(data, num_components=None, center=True)
+            check_tensor_health(comps, f"{layer}.components_{side.lower()}")
+            check_tensor_health(mean, f"{layer}.mean_{side.lower()}")
 
             if adaptive_k:
                 # Per-layer: each layer/side gets its own k
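The diff only shows the call sites of check_tensor_health; its body lives elsewhere in the module. A minimal sketch of what such a guard plausibly does, with the signature taken from the calls above and the implementation an assumption: fail fast on NaN/Inf, which SVD can produce from degenerate or non-finite input stacks.

import torch
from torch import Tensor

def check_tensor_health(tensor: Tensor, name: str) -> None:
    # Hypothetical implementation; the real helper is outside this diff.
    # Raise at construction time so a bad SVD result is caught immediately,
    # not when the subspace is first used.
    if torch.isnan(tensor).any():
        raise ValueError(f"{name} contains NaN values")
    if torch.isinf(tensor).any():
        raise ValueError(f"{name} contains Inf values")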
@@ -265,6 +267,12 @@ def absorb(self, new_adapter: LoRAWeights, new_task_id: str) -> None:
         reruns SVD to produce an updated basis.
         """
         check_adapter_matches_subspace(new_adapter, self, "absorb")
+        if new_task_id in self.tasks:
+            import warnings
+            warnings.warn(
+                f"Task '{new_task_id}' already exists and will be overwritten by absorb.",
+                stacklevel=2,
+            )
         logger.info("Absorbing adapter '%s' (full SVD recompute, %d existing tasks)", new_task_id, len(self.tasks))
         # Reconstruct all existing tasks as full adapters
         all_adapters = []
@@ -308,6 +316,12 @@ def absorb_incremental(self, new_adapter: LoRAWeights, new_task_id: str) -> None:
         approximation trade-off.
         """
         check_adapter_matches_subspace(new_adapter, self, "absorb_incremental")
+        if new_task_id in self.tasks:
+            import warnings
+            warnings.warn(
+                f"Task '{new_task_id}' already exists and will be overwritten by absorb_incremental.",
+                stacklevel=2,
+            )
         logger.debug("Absorbing adapter '%s' incrementally", new_task_id)
         loadings_a: dict[str, Tensor] = {}
         loadings_b: dict[str, Tensor] = {}
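Both absorb paths now emit a UserWarning instead of silently clobbering an existing task's loadings. A usage sketch, assuming subspace and adapter are a populated SharedSubspace and a LoRAWeights built elsewhere:

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    subspace.absorb(adapter, "task-a")  # "task-a" was absorbed previously

assert any("already exists" in str(w.message) for w in caught)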
@@ -390,6 +404,11 @@ def from_adapters_streaming(
         paths = [Path(p) for p in adapter_paths]
         if task_ids is None:
             task_ids = [p.name for p in paths]
+        if len(task_ids) != len(paths):
+            raise ValueError(
+                f"task_ids length ({len(task_ids)}) must match "
+                f"adapter_paths length ({len(paths)})"
+            )
 
         # Initialize from first adapter(s) — use first two if available
         # so SVD has enough samples to find >1 component
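The new length check turns a mismatched paths/IDs pairing into an immediate, descriptive error rather than a silent mis-assignment later. A small illustration, assuming from_adapters_streaming is a constructor on SharedSubspace as the surrounding code suggests, with hypothetical paths:

try:
    SharedSubspace.from_adapters_streaming(
        adapter_paths=["runs/ner", "runs/qa", "runs/sum"],
        task_ids=["ner", "qa"],  # 2 IDs for 3 paths
    )
except ValueError as exc:
    print(exc)  # task_ids length (2) must match adapter_paths length (3)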
@@ -633,8 +652,16 @@ def get_trainable_params(
 
         return params
 
+    @staticmethod
+    def _safe_filename(task_id: str) -> str:
+        """Convert a task ID to a filesystem-safe filename component."""
+        import re
+        return re.sub(r'[^\w\-.]', '_', task_id)
+
     def save(self, path: str | Path) -> None:
         """Serialize the subspace to disk."""
+        import json
+
         path = Path(path)
         path.mkdir(parents=True, exist_ok=True)
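The regex keeps word characters, hyphens, and dots and maps everything else to underscores, so task IDs containing path separators or spaces can no longer escape the save directory or yield invalid filenames. Expected behavior (IDs here are illustrative):

>>> SharedSubspace._safe_filename("ner/conll-2003")
'ner_conll-2003'
>>> SharedSubspace._safe_filename("qa en v2.1")
'qa_en_v2.1'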
@@ -650,19 +677,22 @@ def save(self, path: str | Path) -> None:
 
         save_file(tensors, str(path / "subspace.safetensors"))
 
-        # Save per-task loadings
+        # Save per-task loadings (with sanitized filenames)
+        tid_to_filename: dict[str, str] = {}
         for tid, proj in self.tasks.items():
+            safe_name = self._safe_filename(tid)
+            tid_to_filename[tid] = safe_name
             task_tensors = {}
             for layer in self.layer_names:
                 task_tensors[f"{layer}.loadings_a"] = proj.loadings_a[layer].contiguous()
                 task_tensors[f"{layer}.loadings_b"] = proj.loadings_b[layer].contiguous()
-            save_file(task_tensors, str(path / f"task_{tid}.safetensors"))
+            save_file(task_tensors, str(path / f"task_{safe_name}.safetensors"))
 
-        # Save metadata
-        import json
+        # Save metadata (includes filename mapping for safe round-trip)
         meta = {
             "layer_names": self.layer_names,
             "task_ids": list(self.tasks.keys()),
+            "task_filenames": tid_to_filename,
             "rank": self.rank,
             "num_components": self.num_components,
         }
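With the mapping stored next to task_ids, the original IDs survive even when their on-disk names were rewritten. Hypothetical contents of the metadata JSON for two tasks, one with a slash (layer name and numeric values are illustrative only):

{
    "layer_names": ["model.layers.0.q_proj"],
    "task_ids": ["ner/conll", "qa"],
    "task_filenames": {"ner/conll": "ner_conll", "qa": "qa"},
    "rank": 16,
    "num_components": 32,
}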
@@ -683,6 +713,8 @@ def load(cls, path: str | Path) -> SharedSubspace:
         task_ids = meta["task_ids"]
         rank = meta["rank"]
         num_components = meta["num_components"]
+        # Support both old format (no mapping) and new format
+        tid_to_filename = meta.get("task_filenames", {})
 
         tensors = load_file(str(path / "subspace.safetensors"))
         components_a = {l: tensors[f"{l}.components_a"] for l in layer_names}
@@ -694,7 +726,8 @@ def load(cls, path: str | Path) -> SharedSubspace:
 
         tasks = {}
         for tid in task_ids:
-            task_tensors = load_file(str(path / f"task_{tid}.safetensors"))
+            safe_name = tid_to_filename.get(tid, tid)
+            task_tensors = load_file(str(path / f"task_{safe_name}.safetensors"))
             loadings_a = {l: task_tensors[f"{l}.loadings_a"] for l in layer_names}
             loadings_b = {l: task_tensors[f"{l}.loadings_b"] for l in layer_names}
             tasks[tid] = TaskProjection(
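Taken together, save/load now round-trips task IDs that are not filesystem-safe, and old checkpoints without the mapping still load via the .get(tid, tid) fallback. A sketch of the intended behavior, with hypothetical paths and a populated subspace:

subspace.absorb(adapter, "wiki/qa:v2")               # ID with '/' and ':'
subspace.save("checkpoints/shared")                  # writes task_wiki_qa_v2.safetensors
restored = SharedSubspace.load("checkpoints/shared")
assert "wiki/qa:v2" in restored.tasks                # original ID preserved via the mapping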