@@ -9,11 +9,11 @@
 from typing import TYPE_CHECKING, ClassVar
 from typing_extensions import override
 
-from ._constants import MAX_KEY_LENGTH, MAX_RECORD_SIZE
+from ._constants import MAX_RECORD_SIZE
 from ._encoding import validate_no_surrogates
 from ._exceptions import LimitError, TransactionError
 from ._json import serialize_json, utf8_byte_length
-from ._keys import Key, KeySpecifier, key_length, validate_key_arity
+from ._keys import Key, KeySpecifier, validate_key_arity, validate_key_length
 from ._readable import ReadableMixin
 from ._records import build_tombstone, extract_key, validate_record
 
@@ -52,6 +52,7 @@ class Transaction(ReadableMixin):
     """
 
     __slots__: ClassVar[tuple[str, ...]] = (
+        "_buffer_serialized",
         "_buffer_updates",
         "_cached_sorted_keys",
         "_file_mtime",
@@ -69,6 +70,7 @@ class Transaction(ReadableMixin):
     _snapshot: "dict[Key, JSONObject]"
     _start_state: "dict[Key, JSONObject]"
     _buffer_updates: "dict[Key, JSONObject | None]"
+    _buffer_serialized: "dict[Key, str]"
     _written_keys: set[Key]
     _finalized: bool
     _file_mtime: float
@@ -99,6 +101,7 @@ def __init__(
         # reloaded state. Safe because _start_state values are never modified.
         self._start_state = state.copy()
         self._buffer_updates = {}
+        self._buffer_serialized = {}
         self._written_keys = set()
         self._finalized = False
         # Cache file stats for skip-reload optimization at commit time
@@ -154,18 +157,18 @@ def put(self, record: "JSONObject") -> None:
 
         # Extract and validate key
         key = extract_key(record, self._key_specifier)
-        key_len = key_length(key)
-        if key_len > MAX_KEY_LENGTH:
-            msg = f"key length {key_len} bytes exceeds maximum {MAX_KEY_LENGTH}"
-            raise LimitError(msg)
+        validate_key_length(key)
 
-        # Serialize record to check size limit (we don't store the serialized form)
+        # Serialize record to check size limit and cache for commit
         serialized = serialize_json(record)
         record_bytes = utf8_byte_length(serialized)
         if record_bytes > MAX_RECORD_SIZE:
             msg = f"record size {record_bytes} bytes exceeds maximum {MAX_RECORD_SIZE}"
             raise LimitError(msg)
 
+        # Cache serialized form before deep copy (record hasn't been modified)
+        self._buffer_serialized[key] = serialized
+
         # Buffer the update (only keep latest value per key)
         record_copy = copy.deepcopy(record)
         self._buffer_updates[key] = record_copy
@@ -196,11 +199,15 @@ def delete(self, key: Key) -> bool:
         # Validate key arity matches specifier
         validate_key_arity(key, self._key_specifier)
 
+        # Validate key length
+        validate_key_length(key)
+
         # Check if key exists in snapshot
         existed = key in self._snapshot
 
         # Buffer the delete (only keep latest state per key)
         self._buffer_updates[key] = None
+        _ = self._buffer_serialized.pop(key, None)
         self._written_keys.add(key)
 
         # Update snapshot
@@ -239,8 +246,8 @@ def commit(self) -> None:
                 tombstone = build_tombstone(key, self._key_specifier)
                 lines.append(serialize_json(tombstone))
             else:
-                # Record (put)
-                lines.append(serialize_json(value))
+                # Record (put) - use cached serialization from put()
+                lines.append(self._buffer_serialized[key])
 
         # Commit via table (handles locking and conflict detection)
         # Transaction is a friend class of Table - protected access is intentional
@@ -249,7 +256,6 @@ def commit(self) -> None:
             self._start_state,
             self._written_keys,
             self._buffer_updates,
-            self._file_mtime,
             self._file_size,
         )
     finally: