Skip to content

Commit 53e31a3

Browse files
author
maebahesioru
committed
fix: downgrade model TargetClosedError to debug, retry stream proxy on port failure
- model_management: TargetClosedError in set_model_from_display -> debug
- app.py: retry stream proxy up to 3 times with port increment on failure
- server.py: add STREAM_PORT_ACTUAL global
1 parent 0cd308b commit 53e31a3

3 files changed

Lines changed: 22 additions & 12 deletions

File tree

src/api/app.py

Lines changed: 16 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -88,18 +88,23 @@ async def _start_stream_proxy():
8888
port = int(STREAM_PORT or 3120)
8989
STREAM_PROXY_SERVER_ENV = os.environ.get('UNIFIED_PROXY_CONFIG') or os.environ.get('HTTPS_PROXY') or os.environ.get('HTTP_PROXY')
9090
server.logger.info(f'Starting STREAM proxy on port {port} with upstream proxy: {STREAM_PROXY_SERVER_ENV}')
91-
server.STREAM_QUEUE = multiprocessing.Queue()
92-
server.STREAM_PROCESS = multiprocessing.Process(target=proxy.start, args=(server.STREAM_QUEUE, port, STREAM_PROXY_SERVER_ENV))
93-
server.STREAM_PROCESS.start()
94-
server.logger.info('STREAM proxy process started. Waiting for port readiness...')
95-
if await _wait_for_port(port, timeout=30.0):
96-
server.logger.info(f'STREAM proxy port {port} is ready.')
97-
else:
98-
server.logger.error(f'STREAM proxy port {port} not ready after timeout. Browser may fail to connect.')
99-
if server.STREAM_PROCESS and server.STREAM_PROCESS.is_alive():
100-
server.logger.warning('STREAM proxy process is alive but port not listening.')
91+
for attempt in range(3):
92+
current_port = port + attempt
93+
server.STREAM_QUEUE = multiprocessing.Queue()
94+
server.STREAM_PROCESS = multiprocessing.Process(target=proxy.start, args=(server.STREAM_QUEUE, current_port, STREAM_PROXY_SERVER_ENV))
95+
server.STREAM_PROCESS.start()
96+
server.logger.info(f'STREAM proxy process started on port {current_port}. Waiting for port readiness...')
97+
if await _wait_for_port(current_port, timeout=30.0):
98+
server.STREAM_PORT_ACTUAL = current_port
99+
server.logger.info(f'STREAM proxy port {current_port} is ready.')
100+
if current_port != port:
101+
server.logger.warning(f'STREAM proxy using fallback port {current_port} (requested {port}).')
102+
return
101103
else:
102-
server.logger.error(f'STREAM proxy process died. Exit code: {server.STREAM_PROCESS.exitcode}')
104+
server.logger.warning(f'STREAM proxy port {current_port} not ready, killing process...')
105+
server.STREAM_PROCESS.terminate()
106+
server.STREAM_PROCESS.join(timeout=3)
107+
server.logger.error(f'STREAM proxy failed to start after 3 attempts.')
103108

104109
async def _initialize_browser_and_page():
105110
import server

src/browser/model_management.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -484,4 +484,8 @@ async def _set_model_from_page_display(page: AsyncPage, set_storage: bool=False)
484484
await page.evaluate("(prefsStr) => localStorage.setItem('aiStudioUserPreference', prefsStr)", json.dumps(prefs_to_set))
485485
logger.info(f" ✅ localStorage.aiStudioUserPreference 已更新。isAdvancedOpen: {prefs_to_set.get('isAdvancedOpen')}, areToolsOpen: {prefs_to_set.get('areToolsOpen')} (期望: True), promptModel: '{prefs_to_set.get('promptModel', '未设置/保留原样')}'。")
486486
except Exception as e_set_disp:
487-
logger.error(f' 尝试从页面显示设置模型时出错: {e_set_disp}', exc_info=True)
487+
from playwright._impl._errors import TargetClosedError
488+
if isinstance(e_set_disp, TargetClosedError):
489+
logger.debug(f' 尝试从页面显示设置模型时出错 (browser closed): {e_set_disp}')
490+
else:
491+
logger.error(f' 尝试从页面显示设置模型时出错: {e_set_disp}', exc_info=True)

src/server.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@
3535
from api import generate_sse_chunk, generate_sse_stop_chunk, generate_sse_error_chunk, use_helper_get_response, use_stream_response, clear_stream_queue, prepare_combined_prompt, validate_chat_request, _process_request_refactored, create_app, queue_worker
3636
STREAM_QUEUE: Optional[multiprocessing.Queue] = None
3737
STREAM_PROCESS = None
38+
STREAM_PORT_ACTUAL: Optional[int] = None
3839
playwright_manager: Optional[AsyncPlaywright] = None
3940
browser_instance: Optional[AsyncBrowser] = None
4041
page_instance: Optional[AsyncPage] = None

0 commit comments

Comments (0)