|
| 1 | +import base64 |
| 2 | +from enum import StrEnum |
| 3 | +from typing import Any |
| 4 | + |
| 5 | +import httpx |
| 6 | +from uipath._utils._ssl_context import get_httpx_client_kwargs |
| 7 | + |
# Raster image formats accepted by is_image(); compared case-insensitively.
IMAGE_MIME_TYPES: set[str] = {
    f"image/{subtype}" for subtype in ("png", "jpeg", "gif", "webp")
}
| 14 | + |
| 15 | + |
class LlmProvider(StrEnum):
    """LLM provider families distinguished by model name (see detect_provider)."""

    OPENAI = "openai"    # GPT-family models
    BEDROCK = "bedrock"  # Anthropic/Claude models
    VERTEX = "vertex"    # Gemini models
    UNKNOWN = "unknown"  # sentinel; detect_provider() raises instead of returning this
| 21 | + |
| 22 | + |
def is_pdf(mime_type: str) -> bool:
    """Return True when *mime_type* denotes a PDF document (case-insensitive)."""
    return "application/pdf" == mime_type.lower()
| 26 | + |
| 27 | + |
def is_image(mime_type: str) -> bool:
    """Return True for a supported raster image MIME type.

    Accepted case-insensitively: image/png, image/jpeg, image/gif, and
    image/webp — the same formats enumerated in IMAGE_MIME_TYPES.
    """
    return mime_type.lower() in {"image/png", "image/jpeg", "image/gif", "image/webp"}
| 31 | + |
| 32 | + |
def detect_provider(model_name: str) -> LlmProvider:
    """Map a model name to its LLM provider.

    Detection is a case-insensitive substring match, checked in order:
    anthropic/claude -> BEDROCK, gpt -> OPENAI, gemini -> VERTEX.

    Raises:
        ValueError: If *model_name* is empty/falsy or matches no known provider.
    """
    normalized = (model_name or "").lower()

    # Check order is significant for names containing several keywords.
    keyword_table: tuple[tuple[tuple[str, ...], LlmProvider], ...] = (
        (("anthropic", "claude"), LlmProvider.BEDROCK),
        (("gpt",), LlmProvider.OPENAI),
        (("gemini",), LlmProvider.VERTEX),
    )

    if normalized:
        for keywords, provider in keyword_table:
            if any(keyword in normalized for keyword in keywords):
                return provider

    raise ValueError(f"Unsupported model: {model_name}")
| 50 | + |
| 51 | + |
async def _download_file(url: str) -> str:
    """Fetch *url* over HTTP(S) and return the body base64-encoded as text.

    Raises:
        httpx.HTTPStatusError: If the server responds with an error status.
    """
    async with httpx.AsyncClient(**get_httpx_client_kwargs()) as http:
        response = await http.get(url)
        response.raise_for_status()
        payload = response.content

    return base64.b64encode(payload).decode("utf-8")
| 60 | + |
| 61 | + |
| 62 | +async def build_message_content_part_from_data( |
| 63 | + url: str, |
| 64 | + filename: str, |
| 65 | + mime_type: str, |
| 66 | + model: str, |
| 67 | +) -> dict[str, Any]: |
| 68 | + """Download a file and build a provider-specific message content part. |
| 69 | +
|
| 70 | + The format varies based on the detected provider (Bedrock, OpenAI, or Vertex). |
| 71 | + """ |
| 72 | + provider = detect_provider(model) |
| 73 | + |
| 74 | + if provider == LlmProvider.BEDROCK: |
| 75 | + raise ValueError("Anthropic models are not yet supported for file attachments") |
| 76 | + |
| 77 | + if provider == LlmProvider.OPENAI: |
| 78 | + return await _build_openai_content_part_from_data( |
| 79 | + url, mime_type, filename, False |
| 80 | + ) |
| 81 | + |
| 82 | + if provider == LlmProvider.VERTEX: |
| 83 | + raise ValueError("Gemini models are not yet supported for file attachments") |
| 84 | + |
| 85 | + raise ValueError(f"Unsupported provider: {provider}") |
| 86 | + |
| 87 | + |
async def _build_openai_content_part_from_data(
    url: str,
    mime_type: str,
    filename: str,
    download_image: bool,
) -> dict[str, Any]:
    """Build an OpenAI message content part for an image or PDF file.

    Args:
        url: Location of the file.
        mime_type: MIME type; must be a supported image type or application/pdf.
        filename: Original file name (used only for embedded PDF parts).
        download_image: When True, download the file and embed its content
            base64-encoded; when False, reference the file by URL.

    Returns:
        An ``input_image`` or ``input_file`` content-part dict.

    Raises:
        ValueError: If *mime_type* is neither a supported image type nor PDF.
    """
    image = is_image(mime_type)
    pdf = is_pdf(mime_type)

    # Fix: validate the MIME type *before* downloading. The original fell
    # through to this raise only after fetching the file, wasting a network
    # round-trip on inputs that are rejected anyway.
    if not (image or pdf):
        raise ValueError(f"Unsupported mime_type: {mime_type}")

    if not download_image:
        # URL-reference variants.
        if image:
            return {
                "type": "input_image",
                "image_url": url,
            }
        return {
            "type": "input_file",
            "file_url": url,
        }

    # Embedded (base64) variants.
    base64_content = await _download_file(url)
    if image:
        return {
            "type": "input_image",
            "image_url": f"data:{mime_type};base64,{base64_content}",
        }
    return {
        "type": "input_file",
        "filename": filename,
        "file_data": base64_content,
    }
0 commit comments