|
1 | 1 | import sys |
2 | 2 | from functools import wraps |
| 3 | +from collections.abc import Iterable |
3 | 4 |
|
4 | 5 | import sentry_sdk |
5 | 6 | from sentry_sdk import consts |
|
25 | 26 | if TYPE_CHECKING: |
26 | 27 | from typing import ( |
27 | 28 | Any, |
28 | | - Iterable, |
29 | 29 | List, |
30 | 30 | Optional, |
31 | 31 | Callable, |
|
34 | 34 | Union, |
35 | 35 | ) |
36 | 36 | from sentry_sdk.tracing import Span |
| 37 | + from sentry_sdk._types import TextPart |
37 | 38 |
|
38 | | - from openai.types.responses import ResponseInputParam |
| 39 | + from openai.types.responses import ResponseInputParam, ResponseInputItemParam |
| 40 | + from openai import Omit |
39 | 41 |
|
40 | 42 | try: |
41 | 43 | try: |
|
52 | 54 | from openai.resources import Embeddings, AsyncEmbeddings |
53 | 55 |
|
54 | 56 | if TYPE_CHECKING: |
55 | | - from openai.types.chat import ChatCompletionMessageParam, ChatCompletionChunk |
| 57 | + from openai.types.chat import ( |
| 58 | + ChatCompletionMessageParam, |
| 59 | + ChatCompletionChunk, |
| 60 | + ChatCompletionSystemMessageParam, |
| 61 | + ) |
56 | 62 | except ImportError: |
57 | 63 | raise DidNotEnable("OpenAI not installed") |
58 | 64 |
|
@@ -193,6 +199,45 @@ def _calculate_token_usage( |
193 | 199 | ) |
194 | 200 |
|
195 | 201 |
|
| 202 | +def _is_system_instruction_completions(message: "ChatCompletionMessageParam") -> bool: |
| 203 | + return isinstance(message, dict) and message.get("role") == "system" |
| 204 | + |
| 205 | + |
def _get_system_instructions_completions(
    messages: "Iterable[ChatCompletionMessageParam]",
) -> "list[ChatCompletionSystemMessageParam]":
    """Extract the system-role messages from a Chat Completions message list.

    The return annotation is narrowed to ``ChatCompletionSystemMessageParam``
    (the filter guarantees every kept message has role "system"), matching
    what ``_transform_system_instructions`` declares for its input.

    Defensively returns an empty list when *messages* is not iterable at
    all — this runs inside instrumentation and must never raise on
    malformed caller input.
    """
    if not isinstance(messages, Iterable):
        return []

    return [
        message for message in messages if _is_system_instruction_completions(message)
    ]
| 215 | + |
| 216 | + |
| 217 | +def _transform_system_instructions( |
| 218 | + system_instructions: "list[ChatCompletionSystemMessageParam]", |
| 219 | +) -> "list[TextPart]": |
| 220 | + instruction_text_parts: "list[TextPart]" = [] |
| 221 | + |
| 222 | + for instruction in system_instructions: |
| 223 | + if not isinstance(instruction, dict): |
| 224 | + continue |
| 225 | + |
| 226 | + content = instruction.get("content") |
| 227 | + |
| 228 | + if isinstance(content, str): |
| 229 | + instruction_text_parts.append({"type": "text", "content": content}) |
| 230 | + |
| 231 | + elif isinstance(content, list): |
| 232 | + for part in content: |
| 233 | + if isinstance(part, dict) and part.get("type") == "text": |
| 234 | + text = part.get("text", "") |
| 235 | + if text: |
| 236 | + instruction_text_parts.append({"type": "text", "content": text}) |
| 237 | + |
| 238 | + return instruction_text_parts |
| 239 | + |
| 240 | + |
196 | 241 | def _get_input_messages( |
197 | 242 | kwargs: "dict[str, Any]", |
198 | 243 | ) -> "Optional[Union[Iterable[Any], list[str]]]": |
@@ -270,17 +315,48 @@ def _set_completions_api_input_data( |
270 | 315 | kwargs: "dict[str, Any]", |
271 | 316 | integration: "OpenAIIntegration", |
272 | 317 | ) -> None: |
273 | | - messages: "Optional[Union[Iterable[ChatCompletionMessageParam], list[str]]]" = ( |
274 | | - _get_input_messages(kwargs) |
| 318 | + messages: "Optional[Union[str, Iterable[ChatCompletionMessageParam]]]" = kwargs.get( |
| 319 | + "messages" |
275 | 320 | ) |
276 | 321 |
|
277 | | - if ( |
278 | | - messages is not None |
279 | | - and len(messages) > 0 # type: ignore |
280 | | - and should_send_default_pii() |
281 | | - and integration.include_prompts |
282 | | - ): |
283 | | - normalized_messages = normalize_message_roles(messages) # type: ignore |
| 322 | + if not should_send_default_pii() or not integration.include_prompts: |
| 323 | + set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "responses") |
| 324 | + _commmon_set_input_data(span, kwargs) |
| 325 | + return |
| 326 | + |
| 327 | + if messages is None: |
| 328 | + set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "chat") |
| 329 | + _commmon_set_input_data(span, kwargs) |
| 330 | + return |
| 331 | + |
| 332 | + system_instructions = _get_system_instructions_completions(messages) |
| 333 | + if len(system_instructions) > 0: |
| 334 | + set_data_normalized( |
| 335 | + span, |
| 336 | + SPANDATA.GEN_AI_SYSTEM_INSTRUCTIONS, |
| 337 | + _transform_system_instructions(system_instructions), |
| 338 | + unpack=False, |
| 339 | + ) |
| 340 | + |
| 341 | + if isinstance(messages, str): |
| 342 | + normalized_messages = normalize_message_roles([messages]) # type: ignore |
| 343 | + scope = sentry_sdk.get_current_scope() |
| 344 | + messages_data = truncate_and_annotate_messages(normalized_messages, span, scope) |
| 345 | + if messages_data is not None: |
| 346 | + set_data_normalized( |
| 347 | + span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages_data, unpack=False |
| 348 | + ) |
| 349 | + set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "chat") |
| 350 | + _commmon_set_input_data(span, kwargs) |
| 351 | + return |
| 352 | + |
| 353 | + non_system_messages = [ |
| 354 | + message |
| 355 | + for message in messages |
| 356 | + if not _is_system_instruction_completions(message) |
| 357 | + ] |
| 358 | + if len(non_system_messages) > 0: |
| 359 | + normalized_messages = normalize_message_roles(non_system_messages) # type: ignore |
284 | 360 | scope = sentry_sdk.get_current_scope() |
285 | 361 | messages_data = truncate_and_annotate_messages(normalized_messages, span, scope) |
286 | 362 | if messages_data is not None: |
|
0 commit comments