
Commit 1e2da15

feat(openai): Set system instruction attribute for Completions API (#5359)
1 parent f749ae4 commit 1e2da15

File tree

2 files changed (+598, -59 lines)

sentry_sdk/integrations/openai.py

Lines changed: 88 additions & 12 deletions
@@ -1,5 +1,6 @@
 import sys
 from functools import wraps
+from collections.abc import Iterable

 import sentry_sdk
 from sentry_sdk import consts

@@ -25,7 +26,6 @@
 if TYPE_CHECKING:
     from typing import (
         Any,
-        Iterable,
         List,
         Optional,
         Callable,

@@ -34,8 +34,10 @@
         Union,
     )
     from sentry_sdk.tracing import Span
+    from sentry_sdk._types import TextPart

-    from openai.types.responses import ResponseInputParam
+    from openai.types.responses import ResponseInputParam, ResponseInputItemParam
+    from openai import Omit

 try:
     try:

@@ -52,7 +54,11 @@
     from openai.resources import Embeddings, AsyncEmbeddings

     if TYPE_CHECKING:
-        from openai.types.chat import ChatCompletionMessageParam, ChatCompletionChunk
+        from openai.types.chat import (
+            ChatCompletionMessageParam,
+            ChatCompletionChunk,
+            ChatCompletionSystemMessageParam,
+        )
 except ImportError:
     raise DidNotEnable("OpenAI not installed")

@@ -193,6 +199,45 @@ def _calculate_token_usage(
     )


+def _is_system_instruction_completions(message: "ChatCompletionMessageParam") -> bool:
+    return isinstance(message, dict) and message.get("role") == "system"
+
+
+def _get_system_instructions_completions(
+    messages: "Iterable[ChatCompletionMessageParam]",
+) -> "list[ChatCompletionMessageParam]":
+    if not isinstance(messages, Iterable):
+        return []
+
+    return [
+        message for message in messages if _is_system_instruction_completions(message)
+    ]
+
+
+def _transform_system_instructions(
+    system_instructions: "list[ChatCompletionSystemMessageParam]",
+) -> "list[TextPart]":
+    instruction_text_parts: "list[TextPart]" = []
+
+    for instruction in system_instructions:
+        if not isinstance(instruction, dict):
+            continue
+
+        content = instruction.get("content")
+
+        if isinstance(content, str):
+            instruction_text_parts.append({"type": "text", "content": content})
+
+        elif isinstance(content, list):
+            for part in content:
+                if isinstance(part, dict) and part.get("type") == "text":
+                    text = part.get("text", "")
+                    if text:
+                        instruction_text_parts.append({"type": "text", "content": text})
+
+    return instruction_text_parts
+
+
 def _get_input_messages(
     kwargs: "dict[str, Any]",
 ) -> "Optional[Union[Iterable[Any], list[str]]]":
@@ -270,17 +315,48 @@ def _set_completions_api_input_data(
     kwargs: "dict[str, Any]",
     integration: "OpenAIIntegration",
 ) -> None:
-    messages: "Optional[Union[Iterable[ChatCompletionMessageParam], list[str]]]" = (
-        _get_input_messages(kwargs)
+    messages: "Optional[Union[str, Iterable[ChatCompletionMessageParam]]]" = kwargs.get(
+        "messages"
     )

-    if (
-        messages is not None
-        and len(messages) > 0  # type: ignore
-        and should_send_default_pii()
-        and integration.include_prompts
-    ):
-        normalized_messages = normalize_message_roles(messages)  # type: ignore
+    if not should_send_default_pii() or not integration.include_prompts:
+        set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "responses")
+        _commmon_set_input_data(span, kwargs)
+        return
+
+    if messages is None:
+        set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "chat")
+        _commmon_set_input_data(span, kwargs)
+        return
+
+    system_instructions = _get_system_instructions_completions(messages)
+    if len(system_instructions) > 0:
+        set_data_normalized(
+            span,
+            SPANDATA.GEN_AI_SYSTEM_INSTRUCTIONS,
+            _transform_system_instructions(system_instructions),
+            unpack=False,
+        )
+
+    if isinstance(messages, str):
+        normalized_messages = normalize_message_roles([messages])  # type: ignore
+        scope = sentry_sdk.get_current_scope()
+        messages_data = truncate_and_annotate_messages(normalized_messages, span, scope)
+        if messages_data is not None:
+            set_data_normalized(
+                span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages_data, unpack=False
+            )
+        set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "chat")
+        _commmon_set_input_data(span, kwargs)
+        return
+
+    non_system_messages = [
+        message
+        for message in messages
+        if not _is_system_instruction_completions(message)
+    ]
+    if len(non_system_messages) > 0:
+        normalized_messages = normalize_message_roles(non_system_messages)  # type: ignore
         scope = sentry_sdk.get_current_scope()
         messages_data = truncate_and_annotate_messages(normalized_messages, span, scope)
         if messages_data is not None:
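
A minimal usage sketch (not from the commit) of the code path this hunk changes. It assumes an SDK release containing this change and uses the existing send_default_pii / include_prompts opt-ins that gate prompt capture:

# Hypothetical example: exercising the Completions input-data path.
import sentry_sdk
from sentry_sdk.integrations.openai import OpenAIIntegration
from openai import OpenAI

sentry_sdk.init(
    dsn="...",  # your DSN
    send_default_pii=True,  # required for prompt/instruction capture
    integrations=[OpenAIIntegration(include_prompts=True)],
)

client = OpenAI()
client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[
        {"role": "system", "content": "You are a terse assistant."},
        {"role": "user", "content": "Summarize this ticket."},
    ],
)

# With this commit, system messages are recorded on the span under
# SPANDATA.GEN_AI_SYSTEM_INSTRUCTIONS, while only the remaining non-system
# messages are normalized, truncated, and stored as GEN_AI_REQUEST_MESSAGES.
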
