From 7480e4651d1939e975ec56773a6f3f78410408ea Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Fri, 6 Feb 2026 10:57:41 +0100
Subject: [PATCH] feat(attributes): add gen_ai.conversation.id

---
 generated/attributes/all.md                 |  3 +-
 generated/attributes/gen_ai.md              | 12 +++++++
 .../sentry-conventions/src/attributes.ts    | 33 +++++++++++++++++++
 .../gen_ai/gen_ai__conversation__id.json    | 10 ++++++
 python/src/sentry_conventions/attributes.py | 18 ++++++++++
 5 files changed, 75 insertions(+), 1 deletion(-)
 create mode 100644 model/attributes/gen_ai/gen_ai__conversation__id.json

diff --git a/generated/attributes/all.md b/generated/attributes/all.md
index 4dd6efa1..f262b961 100644
--- a/generated/attributes/all.md
+++ b/generated/attributes/all.md
@@ -4,7 +4,7 @@
 
 This page lists all available attributes across all categories.
 
-Total attributes: 424
+Total attributes: 425
 
 ## Stable Attributes
 
@@ -77,6 +77,7 @@ Total attributes: 424
 | [`gen_ai.agent.name`](./gen_ai.md#gen_aiagentname) | The name of the agent being used. |
 | [`gen_ai.assistant.message`](./gen_ai.md#gen_aiassistantmessage) | The assistant message passed to the model. |
 | [`gen_ai.choice`](./gen_ai.md#gen_aichoice) | The model's response message. |
+| [`gen_ai.conversation.id`](./gen_ai.md#gen_aiconversationid) | The unique identifier for a conversation (session, thread), used to store and correlate messages within this conversation. |
 | [`gen_ai.cost.input_tokens`](./gen_ai.md#gen_aicostinput_tokens) | The cost of tokens used to process the AI input (prompt) in USD (without cached input tokens). |
 | [`gen_ai.cost.output_tokens`](./gen_ai.md#gen_aicostoutput_tokens) | The cost of tokens used for creating the AI output in USD (without reasoning tokens). |
 | [`gen_ai.cost.total_tokens`](./gen_ai.md#gen_aicosttotal_tokens) | The total cost for the tokens used. |
diff --git a/generated/attributes/gen_ai.md b/generated/attributes/gen_ai.md
index 985defd9..ca3b5f2e 100644
--- a/generated/attributes/gen_ai.md
+++ b/generated/attributes/gen_ai.md
@@ -6,6 +6,7 @@
   - [gen_ai.agent.name](#gen_aiagentname)
   - [gen_ai.assistant.message](#gen_aiassistantmessage)
   - [gen_ai.choice](#gen_aichoice)
+  - [gen_ai.conversation.id](#gen_aiconversationid)
   - [gen_ai.cost.input_tokens](#gen_aicostinput_tokens)
   - [gen_ai.cost.output_tokens](#gen_aicostoutput_tokens)
   - [gen_ai.cost.total_tokens](#gen_aicosttotal_tokens)
@@ -92,6 +93,17 @@ The model's response message.
 | Property | Value |
 | --- | --- |
 | Type | `string` |
 | Has PII | maybe |
 | Exists in OpenTelemetry | No |
 | Example | `The weather in Paris is rainy and overcast, with temperatures around 57°F` |
 
+### gen_ai.conversation.id
+
+The unique identifier for a conversation (session, thread), used to store and correlate messages within this conversation.
+
+| Property | Value |
+| --- | --- |
+| Type | `string` |
+| Has PII | maybe |
+| Exists in OpenTelemetry | Yes |
+| Example | `conv_5j66UpCpwteGg4YSxUnt7lPY` |
+
 ### gen_ai.cost.input_tokens
 
 The cost of tokens used to process the AI input (prompt) in USD (without cached input tokens).
diff --git a/javascript/sentry-conventions/src/attributes.ts b/javascript/sentry-conventions/src/attributes.ts
index 4bd9d9f1..a6862287 100644
--- a/javascript/sentry-conventions/src/attributes.ts
+++ b/javascript/sentry-conventions/src/attributes.ts
@@ -2065,6 +2065,26 @@ export const GEN_AI_CHOICE = 'gen_ai.choice';
  */
 export type GEN_AI_CHOICE_TYPE = string;
 
+// Path: model/attributes/gen_ai/gen_ai__conversation__id.json
+
+/**
+ * The unique identifier for a conversation (session, thread), used to store and correlate messages within this conversation. `gen_ai.conversation.id`
+ *
+ * Attribute Value Type: `string` {@link GEN_AI_CONVERSATION_ID_TYPE}
+ *
+ * Contains PII: maybe
+ *
+ * Attribute defined in OTEL: Yes
+ *
+ * @example "conv_5j66UpCpwteGg4YSxUnt7lPY"
+ */
+export const GEN_AI_CONVERSATION_ID = 'gen_ai.conversation.id';
+
+/**
+ * Type for {@link GEN_AI_CONVERSATION_ID} gen_ai.conversation.id
+ */
+export type GEN_AI_CONVERSATION_ID_TYPE = string;
+
 // Path: model/attributes/gen_ai/gen_ai__cost__input_tokens.json
 
 /**
@@ -8946,6 +8966,7 @@ export const ATTRIBUTE_TYPE: Record = {
   [GEN_AI_AGENT_NAME]: 'string',
   [GEN_AI_ASSISTANT_MESSAGE]: 'string',
   [GEN_AI_CHOICE]: 'string',
+  [GEN_AI_CONVERSATION_ID]: 'string',
   [GEN_AI_COST_INPUT_TOKENS]: 'double',
   [GEN_AI_COST_OUTPUT_TOKENS]: 'double',
   [GEN_AI_COST_TOTAL_TOKENS]: 'double',
@@ -9373,6 +9394,7 @@ export type AttributeName =
   | typeof GEN_AI_AGENT_NAME
   | typeof GEN_AI_ASSISTANT_MESSAGE
   | typeof GEN_AI_CHOICE
+  | typeof GEN_AI_CONVERSATION_ID
   | typeof GEN_AI_COST_INPUT_TOKENS
   | typeof GEN_AI_COST_OUTPUT_TOKENS
   | typeof GEN_AI_COST_TOTAL_TOKENS
@@ -10763,6 +10785,16 @@ export const ATTRIBUTE_METADATA: Record = {
     isInOtel: false,
     example: 'The weather in Paris is rainy and overcast, with temperatures around 57°F',
   },
+  [GEN_AI_CONVERSATION_ID]: {
+    brief:
+      'The unique identifier for a conversation (session, thread), used to store and correlate messages within this conversation.',
+    type: 'string',
+    pii: {
+      isPii: 'maybe',
+    },
+    isInOtel: true,
+    example: 'conv_5j66UpCpwteGg4YSxUnt7lPY',
+  },
   [GEN_AI_COST_INPUT_TOKENS]: {
     brief: 'The cost of tokens used to process the AI input (prompt) in USD (without cached input tokens).',
     type: 'double',
@@ -14145,6 +14177,7 @@ export type Attributes = {
   [GEN_AI_AGENT_NAME]?: GEN_AI_AGENT_NAME_TYPE;
   [GEN_AI_ASSISTANT_MESSAGE]?: GEN_AI_ASSISTANT_MESSAGE_TYPE;
   [GEN_AI_CHOICE]?: GEN_AI_CHOICE_TYPE;
+  [GEN_AI_CONVERSATION_ID]?: GEN_AI_CONVERSATION_ID_TYPE;
   [GEN_AI_COST_INPUT_TOKENS]?: GEN_AI_COST_INPUT_TOKENS_TYPE;
   [GEN_AI_COST_OUTPUT_TOKENS]?: GEN_AI_COST_OUTPUT_TOKENS_TYPE;
   [GEN_AI_COST_TOTAL_TOKENS]?: GEN_AI_COST_TOTAL_TOKENS_TYPE;
diff --git a/model/attributes/gen_ai/gen_ai__conversation__id.json b/model/attributes/gen_ai/gen_ai__conversation__id.json
new file mode 100644
index 00000000..e115cc89
--- /dev/null
+++ b/model/attributes/gen_ai/gen_ai__conversation__id.json
@@ -0,0 +1,10 @@
+{
+  "key": "gen_ai.conversation.id",
+  "brief": "The unique identifier for a conversation (session, thread), used to store and correlate messages within this conversation.",
+  "type": "string",
+  "pii": {
+    "key": "maybe"
+  },
+  "is_in_otel": true,
+  "example": "conv_5j66UpCpwteGg4YSxUnt7lPY"
+}
diff --git a/python/src/sentry_conventions/attributes.py b/python/src/sentry_conventions/attributes.py
index cb135868..c125a290 100644
--- a/python/src/sentry_conventions/attributes.py
+++ b/python/src/sentry_conventions/attributes.py
@@ -1242,6 +1242,16 @@ class ATTRIBUTE_NAMES(metaclass=_AttributeNamesMeta):
     Example:
     "The weather in Paris is rainy and overcast, with temperatures around 57°F"
     """
 
+    # Path: model/attributes/gen_ai/gen_ai__conversation__id.json
+    GEN_AI_CONVERSATION_ID: Literal["gen_ai.conversation.id"] = "gen_ai.conversation.id"
+    """The unique identifier for a conversation (session, thread), used to store and correlate messages within this conversation.
+
+    Type: str
+    Contains PII: maybe
+    Defined in OTEL: Yes
+    Example: "conv_5j66UpCpwteGg4YSxUnt7lPY"
+    """
+
     # Path: model/attributes/gen_ai/gen_ai__cost__input_tokens.json
     GEN_AI_COST_INPUT_TOKENS: Literal["gen_ai.cost.input_tokens"] = (
         "gen_ai.cost.input_tokens"
     )
@@ -5632,6 +5642,13 @@ class ATTRIBUTE_NAMES(metaclass=_AttributeNamesMeta):
         is_in_otel=False,
         example="The weather in Paris is rainy and overcast, with temperatures around 57°F",
     ),
+    "gen_ai.conversation.id": AttributeMetadata(
+        brief="The unique identifier for a conversation (session, thread), used to store and correlate messages within this conversation.",
+        type=AttributeType.STRING,
+        pii=PiiInfo(isPii=IsPii.MAYBE),
+        is_in_otel=True,
+        example="conv_5j66UpCpwteGg4YSxUnt7lPY",
+    ),
     "gen_ai.cost.input_tokens": AttributeMetadata(
         brief="The cost of tokens used to process the AI input (prompt) in USD (without cached input tokens).",
         type=AttributeType.DOUBLE,
@@ -8249,6 +8266,7 @@ class ATTRIBUTE_NAMES(metaclass=_AttributeNamesMeta):
     "gen_ai.agent.name": str,
     "gen_ai.assistant.message": str,
     "gen_ai.choice": str,
+    "gen_ai.conversation.id": str,
     "gen_ai.cost.input_tokens": float,
     "gen_ai.cost.output_tokens": float,
     "gen_ai.cost.total_tokens": float,