feat(anthropic): add Messages.create sync instrumentation #4034
New file (Python, 108 lines): utility functions for the Anthropic instrumentation.

```python
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Utility functions for Anthropic instrumentation."""

from __future__ import annotations

from os import environ
from typing import Any, Optional
from urllib.parse import urlparse

from opentelemetry.semconv._incubating.attributes import (
    gen_ai_attributes as GenAIAttributes,
)
from opentelemetry.semconv._incubating.attributes import (
    server_attributes as ServerAttributes,
)
from opentelemetry.semconv.attributes import (
    error_attributes as ErrorAttributes,
)
from opentelemetry.trace import Span
from opentelemetry.trace.status import Status, StatusCode

OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT = (
    "OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"
)


def is_content_enabled() -> bool:
    """Check if content capture is enabled via environment variable."""
    capture_content = environ.get(
        OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT, "false"
    )
    return capture_content.lower() == "true"


def set_server_address_and_port(
    client_instance: Any, attributes: dict[str, Any]
) -> None:
    """Extract server address and port from the Anthropic client instance."""
    base_client = getattr(client_instance, "_client", None)
    base_url = getattr(base_client, "base_url", None)
    if not base_url:
        return

    port: Optional[int] = None
    if hasattr(base_url, "host"):
        # httpx.URL object
        attributes[ServerAttributes.SERVER_ADDRESS] = base_url.host
        port = getattr(base_url, "port", None)
    elif isinstance(base_url, str):
        url = urlparse(base_url)
        attributes[ServerAttributes.SERVER_ADDRESS] = url.hostname
        port = url.port

    if port and port != 443 and port > 0:
        attributes[ServerAttributes.SERVER_PORT] = port


def set_span_attribute(span: Span, name: str, value: Any) -> None:
    """Set a span attribute if the value is not None."""
    if value is None:
        return
    span.set_attribute(name, value)


def get_llm_request_attributes(
    kwargs: dict[str, Any], client_instance: Any
) -> dict[str, Any]:
```
Member (on the `get_llm_request_attributes` signature): Can you annotate the return type like this? (See lines 105 to 107 in 4531513.)
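A sketch of what the requested annotation might look like, assuming the Vertex helper referenced by the reviewer returns `dict[str, AttributeValue]` (the linked lines are not reproduced on this page); `AttributeValue` comes from `opentelemetry.util.types`:

```python
# Hypothetical sketch of the suggested change; the AttributeValue return type
# is an assumption based on the Vertex utils referenced in the comment.
from __future__ import annotations

from typing import Any

from opentelemetry.util.types import AttributeValue


def get_llm_request_attributes(
    kwargs: dict[str, Any], client_instance: Any
) -> dict[str, AttributeValue]:
    ...
```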
| """Extract LLM request attributes from kwargs.""" | ||||||||
| attributes = { | ||||||||
| GenAIAttributes.GEN_AI_OPERATION_NAME: GenAIAttributes.GenAiOperationNameValues.CHAT.value, | ||||||||
| GenAIAttributes.GEN_AI_SYSTEM: GenAIAttributes.GenAiSystemValues.ANTHROPIC.value, # pyright: ignore[reportDeprecated] | ||||||||
| GenAIAttributes.GEN_AI_REQUEST_MODEL: kwargs.get("model"), | ||||||||
| GenAIAttributes.GEN_AI_REQUEST_MAX_TOKENS: kwargs.get("max_tokens"), | ||||||||
| GenAIAttributes.GEN_AI_REQUEST_TEMPERATURE: kwargs.get("temperature"), | ||||||||
| GenAIAttributes.GEN_AI_REQUEST_TOP_P: kwargs.get("top_p"), | ||||||||
| GenAIAttributes.GEN_AI_REQUEST_TOP_K: kwargs.get("top_k"), | ||||||||
| GenAIAttributes.GEN_AI_REQUEST_STOP_SEQUENCES: kwargs.get( | ||||||||
| "stop_sequences" | ||||||||
| ), | ||||||||
|
Member (on lines +85 to +92): For extracting parameters from the untyped kwargs dict, can you define a function with a copied method signature from the Anthropic code being instrumented (and add a link to that code in a comment) and call it with `**kwargs`? Example from Vertex: lines 100 to 102 in 4531513.
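A rough illustration of that pattern, see the sketch below; the helper name and exact parameter list are illustrative rather than copied from the Anthropic SDK. The keyword-only parameters mirror the `Messages.create` arguments this instrumentation already reads, and the wrapper's kwargs would be passed straight into it.

```python
# Hypothetical sketch of the suggested extraction helper; parameter names are
# the ones this instrumentation reads from kwargs, and **_other absorbs any
# Messages.create arguments it does not inspect.
from __future__ import annotations

from typing import Any


def _extract_messages_create_params(
    *,
    model: str | None = None,
    max_tokens: int | None = None,
    temperature: float | None = None,
    top_p: float | None = None,
    top_k: int | None = None,
    stop_sequences: list[str] | None = None,
    **_other: Any,
) -> dict[str, Any]:
    """Return only the request parameters used for span attributes."""
    return {
        "model": model,
        "max_tokens": max_tokens,
        "temperature": temperature,
        "top_p": top_p,
        "top_k": top_k,
        "stop_sequences": stop_sequences,
    }


# In the wrapper this would be called as:
# params = _extract_messages_create_params(**kwargs)
```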
The file continues:

```python
    }

    set_server_address_and_port(client_instance, attributes)

    # Filter out None values
    return {k: v for k, v in attributes.items() if v is not None}


def handle_span_exception(span: Span, error: Exception) -> None:
    """Handle an exception by setting span status and error attributes."""
    span.set_status(Status(StatusCode.ERROR, str(error)))
    if span.is_recording():
        span.set_attribute(
            ErrorAttributes.ERROR_TYPE, type(error).__qualname__
        )
    span.end()
```
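For orientation, a minimal sketch of how these utilities could be wired into a sync `Messages.create` wrapper. The wrapper shape below (wrapt-style `wrapped`/`instance` arguments) and the function name are assumptions, not the PR's actual patch code.

```python
# Hypothetical wrapper sketch (not the PR's patch code) showing how
# get_llm_request_attributes and handle_span_exception from above fit together.
from opentelemetry import trace
from opentelemetry.semconv._incubating.attributes import (
    gen_ai_attributes as GenAIAttributes,
)
from opentelemetry.trace import SpanKind

tracer = trace.get_tracer(__name__)


def traced_messages_create(wrapped, instance, args, kwargs):
    attributes = get_llm_request_attributes(kwargs, instance)
    span_name = (
        f"{attributes[GenAIAttributes.GEN_AI_OPERATION_NAME]} "
        f"{attributes.get(GenAIAttributes.GEN_AI_REQUEST_MODEL, '')}"
    ).strip()
    with tracer.start_as_current_span(
        span_name,
        kind=SpanKind.CLIENT,
        attributes=attributes,
        end_on_exit=False,
        record_exception=False,
        set_status_on_exception=False,
    ) as span:
        try:
            result = wrapped(*args, **kwargs)
            span.end()
            return result
        except Exception as error:
            handle_span_exception(span, error)
            raise
```

On success the span is ended explicitly; on failure `handle_span_exception` sets the error status and ends it, which is why `end_on_exit` is disabled in this sketch.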
New file (YAML, 60 lines): VCR cassette capturing a 404 "not_found_error" response for an invalid model name.

```yaml
interactions:
- request:
    body: |-
      {
        "max_tokens": 100,
        "messages": [
          {
            "role": "user",
            "content": "Hello"
          }
        ],
        "model": "invalid-model-name"
      }
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      anthropic-version:
      - '2023-06-01'
      connection:
      - keep-alive
      content-length:
      - '110'
      content-type:
      - application/json
      host:
      - api.anthropic.com
      user-agent:
      - Anthropic/Python
      x-api-key:
      - test_anthropic_api_key
    method: POST
    uri: https://api.anthropic.com/v1/messages
  response:
    body:
      string: |-
        {
          "type": "error",
          "error": {
            "type": "not_found_error",
            "message": "model: invalid-model-name"
          }
        }
    headers:
      Connection:
      - keep-alive
      Content-Type:
      - application/json
      Date:
      - Mon, 15 Dec 2024 10:00:04 GMT
      Server:
      - cloudflare
      content-length:
      - '105'
    status:
      code: 404
      message: Not Found
version: 1
```
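A sketch of a test that might consume this cassette, assuming a pytest-vcr style setup; the `span_exporter` and `anthropic_client` fixtures and the test name are hypothetical, not taken from the PR.

```python
# Hypothetical test sketch; assumes VCR replays the cassette above and that
# span_exporter wraps an InMemorySpanExporter collecting finished spans.
import anthropic
import pytest

from opentelemetry.semconv._incubating.attributes import (
    gen_ai_attributes as GenAIAttributes,
)
from opentelemetry.semconv.attributes import error_attributes as ErrorAttributes
from opentelemetry.trace import StatusCode


@pytest.mark.vcr()
def test_messages_create_invalid_model(span_exporter, anthropic_client):
    with pytest.raises(anthropic.NotFoundError):
        anthropic_client.messages.create(
            max_tokens=100,
            messages=[{"role": "user", "content": "Hello"}],
            model="invalid-model-name",
        )

    (span,) = span_exporter.get_finished_spans()
    assert span.status.status_code == StatusCode.ERROR
    assert span.attributes[ErrorAttributes.ERROR_TYPE] == "NotFoundError"
    assert (
        span.attributes[GenAIAttributes.GEN_AI_REQUEST_MODEL]
        == "invalid-model-name"
    )
```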
@keith-decker can TelemetryHandler cover all of the common code in this PR? This seems like a great PR to start integrating GenAI util with