opt
/
hc_python
/
lib
/
python3.12
/
site-packages
/
sentry_sdk
/
integrations
/
Go to Home Directory
+
Upload
Create File
root@0UT1S:~$
Execute
By Order of Mr.0UT1S
[DIR] ..
N/A
[DIR] __pycache__
N/A
[DIR] celery
N/A
[DIR] django
N/A
[DIR] grpc
N/A
[DIR] opentelemetry
N/A
[DIR] redis
N/A
[DIR] spark
N/A
__init__.py
9.95 KB
Rename
Delete
_asgi_common.py
3.11 KB
Rename
Delete
_wsgi_common.py
7.38 KB
Rename
Delete
aiohttp.py
12.59 KB
Rename
Delete
anthropic.py
9.21 KB
Rename
Delete
argv.py
911 bytes
Rename
Delete
ariadne.py
5.70 KB
Rename
Delete
arq.py
7.67 KB
Rename
Delete
asgi.py
12.46 KB
Rename
Delete
asyncio.py
4.65 KB
Rename
Delete
asyncpg.py
6.37 KB
Rename
Delete
atexit.py
1.61 KB
Rename
Delete
aws_lambda.py
17.53 KB
Rename
Delete
beam.py
5.06 KB
Rename
Delete
boto3.py
4.31 KB
Rename
Delete
bottle.py
6.46 KB
Rename
Delete
chalice.py
4.59 KB
Rename
Delete
clickhouse_driver.py
5.12 KB
Rename
Delete
cloud_resource_context.py
7.60 KB
Rename
Delete
cohere.py
9.05 KB
Rename
Delete
dedupe.py
1.38 KB
Rename
Delete
dramatiq.py
5.45 KB
Rename
Delete
excepthook.py
2.35 KB
Rename
Delete
executing.py
1.95 KB
Rename
Delete
falcon.py
9.28 KB
Rename
Delete
fastapi.py
4.61 KB
Rename
Delete
flask.py
8.54 KB
Rename
Delete
gcp.py
8.08 KB
Rename
Delete
gnu_backtrace.py
2.83 KB
Rename
Delete
gql.py
4.08 KB
Rename
Delete
graphene.py
4.92 KB
Rename
Delete
httpx.py
5.73 KB
Rename
Delete
huey.py
5.32 KB
Rename
Delete
huggingface_hub.py
6.38 KB
Rename
Delete
langchain.py
17.30 KB
Rename
Delete
launchdarkly.py
1.90 KB
Rename
Delete
litestar.py
11.30 KB
Rename
Delete
logging.py
13.09 KB
Rename
Delete
loguru.py
3.77 KB
Rename
Delete
modules.py
820 bytes
Rename
Delete
openai.py
15.19 KB
Rename
Delete
openfeature.py
1.27 KB
Rename
Delete
pure_eval.py
4.47 KB
Rename
Delete
pymongo.py
6.23 KB
Rename
Delete
pyramid.py
7.19 KB
Rename
Delete
quart.py
7.26 KB
Rename
Delete
ray.py
4.06 KB
Rename
Delete
rq.py
5.18 KB
Rename
Delete
rust_tracing.py
8.87 KB
Rename
Delete
sanic.py
12.66 KB
Rename
Delete
serverless.py
1.76 KB
Rename
Delete
socket.py
3.09 KB
Rename
Delete
sqlalchemy.py
4.27 KB
Rename
Delete
starlette.py
25.69 KB
Rename
Delete
starlite.py
10.37 KB
Rename
Delete
statsig.py
1.20 KB
Rename
Delete
stdlib.py
8.62 KB
Rename
Delete
strawberry.py
13.79 KB
Rename
Delete
sys_exit.py
2.43 KB
Rename
Delete
threading.py
3.92 KB
Rename
Delete
tornado.py
7.05 KB
Rename
Delete
trytond.py
1.61 KB
Rename
Delete
typer.py
1.77 KB
Rename
Delete
unleash.py
1.05 KB
Rename
Delete
wsgi.py
10.50 KB
Rename
Delete
"""Sentry integration for the Cohere Python SDK.

Monkeypatches ``BaseCohere.chat`` / ``BaseCohere.chat_stream`` and
``Client.embed`` so that chat-completion and embedding calls emit Sentry
spans carrying model parameters, token usage, and — only when the SDK is
configured to send default PII *and* ``include_prompts`` is enabled —
prompts and responses.
"""

from functools import wraps
from typing import TYPE_CHECKING

import sentry_sdk
from sentry_sdk import consts
from sentry_sdk.ai.monitoring import record_token_usage
from sentry_sdk.ai.utils import set_data_normalized
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.utils import capture_internal_exceptions, event_from_exception

if TYPE_CHECKING:
    from typing import Any, Callable, Iterator

    from sentry_sdk.tracing import Span

try:
    from cohere.client import Client
    from cohere.base_client import BaseCohere
    from cohere import (
        ChatStreamEndEvent,
        NonStreamedChatResponse,
    )

    if TYPE_CHECKING:
        from cohere import StreamedChatResponse
except ImportError:
    raise DidNotEnable("Cohere not installed")

try:
    # Renamed in cohere 5.9.3+; fall back to the pre-5.9.3 name on older SDKs.
    from cohere import StreamEndStreamedChatResponse
except ImportError:
    from cohere import StreamedChatResponse_StreamEnd as StreamEndStreamedChatResponse


# Non-PII chat kwargs copied onto the span verbatim (kwarg name -> span data key).
COLLECTED_CHAT_PARAMS = {
    "model": SPANDATA.AI_MODEL_ID,
    "k": SPANDATA.AI_TOP_K,
    "p": SPANDATA.AI_TOP_P,
    "seed": SPANDATA.AI_SEED,
    "frequency_penalty": SPANDATA.AI_FREQUENCY_PENALTY,
    "presence_penalty": SPANDATA.AI_PRESENCE_PENALTY,
    "raw_prompting": SPANDATA.AI_RAW_PROMPTING,
}

# Chat kwargs that may contain user data; recorded only when PII is allowed.
COLLECTED_PII_CHAT_PARAMS = {
    "tools": SPANDATA.AI_TOOLS,
    "preamble": SPANDATA.AI_PREAMBLE,
}

# Response attributes copied onto the span when present (attr -> span data key).
COLLECTED_CHAT_RESP_ATTRS = {
    "generation_id": "ai.generation_id",
    "is_search_required": "ai.is_search_required",
    "finish_reason": "ai.finish_reason",
}

# Response attributes that may contain user data; recorded only when PII is allowed.
COLLECTED_PII_CHAT_RESP_ATTRS = {
    "citations": "ai.citations",
    "documents": "ai.documents",
    "search_queries": "ai.search_queries",
    "search_results": "ai.search_results",
    "tool_calls": "ai.tool_calls",
}


class CohereIntegration(Integration):
    identifier = "cohere"
    origin = f"auto.ai.{identifier}"

    def __init__(self, include_prompts=True):
        # type: (CohereIntegration, bool) -> None
        # When False, prompts/responses are never attached to spans even if
        # the SDK is configured to send default PII.
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        BaseCohere.chat = _wrap_chat(BaseCohere.chat, streaming=False)
        Client.embed = _wrap_embed(Client.embed)
        BaseCohere.chat_stream = _wrap_chat(BaseCohere.chat_stream, streaming=True)


def _capture_exception(exc):
    # type: (Any) -> None
    """Report an exception raised inside a wrapped Cohere call to Sentry."""
    event, hint = event_from_exception(
        exc,
        client_options=sentry_sdk.get_client().options,
        mechanism={"type": "cohere", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)


def _wrap_chat(f, streaming):
    # type: (Callable[..., Any], bool) -> Callable[..., Any]
    """Wrap ``chat`` (``streaming=False``) or ``chat_stream`` (``streaming=True``).

    The returned wrapper opens a span around the call, records request
    parameters and (PII permitting) prompts, and attaches response fields and
    token usage once the (possibly streamed) response completes.
    """

    def collect_chat_response_fields(span, res, include_pii):
        # type: (Span, NonStreamedChatResponse, bool) -> None
        # Attach response attributes and token counts to the span; ``res`` is
        # either a NonStreamedChatResponse or the terminal stream event's
        # ``.response``.
        if include_pii:
            if hasattr(res, "text"):
                set_data_normalized(
                    span,
                    SPANDATA.AI_RESPONSES,
                    [res.text],
                )
            for pii_attr in COLLECTED_PII_CHAT_RESP_ATTRS:
                if hasattr(res, pii_attr):
                    set_data_normalized(span, "ai." + pii_attr, getattr(res, pii_attr))

        for attr in COLLECTED_CHAT_RESP_ATTRS:
            if hasattr(res, attr):
                set_data_normalized(span, "ai." + attr, getattr(res, attr))

        if hasattr(res, "meta"):
            # Prefer billed units; fall back to raw token counts.
            if hasattr(res.meta, "billed_units"):
                record_token_usage(
                    span,
                    prompt_tokens=res.meta.billed_units.input_tokens,
                    completion_tokens=res.meta.billed_units.output_tokens,
                )
            elif hasattr(res.meta, "tokens"):
                record_token_usage(
                    span,
                    prompt_tokens=res.meta.tokens.input_tokens,
                    completion_tokens=res.meta.tokens.output_tokens,
                )

            if hasattr(res.meta, "warnings"):
                set_data_normalized(span, "ai.warnings", res.meta.warnings)

    @wraps(f)
    def new_chat(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(CohereIntegration)

        # Only instrument calls that pass a string ``message`` kwarg; anything
        # else goes straight through to the wrapped function.
        if integration is None or not isinstance(kwargs.get("message"), str):
            return f(*args, **kwargs)

        message = kwargs.get("message")

        # Entered/exited manually because for streaming responses the span
        # must stay open until the returned iterator is exhausted.
        span = sentry_sdk.start_span(
            op=consts.OP.COHERE_CHAT_COMPLETIONS_CREATE,
            name="cohere.client.Chat",
            origin=CohereIntegration.origin,
        )
        span.__enter__()
        try:
            res = f(*args, **kwargs)
        except Exception as e:
            _capture_exception(e)
            span.__exit__(None, None, None)
            raise e from None

        with capture_internal_exceptions():
            if should_send_default_pii() and integration.include_prompts:
                # Record chat history plus the current user message.
                set_data_normalized(
                    span,
                    SPANDATA.AI_INPUT_MESSAGES,
                    list(
                        map(
                            lambda x: {
                                "role": getattr(x, "role", "").lower(),
                                "content": getattr(x, "message", ""),
                            },
                            kwargs.get("chat_history", []),
                        )
                    )
                    + [{"role": "user", "content": message}],
                )

            for k, v in COLLECTED_PII_CHAT_PARAMS.items():
                if k in kwargs:
                    set_data_normalized(span, v, kwargs[k])

            for k, v in COLLECTED_CHAT_PARAMS.items():
                if k in kwargs:
                    set_data_normalized(span, v, kwargs[k])
            set_data_normalized(span, SPANDATA.AI_STREAMING, False)

            if streaming:
                old_iterator = res

                def new_iterator():
                    # type: () -> Iterator[StreamedChatResponse]
                    # Re-yield every event; harvest response fields from the
                    # terminal stream-end event, then close the span.
                    with capture_internal_exceptions():
                        for x in old_iterator:
                            if isinstance(
                                x, (ChatStreamEndEvent, StreamEndStreamedChatResponse)
                            ):
                                collect_chat_response_fields(
                                    span,
                                    x.response,
                                    include_pii=should_send_default_pii()
                                    and integration.include_prompts,
                                )
                            yield x

                    span.__exit__(None, None, None)

                return new_iterator()
            elif isinstance(res, NonStreamedChatResponse):
                collect_chat_response_fields(
                    span,
                    res,
                    include_pii=should_send_default_pii()
                    and integration.include_prompts,
                )
                span.__exit__(None, None, None)
            else:
                set_data_normalized(span, "unknown_response", True)
                span.__exit__(None, None, None)

            return res

    return new_chat


def _wrap_embed(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap ``Client.embed`` to record inputs, model, and token usage."""

    @wraps(f)
    def new_embed(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(CohereIntegration)
        if integration is None:
            return f(*args, **kwargs)

        with sentry_sdk.start_span(
            op=consts.OP.COHERE_EMBEDDINGS_CREATE,
            name="Cohere Embedding Creation",
            origin=CohereIntegration.origin,
        ) as span:
            if "texts" in kwargs and (
                should_send_default_pii() and integration.include_prompts
            ):
                # NOTE(review): a bare string is recorded under "ai.texts"
                # while a list of strings uses SPANDATA.AI_INPUT_MESSAGES —
                # looks inconsistent upstream; preserved as-is.
                if isinstance(kwargs["texts"], str):
                    set_data_normalized(span, "ai.texts", [kwargs["texts"]])
                elif (
                    isinstance(kwargs["texts"], list)
                    and len(kwargs["texts"]) > 0
                    and isinstance(kwargs["texts"][0], str)
                ):
                    set_data_normalized(
                        span, SPANDATA.AI_INPUT_MESSAGES, kwargs["texts"]
                    )

            if "model" in kwargs:
                set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"])

            try:
                res = f(*args, **kwargs)
            except Exception as e:
                _capture_exception(e)
                raise e from None

            if (
                hasattr(res, "meta")
                and hasattr(res.meta, "billed_units")
                and hasattr(res.meta.billed_units, "input_tokens")
            ):
                # Embeddings have no completion tokens: input == total.
                record_token_usage(
                    span,
                    prompt_tokens=res.meta.billed_units.input_tokens,
                    total_tokens=res.meta.billed_units.input_tokens,
                )

            return res

    return new_embed
Save