X7ROOT File Manager
Current Path:
/opt/hc_python/lib/python3.12/site-packages/sentry_sdk/integrations
opt
/
hc_python
/
lib
/
python3.12
/
site-packages
/
sentry_sdk
/
integrations
/
📁
..
📄
__init__.py
(9.98 KB)
📁
__pycache__
📄
_asgi_common.py
(3.11 KB)
📄
_wsgi_common.py
(7.38 KB)
📄
aiohttp.py
(12.59 KB)
📄
anthropic.py
(9.21 KB)
📄
argv.py
(911 B)
📄
ariadne.py
(5.7 KB)
📄
arq.py
(7.67 KB)
📄
asgi.py
(12.48 KB)
📄
asyncio.py
(3.94 KB)
📄
asyncpg.py
(6.37 KB)
📄
atexit.py
(1.61 KB)
📄
aws_lambda.py
(17.53 KB)
📄
beam.py
(5.06 KB)
📄
boto3.py
(4.31 KB)
📄
bottle.py
(6.46 KB)
📁
celery
📄
chalice.py
(4.59 KB)
📄
clickhouse_driver.py
(5.12 KB)
📄
cloud_resource_context.py
(7.6 KB)
📄
cohere.py
(9.11 KB)
📄
dedupe.py
(1.38 KB)
📁
django
📄
dramatiq.py
(5.45 KB)
📄
excepthook.py
(2.35 KB)
📄
executing.py
(1.95 KB)
📄
falcon.py
(9.28 KB)
📄
fastapi.py
(4.61 KB)
📄
flask.py
(8.54 KB)
📄
gcp.py
(8.08 KB)
📄
gnu_backtrace.py
(2.83 KB)
📄
gql.py
(4.08 KB)
📄
graphene.py
(4.92 KB)
📁
grpc
📄
httpx.py
(5.73 KB)
📄
huey.py
(5.32 KB)
📄
huggingface_hub.py
(6.4 KB)
📄
langchain.py
(17.3 KB)
📄
launchdarkly.py
(1.89 KB)
📄
litestar.py
(11.3 KB)
📄
logging.py
(13.19 KB)
📄
loguru.py
(3.54 KB)
📄
modules.py
(820 B)
📄
openai.py
(15.22 KB)
📄
openfeature.py
(1.21 KB)
📁
opentelemetry
📄
pure_eval.py
(4.47 KB)
📄
pymongo.py
(6.23 KB)
📄
pyramid.py
(7.19 KB)
📄
quart.py
(7.26 KB)
📄
ray.py
(4.06 KB)
📁
redis
📄
rq.py
(5.18 KB)
📄
rust_tracing.py
(8.87 KB)
📄
sanic.py
(12.66 KB)
📄
serverless.py
(1.76 KB)
📄
socket.py
(3.09 KB)
📁
spark
📄
sqlalchemy.py
(4.27 KB)
📄
starlette.py
(25.79 KB)
📄
starlite.py
(10.37 KB)
📄
statsig.py
(1.2 KB)
📄
stdlib.py
(8.62 KB)
📄
strawberry.py
(13.79 KB)
📄
sys_exit.py
(2.43 KB)
📄
threading.py
(5.27 KB)
📄
tornado.py
(7.05 KB)
📄
trytond.py
(1.61 KB)
📄
typer.py
(1.77 KB)
📄
unleash.py
(1.03 KB)
📄
wsgi.py
(10.5 KB)
Editing: huggingface_hub.py
from functools import wraps

# Runtime isinstance checks must use collections.abc, not typing generics
# (isinstance with typing.Iterable is deprecated since Python 3.9).
from collections.abc import Iterable
from typing import Any, Callable

import sentry_sdk
from sentry_sdk import consts
from sentry_sdk.ai.monitoring import record_token_usage
from sentry_sdk.ai.utils import set_data_normalized
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.utils import (
    capture_internal_exceptions,
    event_from_exception,
)

try:
    import huggingface_hub.inference._client

    from huggingface_hub import ChatCompletionStreamOutput, TextGenerationOutput
except ImportError as e:
    # Chain the original ImportError so the real failure is visible in tracebacks.
    raise DidNotEnable("Huggingface not installed") from e


class HuggingfaceHubIntegration(Integration):
    """Sentry integration that instruments ``InferenceClient.text_generation``.

    Each text-generation call is wrapped in a Sentry span that records the
    model id, the streaming flag and, when PII sending is allowed and
    ``include_prompts`` is enabled, the prompt and generated text, plus token
    usage where the response exposes it.
    """

    identifier = "huggingface_hub"
    origin = f"auto.ai.{identifier}"

    def __init__(self, include_prompts=True):
        # type: (HuggingfaceHubIntegration, bool) -> None
        # When False, prompts/responses are never attached to spans, even if
        # send_default_pii is on.
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        # Monkeypatch the client method once per process with the tracing wrapper.
        huggingface_hub.inference._client.InferenceClient.text_generation = (
            _wrap_text_generation(
                huggingface_hub.inference._client.InferenceClient.text_generation
            )
        )


def _capture_exception(exc):
    # type: (Any) -> None
    """Report an exception raised by the wrapped client call to Sentry."""
    event, hint = event_from_exception(
        exc,
        client_options=sentry_sdk.get_client().options,
        mechanism={"type": "huggingface_hub", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)


def _wrap_text_generation(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Return a wrapper for ``InferenceClient.text_generation`` that adds a span.

    The wrapper is transparent when the integration is not enabled or the call
    shape is not recognized. For streaming responses the span is kept open
    until the returned iterator is exhausted.
    """

    @wraps(f)
    def new_text_generation(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(HuggingfaceHubIntegration)
        if integration is None:
            return f(*args, **kwargs)

        if "prompt" in kwargs:
            prompt = kwargs["prompt"]
        elif len(args) >= 2:
            # Positional call: args[0] is self, args[1] is the prompt. Move the
            # prompt into kwargs so downstream handling is uniform.
            kwargs["prompt"] = args[1]
            prompt = kwargs["prompt"]
            args = (args[0],) + args[2:]
        else:
            # invalid call, let it return error
            return f(*args, **kwargs)

        model = kwargs.get("model")
        streaming = kwargs.get("stream")

        # Entered manually (not via ``with``) because streaming responses must
        # keep the span open past this function's return, until the returned
        # iterator is consumed.
        span = sentry_sdk.start_span(
            op=consts.OP.HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE,
            name="Text Generation",
            origin=HuggingfaceHubIntegration.origin,
        )
        span.__enter__()
        try:
            res = f(*args, **kwargs)
        except Exception as e:
            _capture_exception(e)
            span.__exit__(None, None, None)
            # ``from None`` deliberately suppresses the re-raise context.
            raise e from None

        with capture_internal_exceptions():
            if should_send_default_pii() and integration.include_prompts:
                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, prompt)

            set_data_normalized(span, SPANDATA.AI_MODEL_ID, model)
            set_data_normalized(span, SPANDATA.AI_STREAMING, streaming)

            # Non-streaming plain-text response.
            if isinstance(res, str):
                if should_send_default_pii() and integration.include_prompts:
                    set_data_normalized(
                        span,
                        SPANDATA.AI_RESPONSES,
                        [res],
                    )
                span.__exit__(None, None, None)
                return res

            # Non-streaming detailed response.
            if isinstance(res, TextGenerationOutput):
                if should_send_default_pii() and integration.include_prompts:
                    set_data_normalized(
                        span,
                        SPANDATA.AI_RESPONSES,
                        [res.generated_text],
                    )
                if res.details is not None and res.details.generated_tokens > 0:
                    record_token_usage(span, total_tokens=res.details.generated_tokens)
                span.__exit__(None, None, None)
                return res

            if not isinstance(res, Iterable):
                # we only know how to deal with strings and iterables, ignore
                set_data_normalized(span, "unknown_response", True)
                span.__exit__(None, None, None)
                return res

            if kwargs.get("details", False):
                # res is Iterable[TextGenerationStreamOutput]
                def new_details_iterator():
                    # type: () -> Iterable[ChatCompletionStreamOutput]
                    with capture_internal_exceptions():
                        tokens_used = 0
                        data_buf: list[str] = []
                        for x in res:
                            if hasattr(x, "token") and hasattr(x.token, "text"):
                                data_buf.append(x.token.text)
                            if hasattr(x, "details") and hasattr(
                                x.details, "generated_tokens"
                            ):
                                tokens_used = x.details.generated_tokens
                            yield x
                        if (
                            len(data_buf) > 0
                            and should_send_default_pii()
                            and integration.include_prompts
                        ):
                            set_data_normalized(
                                span, SPANDATA.AI_RESPONSES, "".join(data_buf)
                            )
                        if tokens_used > 0:
                            record_token_usage(span, total_tokens=tokens_used)
                    # Close the span only after the stream is fully consumed.
                    span.__exit__(None, None, None)

                return new_details_iterator()

            else:
                # res is Iterable[str]
                def new_iterator():
                    # type: () -> Iterable[str]
                    data_buf: list[str] = []
                    with capture_internal_exceptions():
                        for s in res:
                            if isinstance(s, str):
                                data_buf.append(s)
                            yield s
                        if (
                            len(data_buf) > 0
                            and should_send_default_pii()
                            and integration.include_prompts
                        ):
                            set_data_normalized(
                                span, SPANDATA.AI_RESPONSES, "".join(data_buf)
                            )
                    # Close the span only after the stream is fully consumed.
                    span.__exit__(None, None, None)

                return new_iterator()

    return new_text_generation
Upload File
Create Folder