Skip to content

Commit

Permalink
(fix) initialize OTEL Logging on LiteLLM Proxy - ensure OTEL logger…
Browse files Browse the repository at this point in the history
… is initialized only once (#7435)

* add otel to _custom_logger_compatible_callbacks_literal

* remove extra code

* fix _get_custom_logger_settings_from_proxy_server

* update unit tests
  • Loading branch information
ishaan-jaff authored Dec 27, 2024
1 parent 539f166 commit 17d5ff2
Show file tree
Hide file tree
Showing 6 changed files with 45 additions and 45 deletions.
1 change: 1 addition & 0 deletions litellm/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
"dynamic_rate_limiter",
"langsmith",
"prometheus",
"otel",
"datadog",
"datadog_llm_observability",
"galileo",
Expand Down
15 changes: 15 additions & 0 deletions litellm/integrations/opentelemetry.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,21 @@ def __init__(

# init CustomLogger params
super().__init__(**kwargs)
self._init_otel_logger_on_litellm_proxy()

def _init_otel_logger_on_litellm_proxy(self):
    """
    Wire this OpenTelemetry logger into the litellm proxy server.

    Registers "otel" as a service callback (at most once, so repeated
    construction does not duplicate the entry) and publishes this
    instance on the proxy server module as ``open_telemetry_logger``.
    """
    from litellm.proxy import proxy_server

    # Register as a service callback exactly once.
    service_callbacks = litellm.service_callback
    if "otel" not in service_callbacks:
        service_callbacks.append("otel")

    # Make this logger instance discoverable by the proxy server.
    proxy_server.open_telemetry_logger = self

def log_success_event(self, kwargs, response_obj, start_time, end_time):
self._handle_sucess(kwargs, response_obj, start_time, end_time)
Expand Down
24 changes: 22 additions & 2 deletions litellm/litellm_core_utils/litellm_logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -2327,8 +2327,11 @@ def _init_custom_logger_compatible_class( # noqa: PLR0915
for callback in _in_memory_loggers:
if isinstance(callback, OpenTelemetry):
return callback # type: ignore

otel_logger = OpenTelemetry()
otel_logger = OpenTelemetry(
**_get_custom_logger_settings_from_proxy_server(
callback_name=logging_integration
)
)
_in_memory_loggers.append(otel_logger)
return otel_logger # type: ignore

Expand Down Expand Up @@ -2544,6 +2547,23 @@ def get_custom_logger_compatible_class( # noqa: PLR0915
return None


def _get_custom_logger_settings_from_proxy_server(callback_name: str) -> Dict:
    """
    Look up per-callback settings declared in the proxy server config.yaml.

    The proxy config may declare callback settings as::

        callback_settings:
          otel:
            message_logging: False

    Args:
        callback_name: key under ``callback_settings`` (e.g. "otel").

    Returns:
        A plain dict of the settings for ``callback_name``; empty when no
        ``callback_settings`` are configured or the key is absent.
    """
    from litellm.proxy.proxy_server import callback_settings

    # Guard clause: no settings configured on the proxy at all.
    if not callback_settings:
        return {}
    return dict(callback_settings.get(callback_name, {}))


def use_custom_pricing_for_model(litellm_params: Optional[dict]) -> bool:
if litellm_params is None:
return False
Expand Down
18 changes: 1 addition & 17 deletions litellm/proxy/common_utils/callback_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def initialize_callbacks_on_proxy( # noqa: PLR0915
litellm_settings: dict,
callback_specific_params: dict = {},
):
from litellm.proxy.proxy_server import callback_settings, prisma_client
from litellm.proxy.proxy_server import prisma_client

verbose_proxy_logger.debug(
f"{blue_color_code}initializing callbacks={value} on proxy{reset_color_code}"
Expand All @@ -30,22 +30,6 @@ def initialize_callbacks_on_proxy( # noqa: PLR0915
and callback in litellm._known_custom_logger_compatible_callbacks
):
imported_list.append(callback)
elif isinstance(callback, str) and callback == "otel":
from litellm.integrations.opentelemetry import OpenTelemetry
from litellm.proxy import proxy_server

_otel_settings = {}
if isinstance(callback_settings, dict) and "otel" in callback_settings:
_otel_settings = callback_settings["otel"]

open_telemetry_logger = OpenTelemetry(**_otel_settings)

# Add Otel as a service callback
if "otel" not in litellm.service_callback:
litellm.service_callback.append("otel")

imported_list.append(open_telemetry_logger)
setattr(proxy_server, "open_telemetry_logger", open_telemetry_logger)
elif isinstance(callback, str) and callback == "presidio":
from litellm.proxy.guardrails.guardrail_hooks.presidio import (
_OPTIONAL_PresidioPIIMasking,
Expand Down
31 changes: 5 additions & 26 deletions litellm/proxy/proxy_config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,30 +11,9 @@ model_list:
litellm_params:
model: bedrock/*

litellm_settings:
callbacks: ["otel"]

guardrails:
- guardrail_name: "bedrock-pre-guard"
litellm_params:
guardrail: bedrock # supported values: "aporia", "bedrock", "lakera"
mode: "during_call"
guardrailIdentifier: ff6ujrregl1q
guardrailVersion: "DRAFT"

# for /files endpoints
# For /fine_tuning/jobs endpoints
finetune_settings:
- custom_llm_provider: azure
api_base: os.environ/AZURE_BATCHES_API_BASE
api_key: os.environ/AZURE_BATCHES_API_KEY
api_version: "2024-05-01-preview"
- custom_llm_provider: openai
api_key: os.environ/OPENAI_API_KEY

# for /files endpoints
files_settings:
- custom_llm_provider: azure
api_base: os.environ/AZURE_BATCHES_API_BASE
api_key: os.environ/AZURE_BATCHES_API_KEY
api_version: "2024-05-01-preview"
- custom_llm_provider: openai
api_key: os.environ/OPENAI_API_KEY
# callback_settings:
# otel:
# message_logging: False
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@
"langtrace": OpenTelemetry,
"mlflow": MlflowLogger,
"langfuse": LangfusePromptManagement,
"otel": OpenTelemetry,
}

expected_env_vars = {
Expand Down

0 comments on commit 17d5ff2

Please sign in to comment.