5
5
import litellm
6
6
from litellm ._logging import verbose_proxy_logger
7
7
from litellm .caching .caching import RedisCache
8
+ from litellm .litellm_core_utils .safe_json_dumps import safe_dumps
8
9
from litellm .litellm_core_utils .sensitive_data_masker import SensitiveDataMasker
10
+ from litellm .proxy ._types import ProxyErrorTypes , ProxyException
9
11
from litellm .proxy .auth .user_api_key_auth import user_api_key_auth
12
+ from litellm .types .caching import CachePingResponse
10
13
11
14
# Module-level masker shared by the cache endpoints below: redacts
# sensitive values (API keys, passwords, connection secrets) from the
# cache-parameter dicts before they are serialized into responses.
masker = SensitiveDataMasker ()
12
15
18
21
19
22
@router .get (
20
23
"/ping" ,
24
+ response_model = CachePingResponse ,
21
25
dependencies = [Depends (user_api_key_auth )],
22
26
)
23
27
async def cache_ping ():
@@ -27,27 +31,17 @@ async def cache_ping():
27
31
litellm_cache_params : Dict [str , Any ] = {}
28
32
specific_cache_params : Dict [str , Any ] = {}
29
33
try :
30
-
31
34
if litellm .cache is None :
32
35
raise HTTPException (
33
36
status_code = 503 , detail = "Cache not initialized. litellm.cache is None"
34
37
)
35
- litellm_cache_params = {}
36
- specific_cache_params = {}
37
- for k , v in vars (litellm .cache ).items ():
38
- try :
39
- if k == "cache" :
40
- continue
41
- litellm_cache_params [k ] = v
42
- except Exception :
43
- litellm_cache_params [k ] = "<unable to copy or convert>"
44
- for k , v in vars (litellm .cache .cache ).items ():
45
- try :
46
- specific_cache_params [k ] = v
47
- except Exception :
48
- specific_cache_params [k ] = "<unable to copy or convert>"
49
- litellm_cache_params = masker .mask_dict (litellm_cache_params )
50
- specific_cache_params = masker .mask_dict (specific_cache_params )
38
+ litellm_cache_params = masker .mask_dict (vars (litellm .cache ))
39
+ # remove field that might reference itself
40
+ litellm_cache_params .pop ("cache" , None )
41
+ specific_cache_params = (
42
+ masker .mask_dict (vars (litellm .cache .cache )) if litellm .cache else {}
43
+ )
44
+
51
45
if litellm .cache .type == "redis" :
52
46
# ping the redis cache
53
47
ping_response = await litellm .cache .ping ()
@@ -63,24 +57,35 @@ async def cache_ping():
63
57
)
64
58
verbose_proxy_logger .debug ("/cache/ping: done with set_cache()" )
65
59
66
- return {
67
- " status" : "healthy" ,
68
- " cache_type" : litellm .cache .type ,
69
- " ping_response" : True ,
70
- " set_cache_response" : "success" ,
71
- " litellm_cache_params" : litellm_cache_params ,
72
- " redis_cache_params" : specific_cache_params ,
73
- }
60
+ return CachePingResponse (
61
+ status = "healthy" ,
62
+ cache_type = str ( litellm .cache .type ) ,
63
+ ping_response = True ,
64
+ set_cache_response = "success" ,
65
+ litellm_cache_params = safe_dumps ( litellm_cache_params ) ,
66
+ redis_cache_params = safe_dumps ( specific_cache_params ) ,
67
+ )
74
68
else :
75
- return {
76
- " status" : "healthy" ,
77
- " cache_type" : litellm .cache .type ,
78
- " litellm_cache_params" : litellm_cache_params ,
79
- }
69
+ return CachePingResponse (
70
+ status = "healthy" ,
71
+ cache_type = str ( litellm .cache .type ) ,
72
+ litellm_cache_params = safe_dumps ( litellm_cache_params ) ,
73
+ )
80
74
except Exception as e :
81
- raise HTTPException (
82
- status_code = 503 ,
83
- detail = f"Service Unhealthy ({ str (e )} ).Cache parameters: { litellm_cache_params } .specific_cache_params: { specific_cache_params } " ,
75
+ import traceback
76
+
77
+ traceback .print_exc ()
78
+ error_message = {
79
+ "message" : f"Service Unhealthy ({ str (e )} )" ,
80
+ "litellm_cache_params" : safe_dumps (litellm_cache_params ),
81
+ "redis_cache_params" : safe_dumps (specific_cache_params ),
82
+ "traceback" : traceback .format_exc (),
83
+ }
84
+ raise ProxyException (
85
+ message = safe_dumps (error_message ),
86
+ type = ProxyErrorTypes .cache_ping_error ,
87
+ param = "cache_ping" ,
88
+ code = 503 ,
84
89
)
85
90
86
91
0 commit comments