Commit 91ed05d

Authored by krrishdholakia, elabbarw, dhh1995, superpoussin22, and ma-armenta
Litellm dev contributor prs 01 31 2025 (BerriAI#8168)
* Add O3-Mini for Azure and Remove Vision Support (BerriAI#8161)
* Azure released O3-mini at the same time as OpenAI, so I've added support here. Confirmed to work with Sweden Central.
* [FIX] Replace cgi for Python 3.13 with email.Message, as suggested in PEP 594 (BerriAI#8160)
* Update model_prices_and_context_window.json (BerriAI#8120): codestral2501 pricing on vertex_ai
* Fix/db view names (BerriAI#8119): fix case-sensitive DB view names
* Added quotes to the check query as well
* Added quotes to the create view query
* test: handle server error for flaky test (vertex ai has unstable endpoints)

Co-authored-by: Wanis Elabbar <[email protected]>
Co-authored-by: Honghua Dong <[email protected]>
Co-authored-by: superpoussin22 <[email protected]>
Co-authored-by: Miguel Armenta <[email protected]>
Parent: d0c5639 · Commit: 91ed05d

6 files changed: +83 -11 lines

db_scripts/create_views.py

+2 -2

@@ -168,11 +168,11 @@ async def check_view_exists():  # noqa: PLR0915
         print("MonthlyGlobalSpendPerUserPerKey Created!")  # noqa

     try:
-        await db.query_raw("""SELECT 1 FROM DailyTagSpend LIMIT 1""")
+        await db.query_raw("""SELECT 1 FROM "DailyTagSpend" LIMIT 1""")
         print("DailyTagSpend Exists!")  # noqa
     except Exception:
         sql_query = """
-        CREATE OR REPLACE VIEW DailyTagSpend AS
+        CREATE OR REPLACE VIEW "DailyTagSpend" AS
         SELECT
             jsonb_array_elements_text(request_tags) AS individual_request_tag,
             DATE(s."startTime") AS spend_date,

litellm/litellm_core_utils/prompt_templates/factory.py

+4 -3

@@ -2153,7 +2153,7 @@ def stringify_json_tool_call_content(messages: List) -> List:
 
 import base64
 import mimetypes
-from cgi import parse_header
+from email.message import Message
 
 import httpx
 
@@ -2174,8 +2174,9 @@ def stringify_json_tool_call_content(messages: List) -> List:
 
 
 def _parse_content_type(content_type: str) -> str:
-    main_type, _ = parse_header(content_type)
-    return main_type
+    m = Message()
+    m['content-type'] = content_type
+    return m.get_content_type()
 
 
 class BedrockImageProcessor:
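
Since cgi was deprecated by PEP 594 and removed in Python 3.13, header parsing moves to email.message.Message, which handles the same "type/subtype; parameters" syntax. A quick, self-contained check of the replacement pattern (the sample header values are illustrative):

    from email.message import Message

    def parse_content_type(content_type: str) -> str:
        # Same approach as the patched _parse_content_type above.
        m = Message()
        m["content-type"] = content_type
        return m.get_content_type()  # drops parameters such as "; charset=utf-8"

    print(parse_content_type("image/png; charset=utf-8"))  # image/png
    print(parse_content_type("application/json"))          # application/json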

litellm/model_prices_and_context_window_backup.json

+36 -2

@@ -211,7 +211,7 @@
         "cache_read_input_token_cost": 0.00000055,
         "litellm_provider": "openai",
         "mode": "chat",
-        "supports_vision": true,
+        "supports_vision": false,
         "supports_prompt_caching": true
     },
     "o3-mini-2025-01-31": {
@@ -223,7 +223,7 @@
         "cache_read_input_token_cost": 0.00000055,
         "litellm_provider": "openai",
         "mode": "chat",
-        "supports_vision": true,
+        "supports_vision": false,
         "supports_prompt_caching": true
     },
     "o1-mini-2024-09-12": {
@@ -929,6 +929,30 @@
         "input_cost_per_character": 0.000030,
         "litellm_provider": "openai"
     },
+    "azure/o3-mini": {
+        "max_tokens": 100000,
+        "max_input_tokens": 200000,
+        "max_output_tokens": 100000,
+        "input_cost_per_token": 0.0000011,
+        "output_cost_per_token": 0.0000044,
+        "cache_read_input_token_cost": 0.00000055,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_vision": false,
+        "supports_prompt_caching": true
+    },
+    "azure/o3-mini-2025-01-31": {
+        "max_tokens": 100000,
+        "max_input_tokens": 200000,
+        "max_output_tokens": 100000,
+        "input_cost_per_token": 0.0000011,
+        "output_cost_per_token": 0.0000044,
+        "cache_read_input_token_cost": 0.00000055,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_vision": false,
+        "supports_prompt_caching": true
+    },
     "azure/tts-1": {
         "mode": "audio_speech",
         "input_cost_per_character": 0.000015,
@@ -3876,6 +3900,16 @@
         "mode": "chat",
         "supports_function_calling": true
     },
+    "vertex_ai/codestral@2405": {
+        "max_tokens": 128000,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "input_cost_per_token": 0.0000002,
+        "output_cost_per_token": 0.0000006,
+        "litellm_provider": "vertex_ai-mistral_models",
+        "mode": "chat",
+        "supports_function_calling": true
+    },
     "vertex_ai/imagegeneration@006": {
         "output_cost_per_image": 0.020,
         "litellm_provider": "vertex_ai-image-models",

litellm/proxy/db/create_views.py

+2 -2

@@ -158,11 +158,11 @@ async def create_missing_views(db: _db):  # noqa: PLR0915
         print("MonthlyGlobalSpendPerUserPerKey Created!")  # noqa

     try:
-        await db.query_raw("""SELECT 1 FROM DailyTagSpend LIMIT 1""")
+        await db.query_raw("""SELECT 1 FROM "DailyTagSpend" LIMIT 1""")
         print("DailyTagSpend Exists!")  # noqa
     except Exception:
         sql_query = """
-        CREATE OR REPLACE VIEW DailyTagSpend AS
+        CREATE OR REPLACE VIEW "DailyTagSpend" AS
         SELECT
             jsonb_array_elements_text(request_tags) AS individual_request_tag,
             DATE(s."startTime") AS spend_date,

model_prices_and_context_window.json

+36 -2

@@ -211,7 +211,7 @@
         "cache_read_input_token_cost": 0.00000055,
         "litellm_provider": "openai",
         "mode": "chat",
-        "supports_vision": true,
+        "supports_vision": false,
         "supports_prompt_caching": true
     },
     "o3-mini-2025-01-31": {
@@ -223,7 +223,7 @@
         "cache_read_input_token_cost": 0.00000055,
         "litellm_provider": "openai",
         "mode": "chat",
-        "supports_vision": true,
+        "supports_vision": false,
         "supports_prompt_caching": true
     },
     "o1-mini-2024-09-12": {
@@ -929,6 +929,30 @@
         "input_cost_per_character": 0.000030,
         "litellm_provider": "openai"
     },
+    "azure/o3-mini": {
+        "max_tokens": 100000,
+        "max_input_tokens": 200000,
+        "max_output_tokens": 100000,
+        "input_cost_per_token": 0.0000011,
+        "output_cost_per_token": 0.0000044,
+        "cache_read_input_token_cost": 0.00000055,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_vision": false,
+        "supports_prompt_caching": true
+    },
+    "azure/o3-mini-2025-01-31": {
+        "max_tokens": 100000,
+        "max_input_tokens": 200000,
+        "max_output_tokens": 100000,
+        "input_cost_per_token": 0.0000011,
+        "output_cost_per_token": 0.0000044,
+        "cache_read_input_token_cost": 0.00000055,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_vision": false,
+        "supports_prompt_caching": true
+    },
     "azure/tts-1": {
         "mode": "audio_speech",
         "input_cost_per_character": 0.000015,
@@ -3876,6 +3900,16 @@
         "mode": "chat",
         "supports_function_calling": true
     },
+    "vertex_ai/codestral@2405": {
+        "max_tokens": 128000,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "input_cost_per_token": 0.0000002,
+        "output_cost_per_token": 0.0000006,
+        "litellm_provider": "vertex_ai-mistral_models",
+        "mode": "chat",
+        "supports_function_calling": true
+    },
     "vertex_ai/imagegeneration@006": {
         "output_cost_per_image": 0.020,
         "litellm_provider": "vertex_ai-image-models",

tests/local_testing/test_batch_completions.py

+3

@@ -43,6 +43,9 @@ def test_batch_completions():
     except Timeout as e:
         print(f"IN TIMEOUT")
         pass
+    except litellm.InternalServerError as e:
+        print(f"IN INTERNAL SERVER ERROR")
+        pass
     except Exception as e:
         pytest.fail(f"An error occurred: {e}")
 
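
The extra except clause keeps a transient provider 5xx from failing the whole test. If more flaky tests need the same tolerance, the pattern could be factored into a reusable context manager; a sketch, using the litellm.Timeout and litellm.InternalServerError exceptions the test already relies on (the helper name is made up):

    import contextlib
    import litellm

    @contextlib.contextmanager
    def tolerate_provider_flakiness():
        """Swallow transient provider errors (timeouts, 5xx) in flaky integration tests."""
        try:
            yield
        except (litellm.Timeout, litellm.InternalServerError) as e:
            print(f"tolerated transient provider error: {e}")

    # usage inside a test:
    # with tolerate_provider_flakiness():
    #     responses = litellm.batch_completion(model="gpt-3.5-turbo", messages=[...])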
