Skip to content
This repository was archived by the owner on May 19, 2025. It is now read-only.

Commit c73b918

Browse files
committed
Merge branch 'main' into grit-prod
2 parents 742f995 + 05dfc9e commit c73b918

File tree

5 files changed

+20
-47
lines changed

5 files changed

+20
-47
lines changed

litellm/llms/prompt_templates/factory.py

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -700,13 +700,6 @@ def anthropic_messages_pt(messages: list):
700700
if assistant_content:
701701
new_messages.append({"role": "assistant", "content": assistant_content})
702702

703-
if (
704-
msg_i < len(messages)
705-
and messages[msg_i]["role"] != user_message_types
706-
and messages[msg_i]["role"] != "assistant"
707-
):
708-
raise Exception(f"Invalid role passed in - {messages[msg_i]}")
709-
710703
if new_messages[0]["role"] != "user":
711704
if litellm.modify_params:
712705
new_messages.insert(

litellm/tests/test_completion.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -182,6 +182,7 @@ def test_completion_claude_3_function_call():
182182

183183
def test_completion_claude_3_multi_turn_conversations():
184184
litellm.set_verbose = True
185+
litellm.modify_params = True
185186
messages = [
186187
{"role": "assistant", "content": "?"}, # test first user message auto injection
187188
{"role": "user", "content": "Hi!"},

litellm/tests/test_python_38.py

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import sys, os, time
22
import traceback, asyncio
33
import pytest
4+
import subprocess
45

56
sys.path.insert(
67
0, os.path.abspath("../..")
@@ -16,3 +17,17 @@ def test_using_litellm():
1617
pytest.fail(
1718
f"Error occurred: {e}. Installing litellm on python3.8 failed please retry"
1819
)
20+
21+
22+
def test_litellm_proxy_server():
23+
# Install the litellm[proxy] package
24+
subprocess.run(["pip", "install", "litellm[proxy]"])
25+
26+
# Import the proxy_server module
27+
try:
28+
import litellm.proxy.proxy_server
29+
except ImportError:
30+
pytest.fail("Failed to import litellm.proxy_server")
31+
32+
# Assertion to satisfy the test, you can add other checks as needed
33+
assert True

litellm/utils.py

Lines changed: 0 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -2358,8 +2358,6 @@ def function_setup(
23582358
)
23592359
if "logger_fn" in kwargs:
23602360
user_logger_fn = kwargs["logger_fn"]
2361-
# CRASH REPORTING TELEMETRY
2362-
crash_reporting(*args, **kwargs)
23632361
# INIT LOGGER - for user-specified integrations
23642362
model = args[0] if len(args) > 0 else kwargs.get("model", None)
23652363
call_type = original_function.__name__
@@ -2464,25 +2462,6 @@ def post_call_processing(original_response, model):
24642462
except Exception as e:
24652463
raise e
24662464

2467-
def crash_reporting(*args, **kwargs):
2468-
if litellm.telemetry:
2469-
try:
2470-
model = args[0] if len(args) > 0 else kwargs["model"]
2471-
exception = kwargs["exception"] if "exception" in kwargs else None
2472-
custom_llm_provider = (
2473-
kwargs["custom_llm_provider"]
2474-
if "custom_llm_provider" in kwargs
2475-
else None
2476-
)
2477-
safe_crash_reporting(
2478-
model=model,
2479-
exception=exception,
2480-
custom_llm_provider=custom_llm_provider,
2481-
) # log usage-crash details. Do not log any user details. If you want to turn this off, set `litellm.telemetry=False`.
2482-
except:
2483-
# [Non-Blocking Error]
2484-
pass
2485-
24862465
@wraps(original_function)
24872466
def wrapper(*args, **kwargs):
24882467
# Prints Exactly what was passed to litellm function - don't execute any logic here - it should just print
@@ -2777,7 +2756,6 @@ def wrapper(*args, **kwargs):
27772756
kwargs["model"] = context_window_fallback_dict[model]
27782757
return original_function(*args, **kwargs)
27792758
traceback_exception = traceback.format_exc()
2780-
crash_reporting(*args, **kwargs, exception=traceback_exception)
27812759
end_time = datetime.datetime.now()
27822760
# LOG FAILURE - handle streaming failure logging in the _next_ object, remove `handle_failure` once it's deprecated
27832761
if logging_obj:
@@ -3199,7 +3177,6 @@ async def wrapper_async(*args, **kwargs):
31993177
return result
32003178
except Exception as e:
32013179
traceback_exception = traceback.format_exc()
3202-
crash_reporting(*args, **kwargs, exception=traceback_exception)
32033180
end_time = datetime.datetime.now()
32043181
if logging_obj:
32053182
try:
@@ -8287,17 +8264,6 @@ def exception_type(
82878264
raise original_exception
82888265

82898266

8290-
####### CRASH REPORTING ################
8291-
def safe_crash_reporting(model=None, exception=None, custom_llm_provider=None):
8292-
data = {
8293-
"model": model,
8294-
"exception": str(exception),
8295-
"custom_llm_provider": custom_llm_provider,
8296-
}
8297-
executor.submit(litellm_telemetry, data)
8298-
# threading.Thread(target=litellm_telemetry, args=(data,), daemon=True).start()
8299-
8300-
83018267
def get_or_generate_uuid():
83028268
temp_dir = os.path.join(os.path.abspath(os.sep), "tmp")
83038269
uuid_file = os.path.join(temp_dir, "litellm_uuid.txt")

pyproject.toml

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "litellm"
3-
version = "1.33.5"
3+
version = "1.33.8"
44
description = "Library to easily interface with LLM API providers"
55
authors = ["BerriAI"]
66
license = "MIT"
@@ -31,11 +31,10 @@ pyyaml = {version = "^6.0.1", optional = true}
3131
rq = {version = "*", optional = true}
3232
orjson = {version = "^3.9.7", optional = true}
3333
apscheduler = {version = "^3.10.4", optional = true}
34-
streamlit = {version = "^1.29.0", optional = true}
3534
fastapi-sso = { version = "^0.10.0", optional = true }
3635
PyJWT = { version = "^2.8.0", optional = true }
3736
python-multipart = { version = "^0.0.6", optional = true }
38-
argon2-cffi = { version = "^23.1.0", optional = true }
37+
cryptography = { version = "41.0.3", optional = true }
3938

4039
[tool.poetry.extras]
4140
proxy = [
@@ -50,15 +49,14 @@ proxy = [
5049
"fastapi-sso",
5150
"PyJWT",
5251
"python-multipart",
53-
"argon2-cffi",
52+
"cryptography"
5453
]
5554

5655
extra_proxy = [
5756
"prisma",
5857
"azure-identity",
5958
"azure-keyvault-secrets",
6059
"google-cloud-kms",
61-
"streamlit",
6260
"resend"
6361
]
6462

@@ -77,7 +75,7 @@ requires = ["poetry-core", "wheel"]
7775
build-backend = "poetry.core.masonry.api"
7876

7977
[tool.commitizen]
80-
version = "1.33.5"
78+
version = "1.33.8"
8179
version_files = [
8280
"pyproject.toml:^version"
8381
]

0 commit comments

Comments (0)