diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4a64b72b..87e73bca 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,7 +24,7 @@ jobs: - 5433:5433 redis: - image: redis:7.4.4 + image: redis:7.4.5 ports: - 6379:6379 @@ -46,7 +46,7 @@ jobs: - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5 with: - python-version: "3.13.3" + python-version: "3.13.5" cache: "poetry" - name: Validate lockfile diff --git a/.python-version b/.python-version index 2c20ac9b..86f8c02e 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.13.3 +3.13.5 diff --git a/Dockerfile b/Dockerfile index 18945b18..3b943eb7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.13.3 +FROM python:3.13.5 LABEL maintainer "ODL DevOps " # Add package files, install updated node and pip diff --git a/RELEASE.rst b/RELEASE.rst index ba897e6a..7c54eb55 100644 --- a/RELEASE.rst +++ b/RELEASE.rst @@ -1,6 +1,24 @@ Release Notes ============= +Version 0.11.0 +-------------- + +- New Canvas-specific syllabus bot endpoint (#238) +- fix(deps): update dependency uvicorn to ^0.35.0 (#247) +- fix(deps): update dependency starlette to v0.47.1 (#246) +- fix(deps): update dependency ruff to v0.12.3 (#245) +- chore(deps): update node.js to v22.17.0 (#244) +- chore(deps): update nginx docker tag to v1.29.0 (#243) +- fix(deps): update dependency next to v15.3.5 (#242) +- fix(deps): update dependency langmem to ^0.0.28 (#241) +- chore(deps): update redis docker tag to v7.4.5 (#240) +- chore(deps): update dependency eslint-config-next to v15.3.5 (#239) +- update open-learning-ai-tutor (#236) +- fix(deps): update django-health-check digest to 5267d8f (#225) +- fix(deps): update python docker tag to v3.13.5 (#231) +- Remove pytz from unit test (#58) + Version 0.10.2 (Released July 09, 2025) -------------- diff --git a/ai_chatbots/chatbots.py b/ai_chatbots/chatbots.py index 58dcbae9..22f47211 100644 --- a/ai_chatbots/chatbots.py +++ b/ai_chatbots/chatbots.py @@ -429,6 +429,9 @@ class SyllabusAgentState(SummaryState): course_id: Annotated[list[str], add] collection_name: Annotated[list[str], add] related_courses: Annotated[list[str], add] + # str representation of a boolean value, because the + # langgraph JsonPlusSerializer can't handle booleans + exclude_canvas: Annotated[Optional[list[str]], add] class SyllabusBot(SummarizingChatbot): diff --git a/ai_chatbots/conftest.py b/ai_chatbots/conftest.py index ad0632be..5773c6b2 100644 --- a/ai_chatbots/conftest.py +++ b/ai_chatbots/conftest.py @@ -108,6 +108,7 @@ def syllabus_agent_state(): ], course_id=["MITx+10.00.2x", "MITx+6.00.1x"], collection_name=[None, "vector512"], + exclude_canvas=["True", "True"], ) diff --git a/ai_chatbots/consumers.py b/ai_chatbots/consumers.py index 518f0c12..ad3617c6 100644 --- a/ai_chatbots/consumers.py +++ b/ai_chatbots/consumers.py @@ -428,10 +428,12 @@ def create_chatbot( def process_extra_state(self, data: dict) -> dict: """Process extra state parameters if any""" + user = self.scope.get("user", None) related_courses = data.get("related_courses", []) params = { "course_id": [data.get("course_id")], "collection_name": [data.get("collection_name")], + "exclude_canvas": [str(not user or user.is_anonymous or not user.is_staff)], } if related_courses: params["related_courses"] = related_courses @@ -460,6 +462,23 @@ async def create_checkpointer( ) +class CanvasSyllabusBotHttpConsumer(SyllabusBotHttpConsumer): + """ + Async HTTP consumer for the Canvas syllabus bot. 
+ Inherits from SyllabusBotHttpConsumer to reuse the logic. + """ + + ROOM_NAME = "CanvasSyllabusBot" + throttle_scope = "canvas_syllabus_bot" + + def process_extra_state(self, data: dict) -> dict: + """Process extra state parameters if any""" + return { + **super().process_extra_state(data), + "exclude_canvas": [str(False)], + } + + class TutorBotHttpConsumer(BaseBotHttpConsumer): """ Async HTTP consumer for the tutor bot. diff --git a/ai_chatbots/consumers_test.py b/ai_chatbots/consumers_test.py index 31c15809..267457e8 100644 --- a/ai_chatbots/consumers_test.py +++ b/ai_chatbots/consumers_test.py @@ -239,12 +239,28 @@ async def test_syllabus_create_chatbot( }, ], ) -def test_process_extra_state(request_params): +def test_syllabus_process_extra_state(syllabus_consumer, request_params): """Test that the process_extra_state function returns the expected values.""" - consumer = consumers.SyllabusBotHttpConsumer() - assert consumer.process_extra_state(request_params) == { + + assert syllabus_consumer.process_extra_state(request_params) == { "course_id": [request_params.get("course_id")], "collection_name": [request_params.get("collection_name", None)], + "exclude_canvas": ["True"], + } + + +def test_canvas_process_extra_state(syllabus_consumer, async_user): + """Test that the canvas syllabus process_extra_state function returns False for exclude_canvas.""" + consumer = consumers.CanvasSyllabusBotHttpConsumer() + consumer.scope = {"user": async_user, "cookies": {}, "session": None} + consumer.channel_name = "test_syllabus_channel" + + assert consumer.process_extra_state( + {"message": "hello", "course_id": "MITx+6.00.1x"} + ) == { + "course_id": ["MITx+6.00.1x"], + "collection_name": [None], + "exclude_canvas": ["False"], } @@ -440,6 +456,7 @@ async def test_consumer_handle(mocker, mock_http_consumer_send, syllabus_consume extra_state={ "course_id": [payload["course_id"]], "collection_name": [payload["collection_name"]], + "exclude_canvas": ["True"], }, ) assert await UserChatSession.objects.filter( diff --git a/ai_chatbots/factories.py b/ai_chatbots/factories.py index d4be488f..883ea206 100644 --- a/ai_chatbots/factories.py +++ b/ai_chatbots/factories.py @@ -210,6 +210,7 @@ class SyllabusAgentStateFactory(factory.Factory): messages = [factory.SubFactory(HumanMessageFactory)] course_id = [factory.Faker("uuid4")] collection_name = [factory.Faker("word")] + exclude_canvas = ["True"] class Meta: model = SyllabusAgentState diff --git a/ai_chatbots/routing.py b/ai_chatbots/routing.py index 39601f05..dbb4c315 100644 --- a/ai_chatbots/routing.py +++ b/ai_chatbots/routing.py @@ -16,6 +16,11 @@ consumers.SyllabusBotHttpConsumer.as_asgi(), name="syllabus_agent_sse", ), + re_path( + r"http/canvas_syllabus_agent/", + consumers.CanvasSyllabusBotHttpConsumer.as_asgi(), + name="canvas_syllabus_agent_sse", + ), re_path( r"http/video_gpt_agent/", consumers.VideoGPTBotHttpConsumer.as_asgi(), @@ -32,5 +37,5 @@ r"http/tutor_agent/", consumers.TutorBotHttpConsumer.as_asgi(), name="tutor_agent_sse", - ) + ), ] diff --git a/ai_chatbots/tools.py b/ai_chatbots/tools.py index 2534b991..1c0b7d1c 100644 --- a/ai_chatbots/tools.py +++ b/ai_chatbots/tools.py @@ -239,15 +239,22 @@ class VideoGPTToolSchema(pydantic.BaseModel): ) -def _content_file_search(url, params): - log.debug("Searching MIT API with params: %s", params) +def _content_file_search(url, params, *, exclude_canvas=True): try: + # Convert the exclude_canvas parameter to a boolean if it is a string + if exclude_canvas and exclude_canvas == "False": + 
exclude_canvas = False response = request_with_token(url, params, timeout=30) response.raise_for_status() raw_results = response.json().get("results", []) # Simplify the response to only include the main properties simplified_results = [] for result in raw_results: + platform = result.get("platform", {}).get("code") + # Currently, canvas contentfiles have blank platform values, + # those from other sources do not. + if exclude_canvas and (not platform or platform == "canvas"): + continue simplified_result = { "chunk_content": result.get("chunk_content"), "run_title": result.get("run_title"), @@ -275,6 +282,7 @@ def search_content_files( url = settings.AI_MIT_SYLLABUS_URL course_id = state.get("course_id", [None])[-1] or readable_id collection_name = state.get("collection_name", [None])[-1] + exclude_canvas = state.get("exclude_canvas", ["True"])[-1] params = { "q": q, "resource_readable_id": course_id, @@ -282,8 +290,7 @@ def search_content_files( } if collection_name: params["collection_name"] = collection_name - log.info("Searching MIT API with params: %s", params) - return _content_file_search(url, params) + return _content_file_search(url, params, exclude_canvas=exclude_canvas) @tool(args_schema=SearchRelatedCourseContentFilesToolSchema) diff --git a/ai_chatbots/tools_test.py b/ai_chatbots/tools_test.py index 31dd0f3e..25f0214e 100644 --- a/ai_chatbots/tools_test.py +++ b/ai_chatbots/tools_test.py @@ -20,6 +20,17 @@ def mock_get_resources(mocker, search_results): ) +@pytest.fixture +def mock_get_content_files(mocker, content_chunk_results): + """Mock resource requests.get for all tests.""" + return mocker.patch( + "ai_chatbots.tools.requests.get", + return_value=mocker.Mock( + json=mocker.Mock(return_value=content_chunk_results), status_code=200 + ), + ) + + @pytest.mark.parametrize( "params", [ @@ -112,14 +123,14 @@ def test_request_exception(mocker): @pytest.mark.parametrize("no_collection_name", [True, False]) def test_search_content_files( # noqa: PLR0913 settings, - mock_get_resources, + mock_get_content_files, syllabus_agent_state, - search_results, + content_chunk_results, search_url, limit, no_collection_name, ): - """Test that the search_courses tool returns expected results w/expected params.""" + """Test that the search_content_files tool returns expected results w/expected params.""" settings.AI_MIT_SYLLABUS_URL = search_url settings.AI_MIT_CONTENT_SEARCH_LIMIT = limit settings.LEARN_ACCESS_TOKEN = "test_token" # noqa: S105 @@ -136,10 +147,35 @@ def test_search_content_files( # noqa: PLR0913 results = json.loads( search_content_files.invoke({"q": "main topics", "state": syllabus_agent_state}) ) - mock_get_resources.assert_called_once_with( + mock_get_content_files.assert_called_once_with( search_url, params=expected_params, headers={"Authorization": f"Bearer {settings.LEARN_ACCESS_TOKEN}"}, timeout=30, ) - assert len(results["results"]) == len(search_results["results"]) + assert len(results["results"]) == len(content_chunk_results["results"]) + + +@pytest.mark.parametrize("exclude_canvas", [True, False]) +def test_search_canvas_content_files( + settings, mocker, syllabus_agent_state, content_chunk_results, exclude_canvas +): + """Test that search_content_files returns canvas results only if exclude_canvas is False.""" + settings.LEARN_ACCESS_TOKEN = "test_token" # noqa: S105 + + syllabus_agent_state["exclude_canvas"] = [str(exclude_canvas)] + for result in content_chunk_results["results"]: + result["platform"]["code"] = "canvas" + mocker.patch( + 
"ai_chatbots.tools.requests.get", + return_value=mocker.Mock( + json=mocker.Mock(return_value=content_chunk_results), status_code=200 + ), + ) + results = json.loads( + search_content_files.invoke({"q": "main topics", "state": syllabus_agent_state}) + ) + + assert len(results["results"]) == ( + len(content_chunk_results["results"]) if not exclude_canvas else 0 + ) diff --git a/config/apisix/apisix.yaml b/config/apisix/apisix.yaml index 97907b8d..ed6d4d64 100644 --- a/config/apisix/apisix.yaml +++ b/config/apisix/apisix.yaml @@ -6,37 +6,24 @@ upstreams: routes: - id: 1 - name: "websocket" - desc: "Special handling for websocket URLs." - priority: 1 + name: "canvas_syllabus_agent" + desc: "Protected route for canvas syllabus agent - requires canvas_token header" + priority: 20 upstream_id: 1 - enable_websocket: true + uri: "/http/canvas_syllabus_agent/" plugins: - openid-connect: - client_id: ${{KEYCLOAK_CLIENT_ID}} - client_secret: ${{KEYCLOAK_CLIENT_SECRET}} - discovery: ${{KEYCLOAK_DISCOVERY_URL}} - realm: ${{KEYCLOAK_REALM}} - scope: ${{KEYCLOAK_SCOPES}} - bearer_only: false - introspection_endpoint_auth_method: "client_secret_post" - ssl_verify: false - session: - secret: ${{APISIX_SESSION_SECRET_KEY}} - logout_path: "/logout" - post_logout_redirect_uri: ${{APISIX_LOGOUT_URL}} - unauth_action: "pass" + key-auth: + header: "canvas_token" + _meta: + disable: false + consumer-restriction: + whitelist: + - "canvas_agent" cors: allow_origins: "**" allow_methods: "**" allow_headers: "**" allow_credential: true - response-rewrite: - headers: - set: - Referrer-Policy: "origin" - uris: - - "/ws/*" - id: 2 name: "passauth" desc: "Wildcard route that can use auth but doesn't require it." @@ -108,4 +95,10 @@ routes: uris: - "/admin/login/*" - "/http/login/" + +consumers: + - username: "canvas_agent" + plugins: + key-auth: + key: ${{CANVAS_AI_TOKEN}} #END diff --git a/docker-compose.apps.yml b/docker-compose.apps.yml index 02ca92ae..1fb28332 100644 --- a/docker-compose.apps.yml +++ b/docker-compose.apps.yml @@ -39,7 +39,7 @@ services: watch: working_dir: /src - image: node:22.15 + image: node:22.17 entrypoint: ["/bin/sh", "-c"] command: - | diff --git a/docker-compose.services.yml b/docker-compose.services.yml index 5bbe8fcc..07a6dfee 100644 --- a/docker-compose.services.yml +++ b/docker-compose.services.yml @@ -28,7 +28,7 @@ services: - ./config/postgres:/docker-entrypoint-initdb.d redis: - image: redis:7.4.4 + image: redis:7.4.5 healthcheck: test: ["CMD", "redis-cli", "ping", "|", "grep", "PONG"] interval: 3s diff --git a/env/backend.env b/env/backend.env index 6b844818..7825ba71 100644 --- a/env/backend.env +++ b/env/backend.env @@ -37,6 +37,7 @@ NGINX_UWSGI_PASS=web:8001 # APISIX settings APISIX_LOGOUT_URL=http://ai.open.odl.local:8003/ APISIX_SESSION_SECRET_KEY= +CANVAS_AI_TOKEN=3f8a7c2e1b9d4e5f6a0b7c8d9e2f1a3b # pragma: allowlist-secret KEYCLOAK_REALM=ol-local KEYCLOAK_CLIENT_ID=apisix # This is not a secret. This is for the pack-in Keycloak, only for local use. 
diff --git a/frontend-demo/.nvmrc b/frontend-demo/.nvmrc index b8ffd707..fc37597b 100644 --- a/frontend-demo/.nvmrc +++ b/frontend-demo/.nvmrc @@ -1 +1 @@ -22.15.0 +22.17.0 diff --git a/frontend-demo/package.json b/frontend-demo/package.json index e32ee524..5f5aa729 100644 --- a/frontend-demo/package.json +++ b/frontend-demo/package.json @@ -29,7 +29,7 @@ "axios": "^1.7.7", "better-react-mathjax": "^2.3.0", "formik": "^2.4.6", - "next": "15.3.4", + "next": "15.3.5", "react": "19.1.0", "react-dom": "19.1.0", "tiny-invariant": "^1.3.3", @@ -54,7 +54,7 @@ "@typescript-eslint/typescript-estree": "^8.13.0", "eslint": "^8", "eslint-config-mitodl": "^2.1.0", - "eslint-config-next": "15.3.4", + "eslint-config-next": "15.3.5", "eslint-config-prettier": "^10.0.0", "eslint-import-resolver-typescript": "^4.0.0", "eslint-plugin-import": "^2.29.1", diff --git a/frontend-demo/yarn.lock b/frontend-demo/yarn.lock index 157a908f..0fcc4c32 100644 --- a/frontend-demo/yarn.lock +++ b/frontend-demo/yarn.lock @@ -1600,12 +1600,12 @@ __metadata: languageName: node linkType: hard -"@next/eslint-plugin-next@npm:15.3.4": - version: 15.3.4 - resolution: "@next/eslint-plugin-next@npm:15.3.4" +"@next/eslint-plugin-next@npm:15.3.5": + version: 15.3.5 + resolution: "@next/eslint-plugin-next@npm:15.3.5" dependencies: fast-glob: "npm:3.3.1" - checksum: 10c0/17792484ee19550bc04167a212426b7907daaf3920546976a002261779c905fcb75ac05964359d78ffe5e6f5173f85237a1bdb11d6c07e5f3c4d6bf0b8d4bc3f + checksum: 10c0/fb86ead7d0c0b47cd670d866e4c1dfd28d288f30d2db8756a57b5c63a0e47db874202e69e5cf7990815a1e75fb012876acc602d90b43ad4eeeb2b167e20949b6 languageName: node linkType: hard @@ -4531,11 +4531,11 @@ __metadata: languageName: node linkType: hard -"eslint-config-next@npm:15.3.4": - version: 15.3.4 - resolution: "eslint-config-next@npm:15.3.4" +"eslint-config-next@npm:15.3.5": + version: 15.3.5 + resolution: "eslint-config-next@npm:15.3.5" dependencies: - "@next/eslint-plugin-next": "npm:15.3.4" + "@next/eslint-plugin-next": "npm:15.3.5" "@rushstack/eslint-patch": "npm:^1.10.3" "@typescript-eslint/eslint-plugin": "npm:^5.4.2 || ^6.0.0 || ^7.0.0 || ^8.0.0" "@typescript-eslint/parser": "npm:^5.4.2 || ^6.0.0 || ^7.0.0 || ^8.0.0" @@ -4551,7 +4551,7 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 10c0/f7ad0760e4a848ed0ce5543c4d9efea02cd70b49e123875cd87b7c791e72c2d8671988ed103f7468394e90c5361978aafae67bbbb60bd87acb1a83ccbd5f9636 + checksum: 10c0/fae92e297d834202ed327873946d279bda458041db663f1c7f4bbe7426af54a4de3c8f4b6a3193d2bebee32e48e138de044e4f05668e30c2fe0822e413f08117 languageName: node linkType: hard @@ -10457,7 +10457,7 @@ __metadata: better-react-mathjax: "npm:^2.3.0" eslint: "npm:^8" eslint-config-mitodl: "npm:^2.1.0" - eslint-config-next: "npm:15.3.4" + eslint-config-next: "npm:15.3.5" eslint-config-prettier: "npm:^10.0.0" eslint-import-resolver-typescript: "npm:^4.0.0" eslint-plugin-import: "npm:^2.29.1" diff --git a/main/authentication_test.py b/main/authentication_test.py index d66e9f72..21d3884b 100644 --- a/main/authentication_test.py +++ b/main/authentication_test.py @@ -1,8 +1,7 @@ """Tests for authentication""" - +import zoneinfo from datetime import datetime, timedelta -import pytz from django.conf import settings from rest_framework_jwt.settings import api_settings @@ -76,7 +75,7 @@ def test_ignore_expired_jwt_authentication_valid(rf, user): def test_ignore_expired_jwt_authentication_expired(rf, user): """Tests that IgnoreExpiredJwtAuthentication returns None if token is expired""" payload = 
jwt_payload_handler(user) - payload["exp"] = datetime.now(tz=pytz.timezone(settings.TIME_ZONE)) - timedelta( + payload["exp"] = datetime.now(tz=zoneinfo.ZoneInfo(settings.TIME_ZONE)) - timedelta( seconds=100 ) token = jwt_encode_handler(payload) diff --git a/main/migrations/0002_canvas_syllabus_rate_limit.py b/main/migrations/0002_canvas_syllabus_rate_limit.py new file mode 100644 index 00000000..331b2dfa --- /dev/null +++ b/main/migrations/0002_canvas_syllabus_rate_limit.py @@ -0,0 +1,32 @@ +# Generated by Django 4.2.18 on 2025-02-09 11:50 + +from django.conf import settings +from django.db import migrations + + +def set_canvas_rate_limits(apps, schema_editor): + """Populate initial rate limits for consumers""" + ConsumerThrottleLimit = apps.get_model("main", "ConsumerThrottleLimit") + ConsumerThrottleLimit.objects.get_or_create( + throttle_key="canvas_syllabus_bot", + defaults={ + "auth_limit": 1000, + "anon_limit": 1000, + "interval": "day", + }, + ) + + +class Migration(migrations.Migration): + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("main", "0001_initial"), + ] + + operations = [ + migrations.RunPython( + set_canvas_rate_limits, reverse_code=migrations.RunPython.noop + ) + ] diff --git a/main/settings.py b/main/settings.py index 54e4356c..7b8d4b9d 100644 --- a/main/settings.py +++ b/main/settings.py @@ -31,7 +31,7 @@ from main.sentry import init_sentry from openapi.settings_spectacular import open_spectacular_settings -VERSION = "0.10.2" +VERSION = "0.11.0" log = logging.getLogger() diff --git a/nginx/Dockerfile b/nginx/Dockerfile index 141fe7fb..cef5fb12 100644 --- a/nginx/Dockerfile +++ b/nginx/Dockerfile @@ -2,7 +2,7 @@ # it's primary purpose is to emulate heroku-buildpack-nginx's # functionality that compiles config/nginx.conf.erb # See https://github.com/heroku/heroku-buildpack-nginx/blob/fefac6c569f28182b3459cb8e34b8ccafc403fde/bin/start-nginx -FROM nginx:1.28.0 +FROM nginx:1.29.0 # Logs are configured to a relatic path under /etc/nginx # but the container expects /var/log diff --git a/poetry.lock b/poetry.lock index 61fbafa9..af4b6dc6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1722,25 +1722,26 @@ django = ">=3.2" [[package]] name = "django-health-check" -version = "3.18.4.dev18+g9cfe2ea" -description = "Run checks on services like databases, queue servers, celery processes, etc." +version = "3.20.1.dev2+g5267d8f" +description = "Monitor the health of your Django app and its connected services." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [] develop = false [package.dependencies] -django = ">=2.2" +Django = ">=4.2" [package.extras] docs = ["sphinx"] +lint = ["ruff (==0.12.1)"] test = ["boto3", "celery", "django-storages", "pytest", "pytest-cov", "pytest-django", "redis"] [package.source] type = "git" url = "https://github.com/revsys/django-health-check" -reference = "9cfe2eaec5a15219513a36210b34875c03c64fe4" -resolved_reference = "9cfe2eaec5a15219513a36210b34875c03c64fe4" +reference = "5267d8fd68b1ee307637d6ffca6d6d12cc744cdc" +resolved_reference = "5267d8fd68b1ee307637d6ffca6d6d12cc744cdc" [[package]] name = "django-hijack" @@ -1972,13 +1973,13 @@ email = ["email-validator (>=2.1,<3.0)"] [[package]] name = "e2b" -version = "1.5.2" +version = "1.5.6" description = "E2B SDK that give agents cloud environments" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "e2b-1.5.2-py3-none-any.whl", hash = "sha256:8cf755f2ff04098daa7ac778f768eee1df730a6181637fe124210345999890b3"}, - {file = "e2b-1.5.2.tar.gz", hash = "sha256:29ed891ae04ffafff1744c57eff55901200f15030d34ac3fe76d6672e2bf7845"}, + {file = "e2b-1.5.6-py3-none-any.whl", hash = "sha256:ca567af466bb370bef01cb2211ba231dbbe743137387c2a40cecf8819d6f9535"}, + {file = "e2b-1.5.6.tar.gz", hash = "sha256:05da24b27d7a855edd374935c47a9e9faa9b4c3cc41a039a0ac3ee1cebedccf9"}, ] [package.dependencies] @@ -1992,18 +1993,18 @@ typing-extensions = ">=4.1.0" [[package]] name = "e2b-code-interpreter" -version = "1.5.1" +version = "1.5.2" description = "E2B Code Interpreter - Stateful code execution" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "e2b_code_interpreter-1.5.1-py3-none-any.whl", hash = "sha256:c8ee6f77bcb9c53422df336abbd37d5bf6318c3967b87444b39e3428a54c5e08"}, - {file = "e2b_code_interpreter-1.5.1.tar.gz", hash = "sha256:e39485dd2ffb148a902e8c05c8f573feeb7ca87f8498f02a4db65630e76364e1"}, + {file = "e2b_code_interpreter-1.5.2-py3-none-any.whl", hash = "sha256:5c3188d8f25226b28fef4b255447cc6a4c36afb748bdd5180b45be486d5169f3"}, + {file = "e2b_code_interpreter-1.5.2.tar.gz", hash = "sha256:3bd6ea70596290e85aaf0a2f19f28bf37a5e73d13086f5e6a0080bb591c5a547"}, ] [package.dependencies] attrs = ">=21.3.0" -e2b = ">=1.4.0,<2.0.0" +e2b = ">=1.5.4,<2.0.0" httpx = ">=0.20.0,<1.0.0" [[package]] @@ -3917,13 +3918,13 @@ orjson = ">=3.10.1" [[package]] name = "langmem" -version = "0.0.27" +version = "0.0.28" description = "Prebuilt utilities for memory management and retrieval." 
optional = false python-versions = ">=3.10" files = [ - {file = "langmem-0.0.27-py3-none-any.whl", hash = "sha256:25e9f06ad7c420442cf4b62caff6f805b124dfb2e2cc9cacc464d7a455fbafda"}, - {file = "langmem-0.0.27.tar.gz", hash = "sha256:729c1eb77c4cd8d9f2285f908a68a1e622ef01f074eeeb8cbbc7343f296efc53"}, + {file = "langmem-0.0.28-py3-none-any.whl", hash = "sha256:9b2a145f8ff61bdf3158e8ce46f4b76d6a946f9cd21f6dc51b0db95cae718033"}, + {file = "langmem-0.0.28.tar.gz", hash = "sha256:625f76b457e6f052545a5949a7281c0ebb98d0eb78c68a29a6dce7e3e402f1a3"}, ] [package.dependencies] @@ -4792,13 +4793,13 @@ sympy = "*" [[package]] name = "open-learning-ai-tutor" -version = "0.2.3" +version = "0.2.4" description = "AI powered tutor" optional = false python-versions = "~=3.12" files = [ - {file = "open_learning_ai_tutor-0.2.3-py3-none-any.whl", hash = "sha256:15a047d4d0fcac68f95654ff35589f5864824ea8ec4fae66dc84e28afe682f60"}, - {file = "open_learning_ai_tutor-0.2.3.tar.gz", hash = "sha256:53c043f499f81a1819a2b8db0ea21eae3e17632e5eea60898733ca658b4dacf7"}, + {file = "open_learning_ai_tutor-0.2.4-py3-none-any.whl", hash = "sha256:46cf2d3551c17a2db115abc228425c41a87e9f55d0094090494d400ed8c101ea"}, + {file = "open_learning_ai_tutor-0.2.4.tar.gz", hash = "sha256:875755254154dd7f646bb7cb5c75ad9b6839721961c8bd4f618d99d45ded8eaf"}, ] [package.dependencies] @@ -7076,29 +7077,29 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] name = "ruff" -version = "0.11.11" +version = "0.12.3" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.11.11-py3-none-linux_armv6l.whl", hash = "sha256:9924e5ae54125ed8958a4f7de320dab7380f6e9fa3195e3dc3b137c6842a0092"}, - {file = "ruff-0.11.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8a93276393d91e952f790148eb226658dd275cddfde96c6ca304873f11d2ae4"}, - {file = "ruff-0.11.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d6e333dbe2e6ae84cdedefa943dfd6434753ad321764fd937eef9d6b62022bcd"}, - {file = "ruff-0.11.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7885d9a5e4c77b24e8c88aba8c80be9255fa22ab326019dac2356cff42089fc6"}, - {file = "ruff-0.11.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b5ab797fcc09121ed82e9b12b6f27e34859e4227080a42d090881be888755d4"}, - {file = "ruff-0.11.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e231ff3132c1119ece836487a02785f099a43992b95c2f62847d29bace3c75ac"}, - {file = "ruff-0.11.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a97c9babe1d4081037a90289986925726b802d180cca784ac8da2bbbc335f709"}, - {file = "ruff-0.11.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8c4ddcbe8a19f59f57fd814b8b117d4fcea9bee7c0492e6cf5fdc22cfa563c8"}, - {file = "ruff-0.11.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6224076c344a7694c6fbbb70d4f2a7b730f6d47d2a9dc1e7f9d9bb583faf390b"}, - {file = "ruff-0.11.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:882821fcdf7ae8db7a951df1903d9cb032bbe838852e5fc3c2b6c3ab54e39875"}, - {file = "ruff-0.11.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:dcec2d50756463d9df075a26a85a6affbc1b0148873da3997286caf1ce03cae1"}, - {file = "ruff-0.11.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:99c28505ecbaeb6594701a74e395b187ee083ee26478c1a795d35084d53ebd81"}, - {file = "ruff-0.11.11-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:9263f9e5aa4ff1dec765e99810f1cc53f0c868c5329b69f13845f699fe74f639"}, - {file = "ruff-0.11.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:64ac6f885e3ecb2fdbb71de2701d4e34526651f1e8503af8fb30d4915a3fe345"}, - {file = "ruff-0.11.11-py3-none-win32.whl", hash = "sha256:1adcb9a18802268aaa891ffb67b1c94cd70578f126637118e8099b8e4adcf112"}, - {file = "ruff-0.11.11-py3-none-win_amd64.whl", hash = "sha256:748b4bb245f11e91a04a4ff0f96e386711df0a30412b9fe0c74d5bdc0e4a531f"}, - {file = "ruff-0.11.11-py3-none-win_arm64.whl", hash = "sha256:6c51f136c0364ab1b774767aa8b86331bd8e9d414e2d107db7a2189f35ea1f7b"}, - {file = "ruff-0.11.11.tar.gz", hash = "sha256:7774173cc7c1980e6bf67569ebb7085989a78a103922fb83ef3dfe230cd0687d"}, + {file = "ruff-0.12.3-py3-none-linux_armv6l.whl", hash = "sha256:47552138f7206454eaf0c4fe827e546e9ddac62c2a3d2585ca54d29a890137a2"}, + {file = "ruff-0.12.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:0a9153b000c6fe169bb307f5bd1b691221c4286c133407b8827c406a55282041"}, + {file = "ruff-0.12.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fa6b24600cf3b750e48ddb6057e901dd5b9aa426e316addb2a1af185a7509882"}, + {file = "ruff-0.12.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2506961bf6ead54887ba3562604d69cb430f59b42133d36976421bc8bd45901"}, + {file = "ruff-0.12.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4faaff1f90cea9d3033cbbcdf1acf5d7fb11d8180758feb31337391691f3df0"}, + {file = "ruff-0.12.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40dced4a79d7c264389de1c59467d5d5cefd79e7e06d1dfa2c75497b5269a5a6"}, + {file = "ruff-0.12.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0262d50ba2767ed0fe212aa7e62112a1dcbfd46b858c5bf7bbd11f326998bafc"}, + {file = "ruff-0.12.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12371aec33e1a3758597c5c631bae9a5286f3c963bdfb4d17acdd2d395406687"}, + {file = "ruff-0.12.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:560f13b6baa49785665276c963edc363f8ad4b4fc910a883e2625bdb14a83a9e"}, + {file = "ruff-0.12.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023040a3499f6f974ae9091bcdd0385dd9e9eb4942f231c23c57708147b06311"}, + {file = "ruff-0.12.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:883d844967bffff5ab28bba1a4d246c1a1b2933f48cb9840f3fdc5111c603b07"}, + {file = "ruff-0.12.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2120d3aa855ff385e0e562fdee14d564c9675edbe41625c87eeab744a7830d12"}, + {file = "ruff-0.12.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6b16647cbb470eaf4750d27dddc6ebf7758b918887b56d39e9c22cce2049082b"}, + {file = "ruff-0.12.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e1417051edb436230023575b149e8ff843a324557fe0a265863b7602df86722f"}, + {file = "ruff-0.12.3-py3-none-win32.whl", hash = "sha256:dfd45e6e926deb6409d0616078a666ebce93e55e07f0fb0228d4b2608b2c248d"}, + {file = "ruff-0.12.3-py3-none-win_amd64.whl", hash = "sha256:a946cf1e7ba3209bdef039eb97647f1c77f6f540e5845ec9c114d3af8df873e7"}, + {file = "ruff-0.12.3-py3-none-win_arm64.whl", hash = "sha256:5f9c7c9c8f84c2d7f27e93674d27136fbf489720251544c4da7fb3d742e011b1"}, + {file = "ruff-0.12.3.tar.gz", hash = "sha256:f1b5a4b6668fd7b7ea3697d8d98857390b40c1320a63a178eee6be0899ea2d77"}, ] [[package]] @@ -7526,13 +7527,13 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "starlette" -version = "0.46.2" +version = "0.47.1" description = "The 
little ASGI library that shines." optional = false python-versions = ">=3.9" files = [ - {file = "starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35"}, - {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"}, + {file = "starlette-0.47.1-py3-none-any.whl", hash = "sha256:5e11c9f5c7c3f24959edbf2dffdc01bba860228acf657129467d8a7468591527"}, + {file = "starlette-0.47.1.tar.gz", hash = "sha256:aef012dd2b6be325ffa16698f9dc533614fb1cebd593a906b90dc1025529a79b"}, ] [package.dependencies] @@ -7925,13 +7926,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.34.2" +version = "0.35.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.9" files = [ - {file = "uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403"}, - {file = "uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328"}, + {file = "uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a"}, + {file = "uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01"}, ] [package.dependencies] @@ -7941,12 +7942,12 @@ h11 = ">=0.8" httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standard\""} python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +uvloop = {version = ">=0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "uvloop" @@ -8805,5 +8806,5 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" -python-versions = "3.13.3" -content-hash = "087da9ccec47322e351cb49a32a4a52c3bd79b71fb17fbf4e41dac67cd09ba68" +python-versions = "3.13.5" +content-hash = "9d1631a2c3a93600e61bd4fcea9c384b6dfaec90545fbf6a95c1d7e5236053a8" diff --git a/pyproject.toml b/pyproject.toml index 89f74faa..3a0714e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ authors = ["MIT ODL"] [tool.poetry.dependencies] -python = "3.13.3" +python = "3.13.5" Django = "4.2.23" boto3 = "^1.35.81" celery = "^5.3.1" @@ -22,7 +22,7 @@ django-bitfield = "^2.2.0" django-cache-memoize = "^0.2.0" django-filter = "^25.1" django-guardian = "^3.0.0" -django-health-check = { git = "https://github.com/revsys/django-health-check", rev="9cfe2eaec5a15219513a36210b34875c03c64fe4" } # pragma: allowlist secret +django-health-check = { git = 
"https://github.com/revsys/django-health-check", rev="5267d8fd68b1ee307637d6ffca6d6d12cc744cdc" } # pragma: allowlist secret django-hijack = "^3.4.1" django-ipware = "^7.0.0" django-oauth-toolkit = "^2.3.0" @@ -58,16 +58,16 @@ psycopg-pool = "^3.2.4" pygithub = "^2.0.0" redis = "^5.0.0" requests = "^2.31.0" -ruff = "0.11.11" +ruff = "0.12.3" sentry-sdk = "^2.13.0" social-auth-app-django = "^5.2.0" social-auth-core = {extras = ["openidconnect"], version = "^4.4.2"} -starlette = "0.46.2" +starlette = "0.47.1" ulid-py = "^1.0.0" -uvicorn = {extras = ["standard"], version = "^0.34.0"} -langmem = "^0.0.27" +uvicorn = {extras = ["standard"], version = "^0.35.0"} +langmem = "^0.0.28" beautifulsoup4 = "^4.13.4" -open-learning-ai-tutor = "^0.2.3" +open-learning-ai-tutor = "^0.2.4" [tool.poetry.group.dev.dependencies] bpython = "^0.25" @@ -85,7 +85,7 @@ pytest-env = "^1.0.0" pytest-freezegun = "^0.4.2" pytest-mock = "^3.10.0" responses = "^0.25.0" -ruff = "^0.11.0" +ruff = "^0.12.0" safety = "^3.0.0" semantic-version = "^2.10.0" freezegun = "^1.4.0"