diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml new file mode 100644 index 00000000..989f7387 --- /dev/null +++ b/.github/workflows/pylint.yml @@ -0,0 +1,22 @@ +name: Pylint + +on: [push] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11"] + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r ClientAdvisor/App/requirements.txt + - name: Run flake8 + run: flake8 --config=ClientAdvisor/App/.flake8 ClientAdvisor/App diff --git a/.github/workflows/test_client_advisor.yml b/.github/workflows/test_client_advisor.yml new file mode 100644 index 00000000..52625e1a --- /dev/null +++ b/.github/workflows/test_client_advisor.yml @@ -0,0 +1,51 @@ +name: Unit Tests - Client Advisor + +on: + push: + branches: [main, dev, unit_test] + # Trigger on changes in these specific paths + paths: + - 'ClientAdvisor/**' + pull_request: + branches: [main, dev] + types: + - opened + - ready_for_review + - reopened + - synchronize + paths: + - 'ClientAdvisor/**' + +jobs: + test_client_advisor: + + name: Client Advisor Tests + runs-on: ubuntu-latest + # The paths filters above ensure this job only runs when files under ClientAdvisor change + + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + - name: Install Backend Dependencies + run: | + cd ClientAdvisor/App + python -m pip install -r requirements.txt + python -m pip install coverage + - name: Run Backend Tests with Coverage + run: | + cd ClientAdvisor/App + python -m pytest -vv --cov=.
--cov-report=xml --cov-report=html --cov-report=term-missing --cov-fail-under=80 --junitxml=coverage-junit.xml + - uses: actions/upload-artifact@v4 + with: + name: client-advisor-coverage + path: | + ClientAdvisor/App/coverage.xml + ClientAdvisor/App/coverage-junit.xml + ClientAdvisor/App/htmlcov/ + - name: Set up Node.js + uses: actions/setup-node@v3 + with: + node-version: '20' \ No newline at end of file diff --git a/ClientAdvisor/App/.flake8 b/ClientAdvisor/App/.flake8 new file mode 100644 index 00000000..74ee71d5 --- /dev/null +++ b/ClientAdvisor/App/.flake8 @@ -0,0 +1,4 @@ +[flake8] +max-line-length = 88 +extend-ignore = E501, E203 +exclude = .venv, frontend, \ No newline at end of file diff --git a/ClientAdvisor/App/app.py b/ClientAdvisor/App/app.py index ff564755..07790479 100644 --- a/ClientAdvisor/App/app.py +++ b/ClientAdvisor/App/app.py @@ -1,43 +1,29 @@ import copy import json -import os import logging +import os +import time import uuid -from dotenv import load_dotenv +from types import SimpleNamespace + import httpx -import time import requests -import pymssql -from types import SimpleNamespace -from db import get_connection -from quart import ( - Blueprint, - Quart, - jsonify, - make_response, - request, - send_from_directory, - render_template, - session -) +from azure.identity.aio import (DefaultAzureCredential, + get_bearer_token_provider) +from dotenv import load_dotenv # from quart.sessions import SecureCookieSessionInterface from openai import AsyncAzureOpenAI -from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider -from backend.auth.auth_utils import get_authenticated_user_details, get_tenantid +from quart import (Blueprint, Quart, jsonify, make_response, render_template, + request, send_from_directory) + +from backend.auth.auth_utils import (get_authenticated_user_details, + get_tenantid) from backend.history.cosmosdbservice import CosmosConversationClient -# from flask import Flask -# from flask_cors import CORS -import secrets - -from backend.utils import ( - format_as_ndjson, - format_stream_response, - generateFilterString, - parse_multi_columns, - format_non_streaming_response, - convert_to_pf_format, - format_pf_non_streaming_response, -) +from backend.utils import (convert_to_pf_format, format_as_ndjson, + format_pf_non_streaming_response, + format_stream_response, generateFilterString, + parse_multi_columns) +from db import get_connection bp = Blueprint("routes", __name__, static_folder="static", template_folder="static") @@ -65,11 +51,11 @@ def create_app(): app = Quart(__name__) - app.register_blueprint(bp) + app.config["PROVIDE_AUTOMATIC_OPTIONS"] = True app.config["TEMPLATES_AUTO_RELOAD"] = True + app.register_blueprint(bp) # app.secret_key = secrets.token_hex(16) # app.session_interface = SecureCookieSessionInterface() - # print(app.secret_key) return app @@ -297,6 +283,7 @@ async def assets(path): VITE_POWERBI_EMBED_URL = os.environ.get("VITE_POWERBI_EMBED_URL") + def should_use_data(): global DATASOURCE_TYPE if AZURE_SEARCH_SERVICE and AZURE_SEARCH_INDEX: @@ -762,16 +749,18 @@ def prepare_model_args(request_body, request_headers): messages.append({"role": message["role"], "content": message["content"]}) user_json = None - if (MS_DEFENDER_ENABLED): + if MS_DEFENDER_ENABLED: authenticated_user_details = get_authenticated_user_details(request_headers) tenantId = get_tenantid(authenticated_user_details.get("client_principal_b64")) - conversation_id = request_body.get("conversation_id", None) + conversation_id = 
request_body.get("conversation_id", None) user_args = { - "EndUserId": authenticated_user_details.get('user_principal_id'), - "EndUserIdType": 'Entra', + "EndUserId": authenticated_user_details.get("user_principal_id"), + "EndUserIdType": "Entra", "EndUserTenantId": tenantId, "ConversationId": conversation_id, - "SourceIp": request_headers.get('X-Forwarded-For', request_headers.get('Remote-Addr', '')), + "SourceIp": request_headers.get( + "X-Forwarded-For", request_headers.get("Remote-Addr", "") + ), } user_json = json.dumps(user_args) @@ -831,6 +820,7 @@ def prepare_model_args(request_body, request_headers): return model_args + async def promptflow_request(request): try: headers = { @@ -864,70 +854,77 @@ async def promptflow_request(request): logging.error(f"An error occurred while making promptflow_request: {e}") - async def send_chat_request(request_body, request_headers): filtered_messages = [] messages = request_body.get("messages", []) for message in messages: - if message.get("role") != 'tool': + if message.get("role") != "tool": filtered_messages.append(message) - - request_body['messages'] = filtered_messages + + request_body["messages"] = filtered_messages model_args = prepare_model_args(request_body, request_headers) try: azure_openai_client = init_openai_client() - raw_response = await azure_openai_client.chat.completions.with_raw_response.create(**model_args) + raw_response = ( + await azure_openai_client.chat.completions.with_raw_response.create( + **model_args + ) + ) response = raw_response.parse() - apim_request_id = raw_response.headers.get("apim-request-id") + apim_request_id = raw_response.headers.get("apim-request-id") except Exception as e: logging.exception("Exception in send_chat_request") raise e return response, apim_request_id + async def complete_chat_request(request_body, request_headers): if USE_PROMPTFLOW and PROMPTFLOW_ENDPOINT and PROMPTFLOW_API_KEY: response = await promptflow_request(request_body) history_metadata = request_body.get("history_metadata", {}) return format_pf_non_streaming_response( - response, history_metadata, PROMPTFLOW_RESPONSE_FIELD_NAME, PROMPTFLOW_CITATIONS_FIELD_NAME + response, + history_metadata, + PROMPTFLOW_RESPONSE_FIELD_NAME, + PROMPTFLOW_CITATIONS_FIELD_NAME, ) elif USE_AZUREFUNCTION: request_body = await request.get_json() - client_id = request_body.get('client_id') + client_id = request_body.get("client_id") print(request_body) if client_id is None: return jsonify({"error": "No client ID provided"}), 400 # client_id = '10005' print("Client ID in complete_chat_request: ", client_id) - answer = "Sample response from Azure Function" - # Construct the URL of your Azure Function endpoint - function_url = STREAMING_AZUREFUNCTION_ENDPOINT - - request_headers = { - 'Content-Type': 'application/json', - # 'Authorization': 'Bearer YOUR_TOKEN_HERE' # if applicable - } + # answer = "Sample response from Azure Function" + # Construct the URL of your Azure Function endpoint + # function_url = STREAMING_AZUREFUNCTION_ENDPOINT + # request_headers = { + # "Content-Type": "application/json", + # # 'Authorization': 'Bearer YOUR_TOKEN_HERE' # if applicable + # } # print(request_body.get("messages")[-1].get("content")) # print(request_body) query = request_body.get("messages")[-1].get("content") - print("Selected ClientId:", client_id) # print("Selected ClientName:", selected_client_name) # endpoint = STREAMING_AZUREFUNCTION_ENDPOINT + '?query=' + query + ' - for Client ' + selected_client_name + ':::' + selected_client_id - endpoint = 
STREAMING_AZUREFUNCTION_ENDPOINT + '?query=' + query + ':::' + client_id + endpoint = ( + STREAMING_AZUREFUNCTION_ENDPOINT + "?query=" + query + ":::" + client_id + ) print("Endpoint: ", endpoint) - query_response = '' + query_response = "" try: - with requests.get(endpoint,stream=True) as r: + with requests.get(endpoint, stream=True) as r: for line in r.iter_lines(chunk_size=10): # query_response += line.decode('utf-8') - query_response = query_response + '\n' + line.decode('utf-8') + query_response = query_response + "\n" + line.decode("utf-8") # print(line.decode('utf-8')) except Exception as e: print(format_as_ndjson({"error" + str(e)})) @@ -940,11 +937,9 @@ async def complete_chat_request(request_body, request_headers): "model": "", "created": 0, "object": "", - "choices": [{ - "messages": [] - }], + "choices": [{"messages": []}], "apim-request-id": "", - 'history_metadata': history_metadata + "history_metadata": history_metadata, } response["id"] = str(uuid.uuid4()) @@ -952,76 +947,83 @@ async def complete_chat_request(request_body, request_headers): response["created"] = int(time.time()) response["object"] = "extensions.chat.completion.chunk" # response["apim-request-id"] = headers.get("apim-request-id") - response["choices"][0]["messages"].append({ - "role": "assistant", - "content": query_response - }) - + response["choices"][0]["messages"].append( + {"role": "assistant", "content": query_response} + ) return response + async def stream_chat_request(request_body, request_headers): if USE_AZUREFUNCTION: history_metadata = request_body.get("history_metadata", {}) function_url = STREAMING_AZUREFUNCTION_ENDPOINT - apim_request_id = '' - - client_id = request_body.get('client_id') + apim_request_id = "" + + client_id = request_body.get("client_id") if client_id is None: return jsonify({"error": "No client ID provided"}), 400 query = request_body.get("messages")[-1].get("content") async def generate(): - deltaText = '' - #async for completionChunk in response: + deltaText = "" + # async for completionChunk in response: timeout = httpx.Timeout(10.0, read=None) - async with httpx.AsyncClient(verify=False,timeout=timeout) as client: # verify=False for development purposes - query_url = function_url + '?query=' + query + ':::' + client_id - async with client.stream('GET', query_url) as response: + async with httpx.AsyncClient( + verify=False, timeout=timeout + ) as client: # verify=False for development purposes + query_url = function_url + "?query=" + query + ":::" + client_id + async with client.stream("GET", query_url) as response: async for chunk in response.aiter_text(): - deltaText = '' + deltaText = "" deltaText = chunk completionChunk1 = { "id": "", "model": "", "created": 0, "object": "", - "choices": [{ - "messages": [], - "delta": {} - }], + "choices": [{"messages": [], "delta": {}}], "apim-request-id": "", - 'history_metadata': history_metadata + "history_metadata": history_metadata, } completionChunk1["id"] = str(uuid.uuid4()) completionChunk1["model"] = AZURE_OPENAI_MODEL_NAME completionChunk1["created"] = int(time.time()) completionChunk1["object"] = "extensions.chat.completion.chunk" - completionChunk1["apim-request-id"] = request_headers.get("apim-request-id") - completionChunk1["choices"][0]["messages"].append({ - "role": "assistant", - "content": deltaText - }) + completionChunk1["apim-request-id"] = request_headers.get( + "apim-request-id" + ) + completionChunk1["choices"][0]["messages"].append( + {"role": "assistant", "content": deltaText} + ) 
completionChunk1["choices"][0]["delta"] = { "role": "assistant", - "content": deltaText + "content": deltaText, } - completionChunk2 = json.loads(json.dumps(completionChunk1), object_hook=lambda d: SimpleNamespace(**d)) - yield format_stream_response(completionChunk2, history_metadata, apim_request_id) + completionChunk2 = json.loads( + json.dumps(completionChunk1), + object_hook=lambda d: SimpleNamespace(**d), + ) + yield format_stream_response( + completionChunk2, history_metadata, apim_request_id + ) return generate() - + else: - response, apim_request_id = await send_chat_request(request_body, request_headers) + response, apim_request_id = await send_chat_request( + request_body, request_headers + ) history_metadata = request_body.get("history_metadata", {}) - + async def generate(): async for completionChunk in response: - yield format_stream_response(completionChunk, history_metadata, apim_request_id) + yield format_stream_response( + completionChunk, history_metadata, apim_request_id + ) return generate() - async def conversation_internal(request_body, request_headers): @@ -1060,15 +1062,15 @@ def get_frontend_settings(): except Exception as e: logging.exception("Exception in /frontend_settings") return jsonify({"error": str(e)}), 500 - -## Conversation History API ## + +# Conversation History API # @bp.route("/history/generate", methods=["POST"]) async def add_conversation(): authenticated_user = get_authenticated_user_details(request_headers=request.headers) user_id = authenticated_user["user_principal_id"] - ## check request for conversation_id + # check request for conversation_id request_json = await request.get_json() conversation_id = request_json.get("conversation_id", None) @@ -1089,8 +1091,8 @@ async def add_conversation(): history_metadata["title"] = title history_metadata["date"] = conversation_dict["createdAt"] - ## Format the incoming message object in the "chat/completions" messages format - ## then write it to the conversation history in cosmos + # Format the incoming message object in the "chat/completions" messages format + # then write it to the conversation history in cosmos messages = request_json["messages"] if len(messages) > 0 and messages[-1]["role"] == "user": createdMessageValue = await cosmos_conversation_client.create_message( @@ -1126,7 +1128,7 @@ async def update_conversation(): authenticated_user = get_authenticated_user_details(request_headers=request.headers) user_id = authenticated_user["user_principal_id"] - ## check request for conversation_id + # check request for conversation_id request_json = await request.get_json() conversation_id = request_json.get("conversation_id", None) @@ -1140,8 +1142,8 @@ async def update_conversation(): if not conversation_id: raise Exception("No conversation_id found") - ## Format the incoming message object in the "chat/completions" messages format - ## then write it to the conversation history in cosmos + # Format the incoming message object in the "chat/completions" messages format + # then write it to the conversation history in cosmos messages = request_json["messages"] if len(messages) > 0 and messages[-1]["role"] == "assistant": if len(messages) > 1 and messages[-2].get("role", None) == "tool": @@ -1178,7 +1180,7 @@ async def update_message(): user_id = authenticated_user["user_principal_id"] cosmos_conversation_client = init_cosmosdb_client() - ## check request for message_id + # check request for message_id request_json = await request.get_json() message_id = request_json.get("message_id", None) 
message_feedback = request_json.get("message_feedback", None) @@ -1189,7 +1191,7 @@ async def update_message(): if not message_feedback: return jsonify({"error": "message_feedback is required"}), 400 - ## update the message in cosmos + # update the message in cosmos updated_message = await cosmos_conversation_client.update_message_feedback( user_id, message_id, message_feedback ) @@ -1220,11 +1222,11 @@ async def update_message(): @bp.route("/history/delete", methods=["DELETE"]) async def delete_conversation(): - ## get the user id from the request headers + # get the user id from the request headers authenticated_user = get_authenticated_user_details(request_headers=request.headers) user_id = authenticated_user["user_principal_id"] - ## check request for conversation_id + # check request for conversation_id request_json = await request.get_json() conversation_id = request_json.get("conversation_id", None) @@ -1232,20 +1234,16 @@ async def delete_conversation(): if not conversation_id: return jsonify({"error": "conversation_id is required"}), 400 - ## make sure cosmos is configured + # make sure cosmos is configured cosmos_conversation_client = init_cosmosdb_client() if not cosmos_conversation_client: raise Exception("CosmosDB is not configured or not working") - ## delete the conversation messages from cosmos first - deleted_messages = await cosmos_conversation_client.delete_messages( - conversation_id, user_id - ) + # delete the conversation messages from cosmos first + await cosmos_conversation_client.delete_messages(conversation_id, user_id) - ## Now delete the conversation - deleted_conversation = await cosmos_conversation_client.delete_conversation( - user_id, conversation_id - ) + # Now delete the conversation + await cosmos_conversation_client.delete_conversation(user_id, conversation_id) await cosmos_conversation_client.cosmosdb_client.close() @@ -1269,12 +1267,12 @@ async def list_conversations(): authenticated_user = get_authenticated_user_details(request_headers=request.headers) user_id = authenticated_user["user_principal_id"] - ## make sure cosmos is configured + # make sure cosmos is configured cosmos_conversation_client = init_cosmosdb_client() if not cosmos_conversation_client: raise Exception("CosmosDB is not configured or not working") - ## get the conversations from cosmos + # get the conversations from cosmos conversations = await cosmos_conversation_client.get_conversations( user_id, offset=offset, limit=25 ) @@ -1282,7 +1280,7 @@ async def list_conversations(): if not isinstance(conversations, list): return jsonify({"error": f"No conversations for {user_id} were found"}), 404 - ## return the conversation ids + # return the conversation ids return jsonify(conversations), 200 @@ -1292,23 +1290,23 @@ async def get_conversation(): authenticated_user = get_authenticated_user_details(request_headers=request.headers) user_id = authenticated_user["user_principal_id"] - ## check request for conversation_id + # check request for conversation_id request_json = await request.get_json() conversation_id = request_json.get("conversation_id", None) if not conversation_id: return jsonify({"error": "conversation_id is required"}), 400 - ## make sure cosmos is configured + # make sure cosmos is configured cosmos_conversation_client = init_cosmosdb_client() if not cosmos_conversation_client: raise Exception("CosmosDB is not configured or not working") - ## get the conversation object and the related messages from cosmos + # get the conversation object and the related messages from 
cosmos conversation = await cosmos_conversation_client.get_conversation( user_id, conversation_id ) - ## return the conversation id and the messages in the bot frontend format + # return the conversation id and the messages in the bot frontend format if not conversation: return ( jsonify( @@ -1324,7 +1322,7 @@ async def get_conversation(): user_id, conversation_id ) - ## format the messages in the bot frontend format + # format the messages in the bot frontend format messages = [ { "id": msg["id"], @@ -1345,19 +1343,19 @@ async def rename_conversation(): authenticated_user = get_authenticated_user_details(request_headers=request.headers) user_id = authenticated_user["user_principal_id"] - ## check request for conversation_id + # check request for conversation_id request_json = await request.get_json() conversation_id = request_json.get("conversation_id", None) if not conversation_id: return jsonify({"error": "conversation_id is required"}), 400 - ## make sure cosmos is configured + # make sure cosmos is configured cosmos_conversation_client = init_cosmosdb_client() if not cosmos_conversation_client: raise Exception("CosmosDB is not configured or not working") - ## get the conversation from cosmos + # get the conversation from cosmos conversation = await cosmos_conversation_client.get_conversation( user_id, conversation_id ) @@ -1371,7 +1369,7 @@ async def rename_conversation(): 404, ) - ## update the title + # update the title title = request_json.get("title", None) if not title: return jsonify({"error": "title is required"}), 400 @@ -1386,13 +1384,13 @@ async def rename_conversation(): @bp.route("/history/delete_all", methods=["DELETE"]) async def delete_all_conversations(): - ## get the user id from the request headers + # get the user id from the request headers authenticated_user = get_authenticated_user_details(request_headers=request.headers) user_id = authenticated_user["user_principal_id"] # get conversations for user try: - ## make sure cosmos is configured + # make sure cosmos is configured cosmos_conversation_client = init_cosmosdb_client() if not cosmos_conversation_client: raise Exception("CosmosDB is not configured or not working") @@ -1405,13 +1403,13 @@ async def delete_all_conversations(): # delete each conversation for conversation in conversations: - ## delete the conversation messages from cosmos first - deleted_messages = await cosmos_conversation_client.delete_messages( + # delete the conversation messages from cosmos first + await cosmos_conversation_client.delete_messages( conversation["id"], user_id ) - ## Now delete the conversation - deleted_conversation = await cosmos_conversation_client.delete_conversation( + # Now delete the conversation + await cosmos_conversation_client.delete_conversation( user_id, conversation["id"] ) await cosmos_conversation_client.cosmosdb_client.close() @@ -1431,11 +1429,11 @@ async def delete_all_conversations(): @bp.route("/history/clear", methods=["POST"]) async def clear_messages(): - ## get the user id from the request headers + # get the user id from the request headers authenticated_user = get_authenticated_user_details(request_headers=request.headers) user_id = authenticated_user["user_principal_id"] - ## check request for conversation_id + # check request for conversation_id request_json = await request.get_json() conversation_id = request_json.get("conversation_id", None) @@ -1443,15 +1441,13 @@ async def clear_messages(): if not conversation_id: return jsonify({"error": "conversation_id is required"}), 400 - ## make sure 
cosmos is configured + # make sure cosmos is configured cosmos_conversation_client = init_cosmosdb_client() if not cosmos_conversation_client: raise Exception("CosmosDB is not configured or not working") - ## delete the conversation messages from cosmos - deleted_messages = await cosmos_conversation_client.delete_messages( - conversation_id, user_id - ) + # delete the conversation messages from cosmos + await cosmos_conversation_client.delete_messages(conversation_id, user_id) return ( jsonify( @@ -1510,7 +1506,7 @@ async def ensure_cosmos(): async def generate_title(conversation_messages): - ## make sure the messages are sorted by _ts descending + # make sure the messages are sorted by _ts descending title_prompt = 'Summarize the conversation so far into a 4-word or less title. Do not use any quotation marks or punctuation. Respond with a json object in the format {{"title": string}}. Do not include any other commentary or description.' messages = [ @@ -1527,29 +1523,31 @@ async def generate_title(conversation_messages): title = json.loads(response.choices[0].message.content)["title"] return title - except Exception as e: + except Exception: return messages[-2]["content"] - -@bp.route("/api/pbi", methods=['GET']) + + +@bp.route("/api/pbi", methods=["GET"]) def get_pbiurl(): return VITE_POWERBI_EMBED_URL - -@bp.route("/api/users", methods=['GET']) + + +@bp.route("/api/users", methods=["GET"]) def get_users(): - conn = None + conn = None try: conn = get_connection() cursor = conn.cursor() sql_stmt = """ - SELECT - ClientId, - Client, - Email, + SELECT + ClientId, + Client, + Email, FORMAT(AssetValue, 'N0') AS AssetValue, ClientSummary, CAST(LastMeeting AS DATE) AS LastMeetingDate, FORMAT(CAST(LastMeeting AS DATE), 'dddd MMMM d, yyyy') AS LastMeetingDateFormatted, -      FORMAT(LastMeeting, 'hh:mm tt') AS LastMeetingStartTime, + FORMAT(LastMeeting, 'hh:mm tt') AS LastMeetingStartTime, FORMAT(LastMeetingEnd, 'hh:mm tt') AS LastMeetingEndTime, CAST(NextMeeting AS DATE) AS NextMeetingDate, FORMAT(CAST(NextMeeting AS DATE), 'dddd MMMM d, yyyy') AS NextMeetingFormatted, @@ -1573,7 +1571,7 @@ def get_users(): JOIN ClientSummaries cs ON c.ClientId = cs.ClientId ) ca JOIN ( - SELECT cm.ClientId, + SELECT cm.ClientId, MAX(CASE WHEN StartTime < GETDATE() THEN StartTime END) AS LastMeeting, DATEADD(MINUTE, 30, MAX(CASE WHEN StartTime < GETDATE() THEN StartTime END)) AS LastMeetingEnd, MIN(CASE WHEN StartTime > GETDATE() AND StartTime < GETDATE() + 7 THEN StartTime END) AS NextMeeting, @@ -1589,22 +1587,27 @@ def get_users(): rows = cursor.fetchall() if len(rows) <= 6: - #update ClientMeetings,Assets,Retirement tables sample data to current date + # update ClientMeetings,Assets,Retirement tables sample data to current date cursor = conn.cursor() - cursor.execute("""select DATEDIFF(d,CAST(max(StartTime) AS Date),CAST(GETDATE() AS Date)) + 3 as ndays from ClientMeetings""") + cursor.execute( + """select DATEDIFF(d,CAST(max(StartTime) AS Date),CAST(GETDATE() AS Date)) + 3 as ndays from ClientMeetings""" + ) rows = cursor.fetchall() + ndays = 0 for row in rows: - ndays = row['ndays'] - sql_stmt1 = f'UPDATE ClientMeetings SET StartTime = dateadd(day,{ndays},StartTime), EndTime = dateadd(day,{ndays},EndTime)' + ndays = row["ndays"] + sql_stmt1 = f"UPDATE ClientMeetings SET StartTime = dateadd(day,{ndays},StartTime), EndTime = dateadd(day,{ndays},EndTime)" cursor.execute(sql_stmt1) conn.commit() - nmonths = int(ndays/30) + nmonths = int(ndays / 30) if nmonths > 0: - sql_stmt1 = f'UPDATE Assets SET AssetDate = 
dateadd(MONTH,{nmonths},AssetDate)' + sql_stmt1 = ( + f"UPDATE Assets SET AssetDate = dateadd(MONTH,{nmonths},AssetDate)" + ) cursor.execute(sql_stmt1) conn.commit() - - sql_stmt1 = f'UPDATE Retirement SET StatusDate = dateadd(MONTH,{nmonths},StatusDate)' + + sql_stmt1 = f"UPDATE Retirement SET StatusDate = dateadd(MONTH,{nmonths},StatusDate)" cursor.execute(sql_stmt1) conn.commit() @@ -1616,29 +1619,29 @@ def get_users(): for row in rows: # print(row) user = { - 'ClientId': row['ClientId'], - 'ClientName': row['Client'], - 'ClientEmail': row['Email'], - 'AssetValue': row['AssetValue'], - 'NextMeeting': row['NextMeetingFormatted'], - 'NextMeetingTime': row['NextMeetingStartTime'], - 'NextMeetingEndTime': row['NextMeetingEndTime'], - 'LastMeeting': row['LastMeetingDateFormatted'], - 'LastMeetingStartTime': row['LastMeetingStartTime'], - 'LastMeetingEndTime': row['LastMeetingEndTime'], - 'ClientSummary': row['ClientSummary'] - } + "ClientId": row["ClientId"], + "ClientName": row["Client"], + "ClientEmail": row["Email"], + "AssetValue": row["AssetValue"], + "NextMeeting": row["NextMeetingFormatted"], + "NextMeetingTime": row["NextMeetingStartTime"], + "NextMeetingEndTime": row["NextMeetingEndTime"], + "LastMeeting": row["LastMeetingDateFormatted"], + "LastMeetingStartTime": row["LastMeetingStartTime"], + "LastMeetingEndTime": row["LastMeetingEndTime"], + "ClientSummary": row["ClientSummary"], + } users.append(user) # print(users) - + return jsonify(users) - - + except Exception as e: print("Exception occurred:", e) return str(e), 500 finally: if conn: conn.close() - + + app = create_app() diff --git a/ClientAdvisor/App/backend/auth/auth_utils.py b/ClientAdvisor/App/backend/auth/auth_utils.py index 3a97e610..31e01dff 100644 --- a/ClientAdvisor/App/backend/auth/auth_utils.py +++ b/ClientAdvisor/App/backend/auth/auth_utils.py @@ -2,38 +2,41 @@ import json import logging + def get_authenticated_user_details(request_headers): user_object = {} - ## check the headers for the Principal-Id (the guid of the signed in user) + # check the headers for the Principal-Id (the guid of the signed in user) if "X-Ms-Client-Principal-Id" not in request_headers.keys(): - ## if it's not, assume we're in development mode and return a default user + # if it's not, assume we're in development mode and return a default user from . 
import sample_user + raw_user_object = sample_user.sample_user else: - ## if it is, get the user details from the EasyAuth headers - raw_user_object = {k:v for k,v in request_headers.items()} + # if it is, get the user details from the EasyAuth headers + raw_user_object = {k: v for k, v in request_headers.items()} - user_object['user_principal_id'] = raw_user_object.get('X-Ms-Client-Principal-Id') - user_object['user_name'] = raw_user_object.get('X-Ms-Client-Principal-Name') - user_object['auth_provider'] = raw_user_object.get('X-Ms-Client-Principal-Idp') - user_object['auth_token'] = raw_user_object.get('X-Ms-Token-Aad-Id-Token') - user_object['client_principal_b64'] = raw_user_object.get('X-Ms-Client-Principal') - user_object['aad_id_token'] = raw_user_object.get('X-Ms-Token-Aad-Id-Token') + user_object["user_principal_id"] = raw_user_object.get("X-Ms-Client-Principal-Id") + user_object["user_name"] = raw_user_object.get("X-Ms-Client-Principal-Name") + user_object["auth_provider"] = raw_user_object.get("X-Ms-Client-Principal-Idp") + user_object["auth_token"] = raw_user_object.get("X-Ms-Token-Aad-Id-Token") + user_object["client_principal_b64"] = raw_user_object.get("X-Ms-Client-Principal") + user_object["aad_id_token"] = raw_user_object.get("X-Ms-Token-Aad-Id-Token") return user_object + def get_tenantid(client_principal_b64): - tenant_id = '' - if client_principal_b64: + tenant_id = "" + if client_principal_b64: try: # Decode the base64 header to get the JSON string decoded_bytes = base64.b64decode(client_principal_b64) - decoded_string = decoded_bytes.decode('utf-8') + decoded_string = decoded_bytes.decode("utf-8") # Convert the JSON string1into a Python dictionary user_info = json.loads(decoded_string) # Extract the tenant ID - tenant_id = user_info.get('tid') # 'tid' typically holds the tenant ID + tenant_id = user_info.get("tid") # 'tid' typically holds the tenant ID except Exception as ex: logging.exception(ex) - return tenant_id \ No newline at end of file + return tenant_id diff --git a/ClientAdvisor/App/backend/auth/sample_user.py b/ClientAdvisor/App/backend/auth/sample_user.py index 0b10d9ab..9353bcc1 100644 --- a/ClientAdvisor/App/backend/auth/sample_user.py +++ b/ClientAdvisor/App/backend/auth/sample_user.py @@ -1,39 +1,39 @@ sample_user = { - "Accept": "*/*", - "Accept-Encoding": "gzip, deflate, br", - "Accept-Language": "en", - "Client-Ip": "22.222.222.2222:64379", - "Content-Length": "192", - "Content-Type": "application/json", - "Cookie": 
"AppServiceAuthSession=/AuR5ENU+pmpoN3jnymP8fzpmVBgphx9uPQrYLEWGcxjIITIeh8NZW7r3ePkG8yBcMaItlh1pX4nzg5TFD9o2mxC/5BNDRe/uuu0iDlLEdKecROZcVRY7QsFdHLjn9KB90Z3d9ZeLwfVIf0sZowWJt03BO5zKGB7vZgL+ofv3QY3AaYn1k1GtxSE9HQWJpWar7mOA64b7Lsy62eY3nxwg3AWDsP3/rAta+MnDCzpdlZMFXcJLj+rsCppW+w9OqGhKQ7uCs03BPeon3qZOdmE8cOJW3+i96iYlhneNQDItHyQqEi1CHbBTSkqwpeOwWP4vcwGM22ynxPp7YFyiRw/X361DGYy+YkgYBkXq1AEIDZ44BCBz9EEaEi0NU+m6yUOpNjEaUtrJKhQywcM2odojdT4XAY+HfTEfSqp0WiAkgAuE/ueCu2JDOfvxGjCgJ4DGWCoYdOdXAN1c+MenT4OSvkMO41YuPeah9qk9ixkJI5s80lv8rUu1J26QF6pstdDkYkAJAEra3RQiiO1eAH7UEb3xHXn0HW5lX8ZDX3LWiAFGOt5DIKxBKFymBKJGzbPFPYjfczegu0FD8/NQPLl2exAX3mI9oy/tFnATSyLO2E8DxwP5wnYVminZOQMjB/I4g3Go14betm0MlNXlUbU1fyS6Q6JxoCNLDZywCoU9Y65UzimWZbseKsXlOwYukCEpuQ5QPT55LuEAWhtYier8LSh+fvVUsrkqKS+bg0hzuoX53X6aqUr7YB31t0Z2zt5TT/V3qXpdyD8Xyd884PqysSkJYa553sYx93ETDKSsfDguanVfn2si9nvDpvUWf6/R02FmQgXiaaaykMgYyIuEmE77ptsivjH3hj/MN4VlePFWokcchF4ciqqzonmICmjEHEx5zpjU2Kwa+0y7J5ROzVVygcnO1jH6ZKDy9bGGYL547bXx/iiYBYqSIQzleOAkCeULrGN2KEHwckX5MpuRaqTpoxdZH9RJv0mIWxbDA0kwGsbMICQd0ZODBkPUnE84qhzvXInC+TL7MbutPEnGbzgxBAS1c2Ct4vxkkjykOeOxTPxqAhxoefwUfIwZZax6A9LbeYX2bsBpay0lScHcA==", - "Disguised-Host": "your_app_service.azurewebsites.net", - "Host": "your_app_service.azurewebsites.net", - "Max-Forwards": "10", - "Origin": "https://your_app_service.azurewebsites.net", - "Referer": "https://your_app_service.azurewebsites.net/", - "Sec-Ch-Ua": "\"Microsoft Edge\";v=\"113\", \"Chromium\";v=\"113\", \"Not-A.Brand\";v=\"24\"", - "Sec-Ch-Ua-Mobile": "?0", - "Sec-Ch-Ua-Platform": "\"Windows\"", - "Sec-Fetch-Dest": "empty", - "Sec-Fetch-Mode": "cors", - "Sec-Fetch-Site": "same-origin", - "Traceparent": "00-24e9a8d1b06f233a3f1714845ef971a9-3fac69f81ca5175c-00", - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36 Edg/113.0.1774.42", - "Was-Default-Hostname": "your_app_service.azurewebsites.net", - "X-Appservice-Proto": "https", - "X-Arr-Log-Id": "4102b832-6c88-4c7c-8996-0edad9e4358f", - "X-Arr-Ssl": "2048|256|CN=Microsoft Azure TLS Issuing CA 02, O=Microsoft Corporation, C=US|CN=*.azurewebsites.net, O=Microsoft Corporation, L=Redmond, S=WA, C=US", - "X-Client-Ip": "22.222.222.222", - "X-Client-Port": "64379", - "X-Forwarded-For": "22.222.222.22:64379", - "X-Forwarded-Proto": "https", - "X-Forwarded-Tlsversion": "1.2", - "X-Ms-Client-Principal": "your_base_64_encoded_token", - "X-Ms-Client-Principal-Id": "00000000-0000-0000-0000-000000000000", - "X-Ms-Client-Principal-Idp": "aad", - "X-Ms-Client-Principal-Name": "testusername@constoso.com", - "X-Ms-Token-Aad-Id-Token": "your_aad_id_token", - "X-Original-Url": "/chatgpt", - "X-Site-Deployment-Id": "your_app_service", - "X-Waws-Unencoded-Url": "/chatgpt" + "Accept": "*/*", + "Accept-Encoding": "gzip, deflate, br", + "Accept-Language": "en", + "Client-Ip": "22.222.222.2222:64379", + "Content-Length": "192", + "Content-Type": "application/json", + "Cookie": 
"AppServiceAuthSession=/AuR5ENU+pmpoN3jnymP8fzpmVBgphx9uPQrYLEWGcxjIITIeh8NZW7r3ePkG8yBcMaItlh1pX4nzg5TFD9o2mxC/5BNDRe/uuu0iDlLEdKecROZcVRY7QsFdHLjn9KB90Z3d9ZeLwfVIf0sZowWJt03BO5zKGB7vZgL+ofv3QY3AaYn1k1GtxSE9HQWJpWar7mOA64b7Lsy62eY3nxwg3AWDsP3/rAta+MnDCzpdlZMFXcJLj+rsCppW+w9OqGhKQ7uCs03BPeon3qZOdmE8cOJW3+i96iYlhneNQDItHyQqEi1CHbBTSkqwpeOwWP4vcwGM22ynxPp7YFyiRw/X361DGYy+YkgYBkXq1AEIDZ44BCBz9EEaEi0NU+m6yUOpNjEaUtrJKhQywcM2odojdT4XAY+HfTEfSqp0WiAkgAuE/ueCu2JDOfvxGjCgJ4DGWCoYdOdXAN1c+MenT4OSvkMO41YuPeah9qk9ixkJI5s80lv8rUu1J26QF6pstdDkYkAJAEra3RQiiO1eAH7UEb3xHXn0HW5lX8ZDX3LWiAFGOt5DIKxBKFymBKJGzbPFPYjfczegu0FD8/NQPLl2exAX3mI9oy/tFnATSyLO2E8DxwP5wnYVminZOQMjB/I4g3Go14betm0MlNXlUbU1fyS6Q6JxoCNLDZywCoU9Y65UzimWZbseKsXlOwYukCEpuQ5QPT55LuEAWhtYier8LSh+fvVUsrkqKS+bg0hzuoX53X6aqUr7YB31t0Z2zt5TT/V3qXpdyD8Xyd884PqysSkJYa553sYx93ETDKSsfDguanVfn2si9nvDpvUWf6/R02FmQgXiaaaykMgYyIuEmE77ptsivjH3hj/MN4VlePFWokcchF4ciqqzonmICmjEHEx5zpjU2Kwa+0y7J5ROzVVygcnO1jH6ZKDy9bGGYL547bXx/iiYBYqSIQzleOAkCeULrGN2KEHwckX5MpuRaqTpoxdZH9RJv0mIWxbDA0kwGsbMICQd0ZODBkPUnE84qhzvXInC+TL7MbutPEnGbzgxBAS1c2Ct4vxkkjykOeOxTPxqAhxoefwUfIwZZax6A9LbeYX2bsBpay0lScHcA==", + "Disguised-Host": "your_app_service.azurewebsites.net", + "Host": "your_app_service.azurewebsites.net", + "Max-Forwards": "10", + "Origin": "https://your_app_service.azurewebsites.net", + "Referer": "https://your_app_service.azurewebsites.net/", + "Sec-Ch-Ua": '"Microsoft Edge";v="113", "Chromium";v="113", "Not-A.Brand";v="24"', + "Sec-Ch-Ua-Mobile": "?0", + "Sec-Ch-Ua-Platform": '"Windows"', + "Sec-Fetch-Dest": "empty", + "Sec-Fetch-Mode": "cors", + "Sec-Fetch-Site": "same-origin", + "Traceparent": "00-24e9a8d1b06f233a3f1714845ef971a9-3fac69f81ca5175c-00", + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36 Edg/113.0.1774.42", + "Was-Default-Hostname": "your_app_service.azurewebsites.net", + "X-Appservice-Proto": "https", + "X-Arr-Log-Id": "4102b832-6c88-4c7c-8996-0edad9e4358f", + "X-Arr-Ssl": "2048|256|CN=Microsoft Azure TLS Issuing CA 02, O=Microsoft Corporation, C=US|CN=*.azurewebsites.net, O=Microsoft Corporation, L=Redmond, S=WA, C=US", + "X-Client-Ip": "22.222.222.222", + "X-Client-Port": "64379", + "X-Forwarded-For": "22.222.222.22:64379", + "X-Forwarded-Proto": "https", + "X-Forwarded-Tlsversion": "1.2", + "X-Ms-Client-Principal": "your_base_64_encoded_token", + "X-Ms-Client-Principal-Id": "00000000-0000-0000-0000-000000000000", + "X-Ms-Client-Principal-Idp": "aad", + "X-Ms-Client-Principal-Name": "testusername@constoso.com", + "X-Ms-Token-Aad-Id-Token": "your_aad_id_token", + "X-Original-Url": "/chatgpt", + "X-Site-Deployment-Id": "your_app_service", + "X-Waws-Unencoded-Url": "/chatgpt", } diff --git a/ClientAdvisor/App/backend/history/cosmosdbservice.py b/ClientAdvisor/App/backend/history/cosmosdbservice.py index 737c23d9..70c2df5b 100644 --- a/ClientAdvisor/App/backend/history/cosmosdbservice.py +++ b/ClientAdvisor/App/backend/history/cosmosdbservice.py @@ -1,18 +1,29 @@ import uuid from datetime import datetime -from azure.cosmos.aio import CosmosClient + from azure.cosmos import exceptions - -class CosmosConversationClient(): - - def __init__(self, cosmosdb_endpoint: str, credential: any, database_name: str, container_name: str, enable_message_feedback: bool = False): +from azure.cosmos.aio import CosmosClient + + +class CosmosConversationClient: + + def __init__( + self, + cosmosdb_endpoint: str, + credential: any, + database_name: str, + container_name: str, + 
enable_message_feedback: bool = False, + ): self.cosmosdb_endpoint = cosmosdb_endpoint self.credential = credential self.database_name = database_name self.container_name = container_name self.enable_message_feedback = enable_message_feedback try: - self.cosmosdb_client = CosmosClient(self.cosmosdb_endpoint, credential=credential) + self.cosmosdb_client = CosmosClient( + self.cosmosdb_endpoint, credential=credential + ) except exceptions.CosmosHttpResponseError as e: if e.status_code == 401: raise ValueError("Invalid credentials") from e @@ -20,48 +31,58 @@ def __init__(self, cosmosdb_endpoint: str, credential: any, database_name: str, raise ValueError("Invalid CosmosDB endpoint") from e try: - self.database_client = self.cosmosdb_client.get_database_client(database_name) + self.database_client = self.cosmosdb_client.get_database_client( + database_name + ) except exceptions.CosmosResourceNotFoundError: - raise ValueError("Invalid CosmosDB database name") - + raise ValueError("Invalid CosmosDB database name") + try: - self.container_client = self.database_client.get_container_client(container_name) + self.container_client = self.database_client.get_container_client( + container_name + ) except exceptions.CosmosResourceNotFoundError: - raise ValueError("Invalid CosmosDB container name") - + raise ValueError("Invalid CosmosDB container name") async def ensure(self): - if not self.cosmosdb_client or not self.database_client or not self.container_client: + if ( + not self.cosmosdb_client + or not self.database_client + or not self.container_client + ): return False, "CosmosDB client not initialized correctly" - + try: - database_info = await self.database_client.read() - except: - return False, f"CosmosDB database {self.database_name} on account {self.cosmosdb_endpoint} not found" - + await self.database_client.read() + except Exception: + return ( + False, + f"CosmosDB database {self.database_name} on account {self.cosmosdb_endpoint} not found", + ) + try: - container_info = await self.container_client.read() - except: + await self.container_client.read() + except Exception: return False, f"CosmosDB container {self.container_name} not found" - + return True, "CosmosDB client initialized successfully" - async def create_conversation(self, user_id, title = ''): + async def create_conversation(self, user_id, title=""): conversation = { - 'id': str(uuid.uuid4()), - 'type': 'conversation', - 'createdAt': datetime.utcnow().isoformat(), - 'updatedAt': datetime.utcnow().isoformat(), - 'userId': user_id, - 'title': title + "id": str(uuid.uuid4()), + "type": "conversation", + "createdAt": datetime.utcnow().isoformat(), + "updatedAt": datetime.utcnow().isoformat(), + "userId": user_id, + "title": title, } - ## TODO: add some error handling based on the output of the upsert_item call - resp = await self.container_client.upsert_item(conversation) + # TODO: add some error handling based on the output of the upsert_item call + resp = await self.container_client.upsert_item(conversation) if resp: return resp else: return False - + async def upsert_conversation(self, conversation): resp = await self.container_client.upsert_item(conversation) if resp: @@ -70,95 +91,94 @@ async def upsert_conversation(self, conversation): return False async def delete_conversation(self, user_id, conversation_id): - conversation = await self.container_client.read_item(item=conversation_id, partition_key=user_id) + conversation = await self.container_client.read_item( + item=conversation_id, partition_key=user_id + ) if 
conversation: - resp = await self.container_client.delete_item(item=conversation_id, partition_key=user_id) + resp = await self.container_client.delete_item( + item=conversation_id, partition_key=user_id + ) return resp else: return True - async def delete_messages(self, conversation_id, user_id): - ## get a list of all the messages in the conversation + # get a list of all the messages in the conversation messages = await self.get_messages(user_id, conversation_id) response_list = [] if messages: for message in messages: - resp = await self.container_client.delete_item(item=message['id'], partition_key=user_id) + resp = await self.container_client.delete_item( + item=message["id"], partition_key=user_id + ) response_list.append(resp) return response_list - - async def get_conversations(self, user_id, limit, sort_order = 'DESC', offset = 0): - parameters = [ - { - 'name': '@userId', - 'value': user_id - } - ] + async def get_conversations(self, user_id, limit, sort_order="DESC", offset=0): + parameters = [{"name": "@userId", "value": user_id}] query = f"SELECT * FROM c where c.userId = @userId and c.type='conversation' order by c.updatedAt {sort_order}" if limit is not None: - query += f" offset {offset} limit {limit}" - + query += f" offset {offset} limit {limit}" + conversations = [] - async for item in self.container_client.query_items(query=query, parameters=parameters): + async for item in self.container_client.query_items( + query=query, parameters=parameters + ): conversations.append(item) - + return conversations async def get_conversation(self, user_id, conversation_id): parameters = [ - { - 'name': '@conversationId', - 'value': conversation_id - }, - { - 'name': '@userId', - 'value': user_id - } + {"name": "@conversationId", "value": conversation_id}, + {"name": "@userId", "value": user_id}, ] - query = f"SELECT * FROM c where c.id = @conversationId and c.type='conversation' and c.userId = @userId" + query = "SELECT * FROM c where c.id = @conversationId and c.type='conversation' and c.userId = @userId" conversations = [] - async for item in self.container_client.query_items(query=query, parameters=parameters): + async for item in self.container_client.query_items( + query=query, parameters=parameters + ): conversations.append(item) - ## if no conversations are found, return None + # if no conversations are found, return None if len(conversations) == 0: return None else: return conversations[0] - + async def create_message(self, uuid, conversation_id, user_id, input_message: dict): message = { - 'id': uuid, - 'type': 'message', - 'userId' : user_id, - 'createdAt': datetime.utcnow().isoformat(), - 'updatedAt': datetime.utcnow().isoformat(), - 'conversationId' : conversation_id, - 'role': input_message['role'], - 'content': input_message['content'] + "id": uuid, + "type": "message", + "userId": user_id, + "createdAt": datetime.utcnow().isoformat(), + "updatedAt": datetime.utcnow().isoformat(), + "conversationId": conversation_id, + "role": input_message["role"], + "content": input_message["content"], } if self.enable_message_feedback: - message['feedback'] = '' - - resp = await self.container_client.upsert_item(message) + message["feedback"] = "" + + resp = await self.container_client.upsert_item(message) if resp: - ## update the parent conversations's updatedAt field with the current message's createdAt datetime value + # update the parent conversations's updatedAt field with the current message's createdAt datetime value conversation = await self.get_conversation(user_id, 
conversation_id) if not conversation: return "Conversation not found" - conversation['updatedAt'] = message['createdAt'] + conversation["updatedAt"] = message["createdAt"] await self.upsert_conversation(conversation) return resp else: return False - + async def update_message_feedback(self, user_id, message_id, feedback): - message = await self.container_client.read_item(item=message_id, partition_key=user_id) + message = await self.container_client.read_item( + item=message_id, partition_key=user_id + ) if message: - message['feedback'] = feedback + message["feedback"] = feedback resp = await self.container_client.upsert_item(message) return resp else: @@ -166,19 +186,14 @@ async def update_message_feedback(self, user_id, message_id, feedback): async def get_messages(self, user_id, conversation_id): parameters = [ - { - 'name': '@conversationId', - 'value': conversation_id - }, - { - 'name': '@userId', - 'value': user_id - } + {"name": "@conversationId", "value": conversation_id}, + {"name": "@userId", "value": user_id}, ] - query = f"SELECT * FROM c WHERE c.conversationId = @conversationId AND c.type='message' AND c.userId = @userId ORDER BY c.timestamp ASC" + query = "SELECT * FROM c WHERE c.conversationId = @conversationId AND c.type='message' AND c.userId = @userId ORDER BY c.timestamp ASC" messages = [] - async for item in self.container_client.query_items(query=query, parameters=parameters): + async for item in self.container_client.query_items( + query=query, parameters=parameters + ): messages.append(item) return messages - diff --git a/ClientAdvisor/App/backend/utils.py b/ClientAdvisor/App/backend/utils.py index 5c53bd00..4c7511d4 100644 --- a/ClientAdvisor/App/backend/utils.py +++ b/ClientAdvisor/App/backend/utils.py @@ -1,8 +1,9 @@ -import os +import dataclasses import json import logging +import os + import requests -import dataclasses DEBUG = os.environ.get("DEBUG", "false") if DEBUG.lower() == "true": @@ -104,6 +105,7 @@ def format_non_streaming_response(chatCompletion, history_metadata, apim_request return {} + def format_stream_response(chatCompletionChunk, history_metadata, apim_request_id): response_obj = { "id": chatCompletionChunk.id, @@ -142,7 +144,11 @@ def format_stream_response(chatCompletionChunk, history_metadata, apim_request_i def format_pf_non_streaming_response( - chatCompletion, history_metadata, response_field_name, citations_field_name, message_uuid=None + chatCompletion, + history_metadata, + response_field_name, + citations_field_name, + message_uuid=None, ): if chatCompletion is None: logging.error( @@ -159,15 +165,13 @@ def format_pf_non_streaming_response( try: messages = [] if response_field_name in chatCompletion: - messages.append({ - "role": "assistant", - "content": chatCompletion[response_field_name] - }) + messages.append( + {"role": "assistant", "content": chatCompletion[response_field_name]} + ) if citations_field_name in chatCompletion: - messages.append({ - "role": "tool", - "content": chatCompletion[citations_field_name] - }) + messages.append( + {"role": "tool", "content": chatCompletion[citations_field_name]} + ) response_obj = { "id": chatCompletion["id"], "model": "", @@ -178,7 +182,7 @@ def format_pf_non_streaming_response( "messages": messages, "history_metadata": history_metadata, } - ] + ], } return response_obj except Exception as e: diff --git a/ClientAdvisor/App/db.py b/ClientAdvisor/App/db.py index 03de12ff..536bb940 100644 --- a/ClientAdvisor/App/db.py +++ b/ClientAdvisor/App/db.py @@ -1,23 +1,20 @@ # db.py import os + import 
pymssql from dotenv import load_dotenv load_dotenv() -server = os.environ.get('SQLDB_SERVER') -database = os.environ.get('SQLDB_DATABASE') -username = os.environ.get('SQLDB_USERNAME') -password = os.environ.get('SQLDB_PASSWORD') +server = os.environ.get("SQLDB_SERVER") +database = os.environ.get("SQLDB_DATABASE") +username = os.environ.get("SQLDB_USERNAME") +password = os.environ.get("SQLDB_PASSWORD") + def get_connection(): conn = pymssql.connect( - server=server, - user=username, - password=password, - database=database, - as_dict=True - ) + server=server, user=username, password=password, database=database, as_dict=True + ) return conn - \ No newline at end of file diff --git a/ClientAdvisor/App/requirements-dev.txt b/ClientAdvisor/App/requirements-dev.txt index b4eac12d..c652390e 100644 --- a/ClientAdvisor/App/requirements-dev.txt +++ b/ClientAdvisor/App/requirements-dev.txt @@ -12,3 +12,9 @@ gunicorn==20.1.0 quart-session==3.0.0 pymssql==2.3.0 httpx==0.27.0 +pytest-asyncio==0.24.0 +pytest-cov==5.0.0 +flake8==7.1.1 +black==24.8.0 +autoflake==2.3.1 +isort==5.13.2 \ No newline at end of file diff --git a/ClientAdvisor/App/requirements.txt b/ClientAdvisor/App/requirements.txt index a921be2a..ce2e28e6 100644 --- a/ClientAdvisor/App/requirements.txt +++ b/ClientAdvisor/App/requirements.txt @@ -12,3 +12,9 @@ gunicorn==20.1.0 quart-session==3.0.0 pymssql==2.3.0 httpx==0.27.0 +pytest-asyncio==0.24.0 +pytest-cov==5.0.0 +flake8==7.1.1 +black==24.8.0 +autoflake==2.3.1 +isort==5.13.2 \ No newline at end of file diff --git a/ClientAdvisor/App/test_app.py b/ClientAdvisor/App/test_app.py deleted file mode 100644 index f50d3fc5..00000000 --- a/ClientAdvisor/App/test_app.py +++ /dev/null @@ -1,6 +0,0 @@ -from app import format_as_ndjson - - -def test_format_as_ndjson(): - obj = {"message": "I ❤️ 🐍 \n and escaped newlines"} - assert format_as_ndjson(obj) == '{"message": "I ❤️ 🐍 \\n and escaped newlines"}\n' diff --git a/ClientAdvisor/App/tests/backend/auth/test_auth.py b/ClientAdvisor/App/tests/backend/auth/test_auth.py new file mode 100644 index 00000000..1adf323d --- /dev/null +++ b/ClientAdvisor/App/tests/backend/auth/test_auth.py @@ -0,0 +1,66 @@ +import base64 +import json +from unittest.mock import patch + +from backend.auth.auth_utils import (get_authenticated_user_details, + get_tenantid) + + +def test_get_authenticated_user_details_no_principal_id(): + request_headers = {} + sample_user_data = { + "X-Ms-Client-Principal-Id": "default-id", + "X-Ms-Client-Principal-Name": "default-name", + "X-Ms-Client-Principal-Idp": "default-idp", + "X-Ms-Token-Aad-Id-Token": "default-token", + "X-Ms-Client-Principal": "default-b64", + } + with patch("backend.auth.sample_user.sample_user", sample_user_data): + user_details = get_authenticated_user_details(request_headers) + assert user_details["user_principal_id"] == "default-id" + assert user_details["user_name"] == "default-name" + assert user_details["auth_provider"] == "default-idp" + assert user_details["auth_token"] == "default-token" + assert user_details["client_principal_b64"] == "default-b64" + + +def test_get_authenticated_user_details_with_principal_id(): + request_headers = { + "X-Ms-Client-Principal-Id": "test-id", + "X-Ms-Client-Principal-Name": "test-name", + "X-Ms-Client-Principal-Idp": "test-idp", + "X-Ms-Token-Aad-Id-Token": "test-token", + "X-Ms-Client-Principal": "test-b64", + } + user_details = get_authenticated_user_details(request_headers) + assert user_details["user_principal_id"] == "test-id" + assert user_details["user_name"] == 
"test-name" + assert user_details["auth_provider"] == "test-idp" + assert user_details["auth_token"] == "test-token" + assert user_details["client_principal_b64"] == "test-b64" + + +def test_get_tenantid_valid_b64(): + user_info = {"tid": "test-tenant-id"} + client_principal_b64 = base64.b64encode( + json.dumps(user_info).encode("utf-8") + ).decode("utf-8") + tenant_id = get_tenantid(client_principal_b64) + assert tenant_id == "test-tenant-id" + + +def test_get_tenantid_invalid_b64(): + client_principal_b64 = "invalid-b64" + with patch("backend.auth.auth_utils.logging") as mock_logging: + tenant_id = get_tenantid(client_principal_b64) + assert tenant_id == "" + mock_logging.exception.assert_called_once() + + +def test_get_tenantid_no_tid(): + user_info = {"some_other_key": "value"} + client_principal_b64 = base64.b64encode( + json.dumps(user_info).encode("utf-8") + ).decode("utf-8") + tenant_id = get_tenantid(client_principal_b64) + assert tenant_id is None diff --git a/ClientAdvisor/App/tests/backend/history/test_cosmosdb_service.py b/ClientAdvisor/App/tests/backend/history/test_cosmosdb_service.py new file mode 100644 index 00000000..ff0a51e5 --- /dev/null +++ b/ClientAdvisor/App/tests/backend/history/test_cosmosdb_service.py @@ -0,0 +1,184 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from azure.cosmos import exceptions + +from backend.history.cosmosdbservice import CosmosConversationClient + + +# Helper function to create an async iterable +class AsyncIterator: + def __init__(self, items): + self.items = items + self.index = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + if self.index < len(self.items): + item = self.items[self.index] + self.index += 1 + return item + else: + raise StopAsyncIteration + + +@pytest.fixture +def cosmos_client(): + return CosmosConversationClient( + cosmosdb_endpoint="https://fake.endpoint", + credential="fake_credential", + database_name="test_db", + container_name="test_container", + ) + + +@pytest.mark.asyncio +async def test_init_invalid_credentials(): + with patch( + "azure.cosmos.aio.CosmosClient.__init__", + side_effect=exceptions.CosmosHttpResponseError( + status_code=401, message="Unauthorized" + ), + ): + with pytest.raises(ValueError, match="Invalid credentials"): + CosmosConversationClient( + cosmosdb_endpoint="https://fake.endpoint", + credential="fake_credential", + database_name="test_db", + container_name="test_container", + ) + + +@pytest.mark.asyncio +async def test_init_invalid_endpoint(): + with patch( + "azure.cosmos.aio.CosmosClient.__init__", + side_effect=exceptions.CosmosHttpResponseError( + status_code=404, message="Not Found" + ), + ): + with pytest.raises(ValueError, match="Invalid CosmosDB endpoint"): + CosmosConversationClient( + cosmosdb_endpoint="https://fake.endpoint", + credential="fake_credential", + database_name="test_db", + container_name="test_container", + ) + + +@pytest.mark.asyncio +async def test_ensure_success(cosmos_client): + cosmos_client.database_client.read = AsyncMock() + cosmos_client.container_client.read = AsyncMock() + success, message = await cosmos_client.ensure() + assert success + assert message == "CosmosDB client initialized successfully" + + +@pytest.mark.asyncio +async def test_ensure_failure(cosmos_client): + cosmos_client.database_client.read = AsyncMock(side_effect=Exception) + success, message = await cosmos_client.ensure() + assert not success + assert "CosmosDB database" in message + + +@pytest.mark.asyncio +async def 
test_create_conversation(cosmos_client): + cosmos_client.container_client.upsert_item = AsyncMock(return_value={"id": "123"}) + response = await cosmos_client.create_conversation("user_1", "Test Conversation") + assert response["id"] == "123" + + +@pytest.mark.asyncio +async def test_create_conversation_failure(cosmos_client): + cosmos_client.container_client.upsert_item = AsyncMock(return_value=None) + response = await cosmos_client.create_conversation("user_1", "Test Conversation") + assert not response + + +@pytest.mark.asyncio +async def test_upsert_conversation(cosmos_client): + cosmos_client.container_client.upsert_item = AsyncMock(return_value={"id": "123"}) + response = await cosmos_client.upsert_conversation({"id": "123"}) + assert response["id"] == "123" + + +@pytest.mark.asyncio +async def test_delete_conversation(cosmos_client): + cosmos_client.container_client.read_item = AsyncMock(return_value={"id": "123"}) + cosmos_client.container_client.delete_item = AsyncMock(return_value=True) + response = await cosmos_client.delete_conversation("user_1", "123") + assert response + + +@pytest.mark.asyncio +async def test_delete_conversation_not_found(cosmos_client): + cosmos_client.container_client.read_item = AsyncMock(return_value=None) + response = await cosmos_client.delete_conversation("user_1", "123") + assert response + + +@pytest.mark.asyncio +async def test_delete_messages(cosmos_client): + cosmos_client.get_messages = AsyncMock( + return_value=[{"id": "msg_1"}, {"id": "msg_2"}] + ) + cosmos_client.container_client.delete_item = AsyncMock(return_value=True) + response = await cosmos_client.delete_messages("conv_1", "user_1") + assert len(response) == 2 + + +@pytest.mark.asyncio +async def test_get_conversations(cosmos_client): + items = [{"id": "conv_1"}, {"id": "conv_2"}] + cosmos_client.container_client.query_items = MagicMock( + return_value=AsyncIterator(items) + ) + response = await cosmos_client.get_conversations("user_1", 10) + assert len(response) == 2 + assert response[0]["id"] == "conv_1" + assert response[1]["id"] == "conv_2" + + +@pytest.mark.asyncio +async def test_get_conversation(cosmos_client): + items = [{"id": "conv_1"}] + cosmos_client.container_client.query_items = MagicMock( + return_value=AsyncIterator(items) + ) + response = await cosmos_client.get_conversation("user_1", "conv_1") + assert response["id"] == "conv_1" + + +@pytest.mark.asyncio +async def test_create_message(cosmos_client): + cosmos_client.container_client.upsert_item = AsyncMock(return_value={"id": "msg_1"}) + cosmos_client.get_conversation = AsyncMock(return_value={"id": "conv_1"}) + cosmos_client.upsert_conversation = AsyncMock() + response = await cosmos_client.create_message( + "msg_1", "conv_1", "user_1", {"role": "user", "content": "Hello"} + ) + assert response["id"] == "msg_1" + + +@pytest.mark.asyncio +async def test_update_message_feedback(cosmos_client): + cosmos_client.container_client.read_item = AsyncMock(return_value={"id": "msg_1"}) + cosmos_client.container_client.upsert_item = AsyncMock(return_value={"id": "msg_1"}) + response = await cosmos_client.update_message_feedback( + "user_1", "msg_1", "positive" + ) + assert response["id"] == "msg_1" + + +@pytest.mark.asyncio +async def test_get_messages(cosmos_client): + items = [{"id": "msg_1"}, {"id": "msg_2"}] + cosmos_client.container_client.query_items = MagicMock( + return_value=AsyncIterator(items) + ) + response = await cosmos_client.get_messages("user_1", "conv_1") + assert len(response) == 2 diff --git 
a/ClientAdvisor/App/tests/backend/test_utils.py b/ClientAdvisor/App/tests/backend/test_utils.py new file mode 100644 index 00000000..1585cd7f --- /dev/null +++ b/ClientAdvisor/App/tests/backend/test_utils.py @@ -0,0 +1,160 @@ +import dataclasses +import json +from unittest.mock import MagicMock, patch + +import pytest + +from backend.utils import (JSONEncoder, convert_to_pf_format, fetchUserGroups, + format_as_ndjson, format_non_streaming_response, + format_pf_non_streaming_response, + format_stream_response, generateFilterString, + parse_multi_columns) + + +@dataclasses.dataclass +class TestDataClass: + field1: int + field2: str + + +def test_json_encoder(): + obj = TestDataClass(1, "test") + encoded = json.dumps(obj, cls=JSONEncoder) + assert json.loads(encoded) == {"field1": 1, "field2": "test"} + + +# Test parse_multi_columns with edge cases +@pytest.mark.parametrize( + "input_str, expected", + [ + ("col1|col2|col3", ["col1", "col2", "col3"]), + ("col1,col2,col3", ["col1", "col2", "col3"]), + ("col1", ["col1"]), + ("", [""]), + ], +) +def test_parse_multi_columns(input_str, expected): + assert parse_multi_columns(input_str) == expected + + +@patch("app.requests.get") +def test_fetch_user_groups(mock_get): + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = {"value": [{"id": "group1"}]} + mock_get.return_value = mock_response + + user_groups = fetchUserGroups("fake_token") + assert user_groups == [{"id": "group1"}] + + # Test with nextLink + mock_response.json.return_value = { + "value": [{"id": "group1"}], + "@odata.nextLink": "next_link", + } + mock_get.side_effect = [mock_response, mock_response] + user_groups = fetchUserGroups("fake_token") + assert user_groups == [{"id": "group1"}, {"id": "group1"}] + + +@patch("backend.utils.fetchUserGroups") +@patch("backend.utils.AZURE_SEARCH_PERMITTED_GROUPS_COLUMN", "your_column") +def test_generate_filter_string(mock_fetch_user_groups): + mock_fetch_user_groups.return_value = [{"id": "group1"}, {"id": "group2"}] + filter_string = generateFilterString("fake_token") + assert filter_string == "your_column/any(g:search.in(g, 'group1, group2'))" + + +@pytest.mark.asyncio +async def test_format_as_ndjson(): + async def async_gen(): + yield {"event": "test"} + + r = async_gen() + result = [item async for item in format_as_ndjson(r)] + assert result == ['{"event": "test"}\n'] + + +def test_format_non_streaming_response(): + # Create a mock chatCompletion object with the necessary attributes + chatCompletion = MagicMock() + chatCompletion.id = "id" + chatCompletion.model = "model" + chatCompletion.created = "created" + chatCompletion.object = "object" + + # Create a mock choice object with a message attribute + choice = MagicMock() + choice.message = MagicMock() + choice.message.content = "content" + choice.message.context = {"key": "value"} + + # Assign the choice to the choices list + chatCompletion.choices = [choice] + + # Call the function with the mock object + response = format_non_streaming_response(chatCompletion, "history", "request_id") + + # Assert the response structure + assert response["id"] == "id" + assert response["choices"][0]["messages"][0]["content"] == '{"key": "value"}' + assert response["choices"][0]["messages"][1]["content"] == "content" + + +# Test format_stream_response with edge cases +def test_format_stream_response(): + # Create a mock chatCompletionChunk object with the necessary attributes + chatCompletionChunk = MagicMock() + chatCompletionChunk.id = "id" + 
chatCompletionChunk.model = "model" + chatCompletionChunk.created = "created" + chatCompletionChunk.object = "object" + + # Create a mock choice object with a delta attribute + choice = MagicMock() + choice.delta = MagicMock() + choice.delta.content = "content" + choice.delta.context = {"key": "value"} + choice.delta.role = "assistant" + + # Assign the choice to the choices list + chatCompletionChunk.choices = [choice] + + # Call the function with the mock object + response = format_stream_response(chatCompletionChunk, "history", "request_id") + + # Assert the response structure + assert response["id"] == "id" + assert response["choices"][0]["messages"][0]["content"] == '{"key": "value"}' + + +# Test format_pf_non_streaming_response with edge cases +def test_format_pf_non_streaming_response(): + chatCompletion = { + "id": "id", + "response_field": "response", + "citations_field": "citations", + } + response = format_pf_non_streaming_response( + chatCompletion, "history", "response_field", "citations_field" + ) + + assert response["choices"][0]["messages"][0]["content"] == "response" + assert response["choices"][0]["messages"][1]["content"] == "citations" + + +# Test convert_to_pf_format with edge cases +def test_convert_to_pf_format(): + input_json = { + "messages": [ + {"role": "user", "content": "user message"}, + {"role": "assistant", "content": "assistant message"}, + ] + } + output_json = convert_to_pf_format(input_json, "request_field", "response_field") + assert output_json == [ + { + "inputs": {"request_field": "user message"}, + "outputs": {"response_field": "assistant message"}, + } + ] diff --git a/ClientAdvisor/App/tests/test_app.py b/ClientAdvisor/App/tests/test_app.py new file mode 100644 index 00000000..c0202a5b --- /dev/null +++ b/ClientAdvisor/App/tests/test_app.py @@ -0,0 +1,1388 @@ +import json +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from app import (create_app, delete_all_conversations, generate_title, + init_cosmosdb_client, init_openai_client, stream_chat_request) + +# Constants for testing +INVALID_API_VERSION = "2022-01-01" +INVALID_API_KEY = None +CHAT_HISTORY_ENABLED = True +AZURE_COSMOSDB_ACCOUNT = "test_account" +AZURE_COSMOSDB_ACCOUNT_KEY = "test_key" +AZURE_COSMOSDB_DATABASE = "test_database" +AZURE_COSMOSDB_CONVERSATIONS_CONTAINER = "test_container" +AZURE_COSMOSDB_ENABLE_FEEDBACK = True + + +@pytest.fixture(autouse=True) +def set_env_vars(): + with patch("app.AZURE_OPENAI_PREVIEW_API_VERSION", "2024-02-15-preview"), patch( + "app.AZURE_OPENAI_ENDPOINT", "https://example.com/" + ), patch("app.AZURE_OPENAI_MODEL", "openai_model"), patch( + "app.CHAT_HISTORY_ENABLED", True + ), patch( + "app.AZURE_COSMOSDB_ACCOUNT", "test_account" + ), patch( + "app.AZURE_COSMOSDB_ACCOUNT_KEY", "test_key" + ), patch( + "app.AZURE_COSMOSDB_DATABASE", "test_database" + ), patch( + "app.AZURE_COSMOSDB_CONVERSATIONS_CONTAINER", "test_container" + ), patch( + "app.AZURE_COSMOSDB_ENABLE_FEEDBACK", True + ), patch( + "app.AZURE_OPENAI_KEY", "valid_key" + ): + yield + + +@pytest.fixture +def flaskapp(): + """Create a test client for the app.""" + return create_app() + + +@pytest.fixture +def client(flaskapp): + """Create a test client for the flaskapp.""" + return flaskapp.test_client() + + +def test_create_app(): + app = create_app() + assert app is not None + assert app.name == "app" + assert "routes" in app.blueprints + + +@patch("app.get_bearer_token_provider") +@patch("app.AsyncAzureOpenAI") +def test_init_openai_client(mock_async_openai, 
mock_token_provider): + mock_token_provider.return_value = MagicMock() + mock_async_openai.return_value = MagicMock() + + client = init_openai_client() + assert client is not None + mock_async_openai.assert_called_once() + + +@patch("app.CosmosConversationClient") +def test_init_cosmosdb_client(mock_cosmos_client): + mock_cosmos_client.return_value = MagicMock() + + client = init_cosmosdb_client() + assert client is not None + mock_cosmos_client.assert_called_once() + + +@pytest.mark.asyncio +@patch("app.render_template") +async def test_index(mock_render_template, client): + mock_render_template.return_value = "index" + response = await client.get("/") + assert response.status_code == 200 + mock_render_template.assert_called_once_with( + "index.html", title="Woodgrove Bank", favicon="/favicon.ico" + ) + + +@pytest.mark.asyncio +@patch("app.bp.send_static_file") +async def test_favicon(mock_send_static_file, client): + mock_send_static_file.return_value = "favicon" + response = await client.get("/favicon.ico") + assert response.status_code == 200 + mock_send_static_file.assert_called_once_with("favicon.ico") + + +@pytest.mark.asyncio +async def test_get_pbiurl(client): + with patch("app.VITE_POWERBI_EMBED_URL", "mocked_url"): + response = await client.get("/api/pbi") + res_text = await response.get_data(as_text=True) + assert response.status_code == 200 + assert res_text == "mocked_url" + + +@pytest.mark.asyncio +async def test_ensure_cosmos_not_configured(client): + with patch("app.AZURE_COSMOSDB_ACCOUNT", ""): + response = await client.get("/history/ensure") + res_text = await response.get_data(as_text=True) + assert response.status_code == 404 + assert json.loads(res_text) == {"error": "CosmosDB is not configured"} + + +@pytest.mark.asyncio +@patch("app.init_cosmosdb_client") +async def test_ensure_cosmos_success(mock_init_cosmosdb_client, client): + mock_client = AsyncMock() + mock_client.ensure.return_value = (True, None) + mock_init_cosmosdb_client.return_value = mock_client + + response = await client.get("/history/ensure") + res_text = await response.get_data(as_text=True) + assert response.status_code == 200 + assert json.loads(res_text) == {"message": "CosmosDB is configured and working"} + mock_client.cosmosdb_client.close.assert_called_once() + + +@pytest.mark.asyncio +@patch("app.init_cosmosdb_client") +async def test_ensure_cosmos_failure(mock_init_cosmosdb_client, client): + mock_client = AsyncMock() + mock_client.ensure.return_value = (False, "Some error") + mock_init_cosmosdb_client.return_value = mock_client + + response = await client.get("/history/ensure") + res_text = await response.get_data(as_text=True) + assert response.status_code == 422 + assert json.loads(res_text) == {"error": "Some error"} + + +@pytest.mark.asyncio +@patch("app.init_cosmosdb_client") +async def test_ensure_cosmos_exception(mock_init_cosmosdb_client, client): + mock_init_cosmosdb_client.side_effect = Exception("Invalid credentials") + + response = await client.get("/history/ensure") + assert response.status_code == 401 + res_text = await response.get_data(as_text=True) + assert json.loads(res_text) == {"error": "Invalid credentials"} + + +@pytest.mark.asyncio +@patch("app.init_cosmosdb_client") +async def test_ensure_cosmos_invalid_db_name(mock_init_cosmosdb_client, client): + with patch("app.AZURE_COSMOSDB_DATABASE", "your_db_name"), patch( + "app.AZURE_COSMOSDB_ACCOUNT", "your_account" + ): + mock_init_cosmosdb_client.side_effect = Exception( + "Invalid CosmosDB database name" + ) + + response 
= await client.get("/history/ensure") + assert response.status_code == 422 + res_text = await response.get_data(as_text=True) + assert json.loads(res_text) == { + "error": "Invalid CosmosDB database name your_db_name for account your_account" + } + + +@pytest.mark.asyncio +@patch("app.init_cosmosdb_client") +async def test_ensure_cosmos_invalid_container_name(mock_init_cosmosdb_client, client): + with patch("app.AZURE_COSMOSDB_CONVERSATIONS_CONTAINER", "your_container_name"): + mock_init_cosmosdb_client.side_effect = Exception( + "Invalid CosmosDB container name" + ) + + response = await client.get("/history/ensure") + assert response.status_code == 422 + res_text = await response.get_data(as_text=True) + assert json.loads(res_text) == { + "error": "Invalid CosmosDB container name: your_container_name" + } + + +@pytest.mark.asyncio +@patch("app.init_cosmosdb_client") +async def test_ensure_cosmos_generic_exception(mock_init_cosmosdb_client, client): + mock_init_cosmosdb_client.side_effect = Exception("Some other error") + + response = await client.get("/history/ensure") + assert response.status_code == 500 + res_text = await response.get_data(as_text=True) + assert json.loads(res_text) == {"error": "CosmosDB is not working"} + + +@pytest.mark.asyncio +async def test_get_users_success(client): + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchall.return_value = [ + { + "ClientId": 1, + "ndays": 10, + "Client": "Client A", + "Email": "clienta@example.com", + "AssetValue": "1,000,000", + "ClientSummary": "Summary A", + "LastMeetingDateFormatted": "Monday January 1, 2023", + "LastMeetingStartTime": "10:00 AM", + "LastMeetingEndTime": "10:30 AM", + "NextMeetingFormatted": "Monday January 8, 2023", + "NextMeetingStartTime": "11:00 AM", + "NextMeetingEndTime": "11:30 AM", + } + ] + + with patch("app.get_connection", return_value=mock_conn): + response = await client.get("/api/users") + assert response.status_code == 200 + res_text = await response.get_data(as_text=True) + assert json.loads(res_text) == [ + { + "ClientId": 1, + "ClientName": "Client A", + "ClientEmail": "clienta@example.com", + "AssetValue": "1,000,000", + "NextMeeting": "Monday January 8, 2023", + "NextMeetingTime": "11:00 AM", + "NextMeetingEndTime": "11:30 AM", + "LastMeeting": "Monday January 1, 2023", + "LastMeetingStartTime": "10:00 AM", + "LastMeetingEndTime": "10:30 AM", + "ClientSummary": "Summary A", + } + ] + + +@pytest.mark.asyncio +async def test_get_users_no_users(client): + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchall.return_value = [] + + with patch("app.get_connection", return_value=mock_conn): + response = await client.get("/api/users") + assert response.status_code == 200 + res_text = await response.get_data(as_text=True) + assert json.loads(res_text) == [] + + +@pytest.mark.asyncio +async def test_get_users_sql_execution_failure(client): + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_conn.cursor.return_value = mock_cursor + mock_cursor.execute.side_effect = Exception("SQL execution failed") + + with patch("app.get_connection", return_value=mock_conn): + response = await client.get("/api/users") + assert response.status_code == 500 + res_text = await response.get_data(as_text=True) + assert "SQL execution failed" in res_text + + +@pytest.fixture +def mock_request_headers(): + return {"Authorization": "Bearer test_token"} + + +@pytest.mark.asyncio 
+@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_clear_messages_success( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + # Mocking user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + # Mocking CosmosDB client + mock_cosmos_client = MagicMock() + mock_cosmos_client.delete_messages = AsyncMock(return_value=None) + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + async with create_app().test_request_context( + "/history/clear", method="POST", headers=mock_request_headers + ): + response = await client.post( + "/history/clear", json={"conversation_id": "12345"} + ) + assert response.status_code == 200 + assert await response.get_json() == { + "message": "Successfully deleted messages in conversation", + "conversation_id": "12345", + } + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_clear_messages_missing_conversation_id( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + # Mocking user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + async with create_app().test_request_context( + "/history/clear", method="POST", headers=mock_request_headers + ): + response = await client.post("/history/clear", json={}) + assert response.status_code == 400 + assert await response.get_json() == {"error": "conversation_id is required"} + + +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +@pytest.mark.asyncio +async def test_clear_messages_cosmos_not_configured( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + # Mocking user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + # Mocking CosmosDB client to return None + mock_init_cosmosdb_client.return_value = None + + async with create_app().test_request_context( + "/history/clear", method="POST", headers=mock_request_headers + ): + response = await client.post( + "/history/clear", json={"conversation_id": "12345"} + ) + assert response.status_code == 500 + res_text = await response.get_data(as_text=True) + assert "CosmosDB is not configured or not working" in res_text + + +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +@pytest.mark.asyncio +async def test_clear_messages_exception( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + # Mocking user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + # Mocking CosmosDB client to raise an exception + mock_cosmos_client = MagicMock() + mock_cosmos_client.delete_messages = AsyncMock(side_effect=Exception("Some error")) + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + async with create_app().test_request_context( + "/history/clear", method="POST", headers=mock_request_headers + ): + response = await client.post( + "/history/clear", json={"conversation_id": "12345"} + ) + assert response.status_code == 500 + res_text = await response.get_data(as_text=True) + assert "Some error" in res_text + + +@pytest.fixture +def mock_cosmos_conversation_client(): + client = MagicMock() + client.get_conversations = AsyncMock() + client.delete_messages = AsyncMock() + client.delete_conversation = 
AsyncMock() + client.cosmosdb_client.close = AsyncMock() + return client + + +@pytest.fixture +def mock_authenticated_user(): + return {"user_principal_id": "test_user_id"} + + +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +@pytest.mark.asyncio +async def test_delete_all_conversations_success( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + mock_authenticated_user, + mock_cosmos_conversation_client, +): + mock_get_authenticated_user_details.return_value = mock_authenticated_user + mock_init_cosmosdb_client.return_value = mock_cosmos_conversation_client + mock_cosmos_conversation_client.get_conversations.return_value = [ + {"id": "conv1"}, + {"id": "conv2"}, + ] + + async with create_app().test_request_context( + "/history/delete_all", method="DELETE", headers=mock_request_headers + ): + response, status_code = await delete_all_conversations() + response_json = await response.get_json() + + assert status_code == 200 + assert response_json == { + "message": "Successfully deleted conversation and messages for user test_user_id" + } + mock_cosmos_conversation_client.get_conversations.assert_called_once_with( + "test_user_id", offset=0, limit=None + ) + mock_cosmos_conversation_client.delete_messages.assert_any_await( + "conv1", "test_user_id" + ) + mock_cosmos_conversation_client.delete_messages.assert_any_await( + "conv2", "test_user_id" + ) + mock_cosmos_conversation_client.delete_conversation.assert_any_await( + "test_user_id", "conv1" + ) + mock_cosmos_conversation_client.delete_conversation.assert_any_await( + "test_user_id", "conv2" + ) + mock_cosmos_conversation_client.cosmosdb_client.close.assert_awaited_once() + + +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +@pytest.mark.asyncio +async def test_delete_all_conversations_no_conversations( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + mock_authenticated_user, + mock_cosmos_conversation_client, +): + mock_get_authenticated_user_details.return_value = mock_authenticated_user + mock_init_cosmosdb_client.return_value = mock_cosmos_conversation_client + mock_cosmos_conversation_client.get_conversations.return_value = [] + + async with create_app().test_request_context( + "/history/delete_all", method="DELETE", headers=mock_request_headers + ): + response, status_code = await delete_all_conversations() + response_json = await response.get_json() + + assert status_code == 404 + assert response_json == {"error": "No conversations for test_user_id were found"} + mock_cosmos_conversation_client.get_conversations.assert_called_once_with( + "test_user_id", offset=0, limit=None + ) + mock_cosmos_conversation_client.delete_messages.assert_not_called() + mock_cosmos_conversation_client.delete_conversation.assert_not_called() + + +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +@pytest.mark.asyncio +async def test_delete_all_conversations_cosmos_not_configured( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + mock_authenticated_user, +): + mock_get_authenticated_user_details.return_value = mock_authenticated_user + mock_init_cosmosdb_client.return_value = None + + async with create_app().test_request_context( + "/history/delete_all", method="DELETE", headers=mock_request_headers + ): + response, status_code = await delete_all_conversations() + response_json = await response.get_json() + + assert status_code == 
500 + assert response_json == {"error": "CosmosDB is not configured or not working"} + mock_init_cosmosdb_client.assert_called_once() + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_rename_conversation( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + + # Mocking authenticated user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user_123"} + + # Mocking CosmosDB client and its methods + mock_cosmos_conversation_client = AsyncMock() + mock_cosmos_conversation_client.get_conversation = AsyncMock( + return_value={"id": "123", "title": "Old Title"} + ) + mock_cosmos_conversation_client.upsert_conversation = AsyncMock( + return_value={"id": "123", "title": "New Title"} + ) + mock_init_cosmosdb_client.return_value = mock_cosmos_conversation_client + + async with create_app().test_request_context( + "/history/rename", method="POST", headers=mock_request_headers + ): + response = await client.post( + "/history/rename", json={"conversation_id": "123", "title": "New Title"} + ) + response_json = await response.get_json() + + # Assertions + assert response.status_code == 200 + assert response_json == {"id": "123", "title": "New Title"} + + # Ensure the CosmosDB client methods were called correctly + mock_cosmos_conversation_client.get_conversation.assert_called_once_with( + "user_123", "123" + ) + mock_cosmos_conversation_client.upsert_conversation.assert_called_once_with( + {"id": "123", "title": "New Title"} + ) + mock_cosmos_conversation_client.cosmosdb_client.close.assert_called_once() + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +async def test_rename_conversation_missing_conversation_id( + mock_get_authenticated_user_details, mock_request_headers, client +): + async with create_app().test_request_context( + "/history/rename", method="POST", headers=mock_request_headers + ): + response = await client.post("/history/rename", json={"title": "New Title"}) + response_json = await response.get_json() + + assert response.status_code == 400 + assert response_json == {"error": "conversation_id is required"} + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_rename_conversation_missing_title( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + # Mocking authenticated user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + # Mocking CosmosDB client and its methods + mock_cosmos_client = MagicMock() + mock_cosmos_client.get_conversation = AsyncMock( + return_value={"id": "123", "title": "Old Title"} + ) + mock_cosmos_client.upsert_conversation = AsyncMock( + return_value={"id": "123", "title": "New Title"} + ) + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + async with create_app().test_request_context( + "/history/rename", method="POST", headers=mock_request_headers + ): + response = await client.post("/history/rename", json={"conversation_id": "123"}) + response_json = await response.get_json() + + assert response.status_code == 400 + assert response_json == {"error": "title is required"} + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_rename_conversation_not_found( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + 
mock_request_headers, + client, +): + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + mock_cosmos_client = MagicMock() + mock_cosmos_client.get_conversation = AsyncMock(return_value=None) + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + async with create_app().test_request_context( + "/history/rename", method="POST", headers=mock_request_headers + ): + response = await client.post( + "/history/rename", json={"conversation_id": "123", "title": "New Title"} + ) + response_json = await response.get_json() + + assert response.status_code == 404 + assert response_json == { + "error": "Conversation 123 was not found. It either does not exist or the logged in user does not have access to it." + } + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_get_conversation_success( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + # Mocking the authenticated user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + # Mocking the CosmosDB client and its methods + mock_cosmos_client = AsyncMock() + mock_cosmos_client.get_conversation.return_value = {"id": "12345"} + mock_cosmos_client.get_messages.return_value = [ + { + "id": "msg1", + "role": "user", + "content": "Hello", + "createdAt": "2024-10-01T00:00:00Z", + } + ] + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + async with create_app().test_request_context( + "/history/read", method="POST", headers=mock_request_headers + ): + response = await client.post("/history/read", json={"conversation_id": "12345"}) + response_json = await response.get_json() + + assert response.status_code == 200 + assert response_json == { + "conversation_id": "12345", + "messages": [ + { + "id": "msg1", + "role": "user", + "content": "Hello", + "createdAt": "2024-10-01T00:00:00Z", + "feedback": None, + } + ], + } + + +@pytest.mark.asyncio +async def test_get_conversation_missing_conversation_id( + mock_request_headers, + client, +): + async with create_app().test_request_context( + "/history/read", method="POST", headers=mock_request_headers + ): + response = await client.post("/history/read", json={}) + response_json = await response.get_json() + + assert response.status_code == 400 + assert response_json == {"error": "conversation_id is required"} + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_get_conversation_not_found( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + mock_cosmos_client = AsyncMock() + mock_cosmos_client.get_conversation.return_value = None + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + async with create_app().test_request_context( + "/history/read", method="POST", headers=mock_request_headers + ): + response = await client.post("/history/read", json={"conversation_id": "12345"}) + response_json = await response.get_json() + + assert response.status_code == 404 + assert response_json == { + "error": "Conversation 12345 was not found. It either does not exist or the logged in user does not have access to it." 
+ } + + +@pytest.mark.asyncio +@patch("app.init_cosmosdb_client") +@patch("app.get_authenticated_user_details") +async def test_list_conversations_success( + mock_get_user_details, mock_init_cosmosdb_client, client +): + mock_get_user_details.return_value = {"user_principal_id": "test_user"} + mock_cosmos_client = AsyncMock() + mock_cosmos_client.get_conversations.return_value = [{"id": "1"}, {"id": "2"}] + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + response = await client.get("/history/list") + assert response.status_code == 200 + assert await response.get_json() == [{"id": "1"}, {"id": "2"}] + + +@pytest.mark.asyncio +@patch("app.init_cosmosdb_client") +@patch("app.get_authenticated_user_details") +async def test_list_conversations_no_cosmos_client( + mock_get_user_details, mock_init_cosmosdb_client, client +): + mock_get_user_details.return_value = {"user_principal_id": "test_user"} + mock_init_cosmosdb_client.return_value = None + + response = await client.get("/history/list") + assert response.status_code == 500 + + +@pytest.mark.asyncio +@patch("app.init_cosmosdb_client") +@patch("app.get_authenticated_user_details") +async def test_list_conversations_no_conversations( + mock_get_user_details, mock_init_cosmosdb_client, client +): + mock_get_user_details.return_value = {"user_principal_id": "test_user"} + mock_cosmos_client = AsyncMock() + mock_cosmos_client.get_conversations.return_value = None + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + response = await client.get("/history/list") + assert response.status_code == 404 + assert await response.get_json() == { + "error": "No conversations for test_user were found" + } + + +@pytest.mark.asyncio +@patch("app.init_cosmosdb_client") +@patch("app.get_authenticated_user_details") +async def test_list_conversations_invalid_response( + mock_get_user_details, mock_init_cosmosdb_client, client +): + mock_get_user_details.return_value = {"user_principal_id": "test_user"} + mock_cosmos_client = AsyncMock() + mock_cosmos_client.get_conversations.return_value = None + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + response = await client.get("/history/list") + assert response.status_code == 404 + assert await response.get_json() == { + "error": "No conversations for test_user were found" + } + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_delete_conversation_success( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + # Mocking authenticated user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + # Mocking CosmosDB client + mock_cosmos_client = MagicMock() + mock_cosmos_client.delete_messages = AsyncMock() + mock_cosmos_client.delete_conversation = AsyncMock() + mock_cosmos_client.cosmosdb_client.close = AsyncMock() + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + async with create_app().test_request_context( + "/history/delete", method="DELETE", headers=mock_request_headers + ): + response = await client.delete( + "/history/delete", json={"conversation_id": "12345"} + ) + response_json = await response.get_json() + + assert response.status_code == 200 + assert response_json == { + "message": "Successfully deleted conversation and messages", + "conversation_id": "12345", + } + mock_cosmos_client.delete_messages.assert_called_once_with("12345", "user123") + 
mock_cosmos_client.delete_conversation.assert_called_once_with("user123", "12345") + mock_cosmos_client.cosmosdb_client.close.assert_called_once() + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +async def test_delete_conversation_missing_conversation_id( + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + # Mocking authenticated user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + async with create_app().test_request_context( + "/history/delete", method="DELETE", headers=mock_request_headers + ): + response = await client.delete("/history/delete", json={}) + response_json = await response.get_json() + + assert response.status_code == 400 + assert response_json == {"error": "conversation_id is required"} + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_delete_conversation_cosmos_not_configured( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + # Mocking authenticated user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + # Mocking CosmosDB client not being configured + mock_init_cosmosdb_client.return_value = None + + async with create_app().test_request_context( + "/history/delete", method="DELETE", headers=mock_request_headers + ): + response = await client.delete( + "/history/delete", json={"conversation_id": "12345"} + ) + response_json = await response.get_json() + + assert response.status_code == 500 + assert response_json == {"error": "CosmosDB is not configured or not working"} + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_delete_conversation_exception( + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + mock_request_headers, + client, +): + # Mocking authenticated user details + mock_get_authenticated_user_details.return_value = {"user_principal_id": "user123"} + + # Mocking CosmosDB client to raise an exception + mock_cosmos_client = MagicMock() + mock_cosmos_client.delete_messages = AsyncMock( + side_effect=Exception("Test exception") + ) + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + async with create_app().test_request_context( + "/history/delete", method="DELETE", headers=mock_request_headers + ): + response = await client.delete( + "/history/delete", json={"conversation_id": "12345"} + ) + response_json = await response.get_json() + + assert response.status_code == 500 + assert response_json == {"error": "Test exception"} + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_update_message_success( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + mock_get_authenticated_user_details.return_value = { + "user_principal_id": "test_user" + } + mock_cosmos_client = AsyncMock() + mock_cosmos_client.update_message_feedback.return_value = True + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + response = await client.post( + "/history/message_feedback", + json={"message_id": "123", "message_feedback": "positive"}, + ) + + assert response.status_code == 200 + assert await response.get_json() == { + "message": "Successfully updated message with feedback positive", + "message_id": "123", + } + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") 
+@patch("app.init_cosmosdb_client") +async def test_update_message_missing_message_id( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + response = await client.post( + "/history/message_feedback", json={"message_feedback": "positive"} + ) + + assert response.status_code == 400 + assert await response.get_json() == {"error": "message_id is required"} + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_update_message_missing_message_feedback( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + response = await client.post( + "/history/message_feedback", json={"message_id": "123"} + ) + + assert response.status_code == 400 + assert await response.get_json() == {"error": "message_feedback is required"} + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_update_message_not_found( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + mock_get_authenticated_user_details.return_value = { + "user_principal_id": "test_user" + } + mock_cosmos_client = AsyncMock() + mock_cosmos_client.update_message_feedback.return_value = False + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + response = await client.post( + "/history/message_feedback", + json={"message_id": "123", "message_feedback": "positive"}, + ) + + assert response.status_code == 404 + assert await response.get_json() == { + "error": "Unable to update message 123. It either does not exist or the user does not have access to it." + } + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_update_message_exception( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + mock_get_authenticated_user_details.return_value = { + "user_principal_id": "test_user" + } + mock_cosmos_client = AsyncMock() + mock_cosmos_client.update_message_feedback.side_effect = Exception("Test exception") + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + response = await client.post( + "/history/message_feedback", + json={"message_id": "123", "message_feedback": "positive"}, + ) + + assert response.status_code == 500 + assert await response.get_json() == {"error": "Test exception"} + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_update_conversation_success( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + mock_get_authenticated_user_details.return_value = { + "user_principal_id": "test_user_id" + } + mock_request_json = { + "conversation_id": "test_conversation_id", + "messages": [ + {"role": "tool", "content": "tool message"}, + { + "role": "assistant", + "id": "assistant_message_id", + "content": "assistant message", + }, + ], + } + + mock_cosmos_client = AsyncMock() + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + response = await client.post("/history/update", json=mock_request_json) + res_json = await response.get_json() + assert response.status_code == 200 + assert res_json == {"success": True} + mock_cosmos_client.create_message.assert_called() + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_update_conversation_no_conversation_id( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + 
mock_get_authenticated_user_details.return_value = { + "user_principal_id": "test_user_id" + } + mock_request_json = { + "messages": [ + {"role": "tool", "content": "tool message"}, + { + "role": "assistant", + "id": "assistant_message_id", + "content": "assistant message", + }, + ] + } + + response = await client.post("/history/update", json=mock_request_json) + res_json = await response.get_json() + assert response.status_code == 500 + assert "No conversation_id found" in res_json["error"] + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_update_conversation_no_bot_messages( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + mock_get_authenticated_user_details.return_value = { + "user_principal_id": "test_user_id" + } + mock_request_json = { + "conversation_id": "test_conversation_id", + "messages": [{"role": "user", "content": "user message"}], + } + response = await client.post("/history/update", json=mock_request_json) + res_json = await response.get_json() + assert response.status_code == 500 + assert "No bot messages found" in res_json["error"] + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_update_conversation_cosmos_not_configured( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + mock_get_authenticated_user_details.return_value = { + "user_principal_id": "test_user_id" + } + mock_request_json = { + "conversation_id": "test_conversation_id", + "messages": [ + {"role": "tool", "content": "tool message"}, + { + "role": "assistant", + "id": "assistant_message_id", + "content": "assistant message", + }, + ], + } + + mock_init_cosmosdb_client.return_value = None + response = await client.post("/history/update", json=mock_request_json) + res_json = await response.get_json() + assert response.status_code == 500 + assert "CosmosDB is not configured or not working" in res_json["error"] + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +@patch("app.generate_title") +@patch("app.conversation_internal") +async def test_add_conversation_success( + mock_conversation_internal, + mock_generate_title, + mock_init_cosmosdb_client, + mock_get_authenticated_user_details, + client, +): + mock_get_authenticated_user_details.return_value = { + "user_principal_id": "test_user" + } + mock_generate_title.return_value = "Test Title" + mock_cosmos_client = AsyncMock() + mock_cosmos_client.create_conversation.return_value = { + "id": "test_conversation_id", + "createdAt": "2024-10-01T00:00:00Z", + } + mock_cosmos_client.create_message.return_value = "Message Created" + mock_init_cosmosdb_client.return_value = mock_cosmos_client + mock_conversation_internal.return_value = "Chat response" + + response = await client.post( + "/history/generate", json={"messages": [{"role": "user", "content": "Hello"}]} + ) + + assert response.status_code == 200 + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_add_conversation_no_cosmos_config( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + mock_get_authenticated_user_details.return_value = { + "user_principal_id": "test_user" + } + mock_init_cosmosdb_client.return_value = None + + response = await client.post( + "/history/generate", json={"messages": [{"role": "user", "content": "Hello"}]} + ) + response_json = 
await response.get_json() + + assert response.status_code == 500 + assert "CosmosDB is not configured or not working" in response_json["error"] + + +@pytest.mark.asyncio +@patch("app.get_authenticated_user_details") +@patch("app.init_cosmosdb_client") +async def test_add_conversation_conversation_not_found( + mock_init_cosmosdb_client, mock_get_authenticated_user_details, client +): + mock_get_authenticated_user_details.return_value = { + "user_principal_id": "test_user" + } + mock_cosmos_client = AsyncMock() + mock_cosmos_client.create_message.return_value = "Conversation not found" + mock_init_cosmosdb_client.return_value = mock_cosmos_client + + response = await client.post( + "/history/generate", + json={ + "messages": [{"role": "user", "content": "Hello"}], + "conversation_id": "invalid_id", + }, + ) + response_json = await response.get_json() + + assert response.status_code == 500 + assert ( + "Conversation not found for the given conversation ID" in response_json["error"] + ) + + +@pytest.mark.asyncio +@patch("app.init_openai_client") +async def test_generate_title_success(mock_init_openai_client): + mock_openai_client = AsyncMock() + mock_openai_client.chat.completions.create.return_value = MagicMock( + choices=[ + MagicMock(message=MagicMock(content=json.dumps({"title": "Test Title"}))) + ] + ) + mock_init_openai_client.return_value = mock_openai_client + + conversation_messages = [{"role": "user", "content": "Hello"}] + title = await generate_title(conversation_messages) + assert title == "Test Title" + + +@pytest.mark.asyncio +@patch("app.init_openai_client") +async def test_generate_title_exception(mock_init_openai_client): + mock_openai_client = AsyncMock() + mock_openai_client.chat.completions.create.side_effect = Exception("API error") + mock_init_openai_client.return_value = mock_openai_client + + conversation_messages = [{"role": "user", "content": "Hello"}] + title = await generate_title(conversation_messages) + assert title == "Hello" + + +@pytest.mark.asyncio +async def test_conversation_route(client): + request_body = { + "history_metadata": {}, + "client_id": "test_client", + "messages": [{"content": "test query"}], + } + request_headers = {"apim-request-id": "test_id"} + + with patch("app.stream_chat_request", new_callable=AsyncMock) as mock_stream: + mock_stream.return_value = ["chunk1", "chunk2"] + with patch( + "app.complete_chat_request", new_callable=AsyncMock + ) as mock_complete: + mock_complete.return_value = {"response": "test response"} + response = await client.post( + "/conversation", json=request_body, headers=request_headers + ) + + assert response.status_code == 200 + + +@pytest.mark.asyncio +async def test_invalid_json_format(client): + request_body = "invalid json" + request_headers = {"apim-request-id": "test_id"} + + response = await client.post( + "/conversation", data=request_body, headers=request_headers + ) + response_json = await response.get_json() + assert response.status_code == 415 + assert response_json["error"] == "request must be json" + + +@pytest.mark.asyncio +async def test_timeout_in_stream_chat_request(client): + request_body = { + "history_metadata": {}, + "client_id": "test_client", + "messages": [{"content": "test query"}], + } + request_headers = {"apim-request-id": "test_id"} + + with patch("app.stream_chat_request", new_callable=AsyncMock) as mock_stream: + mock_stream.side_effect = TimeoutError("Timeout occurred") + response = await client.post( + "/conversation", json=request_body, headers=request_headers + ) + 
response_json = await response.get_json() + + assert response.status_code == 500 + assert response_json["error"] == "Timeout occurred" + + +@pytest.mark.asyncio +async def test_unexpected_exception(client): + request_body = { + "history_metadata": {}, + "client_id": "test_client", + "messages": [{"content": "test query"}], + } + request_headers = {"apim-request-id": "test_id"} + + with patch("app.stream_chat_request", new_callable=AsyncMock) as mock_stream: + mock_stream.side_effect = Exception("Unexpected error") + response = await client.post( + "/conversation", json=request_body, headers=request_headers + ) + response_json = await response.get_json() + + assert response.status_code == 500 + assert response_json["error"] == "Unexpected error" + + +# Helper function to create an async generator +async def async_generator(items): + for item in items: + yield item + + +# Mock object for delta +class MockDelta: + def __init__(self, role, context=None): + self.role = role + self.context = context + + +# Mock object for chatCompletionChunk +class MockChoice: + def __init__(self, messages, delta): + self.messages = messages + self.delta = delta + + +class MockChatCompletionChunk: + def __init__(self, id, model, created, object, choices): + self.id = id + self.model = model + self.created = created + self.object = object + self.choices = choices + + +@pytest.mark.asyncio +async def test_stream_chat_request_with_azurefunction(): + request_body = { + "history_metadata": {}, + "client_id": "test_client", + "messages": [{"content": "test query"}], + } + request_headers = {"apim-request-id": "test_id"} + + async with create_app().app_context(): + with patch.multiple( + "app", + USE_AZUREFUNCTION=True, + STREAMING_AZUREFUNCTION_ENDPOINT="http://example.com", + ): + with patch("httpx.AsyncClient.stream") as mock_stream: + mock_response = AsyncMock() + mock_response.__aenter__.return_value.aiter_text = ( + lambda: async_generator(["chunk1", "chunk2"]) + ) + mock_stream.return_value = mock_response + + generator = await stream_chat_request(request_body, request_headers) + chunks = [chunk async for chunk in generator] + + assert len(chunks) == 2 + assert "apim-request-id" in chunks[0] + + +@pytest.mark.asyncio +async def test_stream_chat_request_no_client_id(): + request_body = {"history_metadata": {}, "messages": [{"content": "test query"}]} + request_headers = {"apim-request-id": "test_id"} + + async with create_app().app_context(): + with patch("app.USE_AZUREFUNCTION", True): + response, status_code = await stream_chat_request( + request_body, request_headers + ) + assert status_code == 400 + response_json = await response.get_json() + assert response_json["error"] == "No client ID provided" + + +@pytest.mark.asyncio +async def test_stream_chat_request_without_azurefunction(): + request_body = { + "history_metadata": {}, + "client_id": "test_client", + "messages": [{"content": "test query"}], + } + request_headers = {"apim-request-id": "test_id"} + + with patch("app.USE_AZUREFUNCTION", False): + with patch("app.send_chat_request", new_callable=AsyncMock) as mock_send: + mock_send.return_value = ( + async_generator( + [ + MockChatCompletionChunk( + "id1", + "model1", + 1234567890, + "object1", + [ + MockChoice( + ["message1"], + MockDelta("assistant", {"key": "value"}), + ) + ], + ), + MockChatCompletionChunk( + "id2", + "model2", + 1234567891, + "object2", + [ + MockChoice( + ["message2"], + MockDelta("assistant", {"key": "value"}), + ) + ], + ), + ] + ), + "test_apim_request_id", + ) + generator = 
await stream_chat_request(request_body, request_headers) + chunks = [chunk async for chunk in generator] + + assert len(chunks) == 2 + assert "apim-request-id" in chunks[0] diff --git a/ClientAdvisor/App/tests/test_db.py b/ClientAdvisor/App/tests/test_db.py new file mode 100644 index 00000000..e0ac75c2 --- /dev/null +++ b/ClientAdvisor/App/tests/test_db.py @@ -0,0 +1,30 @@ +from unittest.mock import MagicMock, patch + +import db + +db.server = "mock_server" +db.username = "mock_user" +db.password = "mock_password" +db.database = "mock_database" + + +@patch("db.pymssql.connect") +def test_get_connection(mock_connect): + # Create a mock connection object + mock_conn = MagicMock() + mock_connect.return_value = mock_conn + + # Call the function + conn = db.get_connection() + + # Assert that pymssql.connect was called with the correct parameters + mock_connect.assert_called_once_with( + server="mock_server", + user="mock_user", + password="mock_password", + database="mock_database", + as_dict=True, + ) + + # Assert that the connection returned is the mock connection + assert conn == mock_conn diff --git a/ClientAdvisor/App/tools/data_collection.py b/ClientAdvisor/App/tools/data_collection.py index 901b8be2..c0bb184b 100644 --- a/ClientAdvisor/App/tools/data_collection.py +++ b/ClientAdvisor/App/tools/data_collection.py @@ -1,35 +1,38 @@ -import os -import sys import asyncio import json +import os +import sys from dotenv import load_dotenv -#import the app.py module to gain access to the methods to construct payloads and -#call the API through the sdk +import app + +# import the app.py module to gain access to the methods to construct payloads and +# call the API through the sdk # Add parent directory to sys.path -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) -import app -#function to enable loading of the .env file into the global variables of the app.py module +# function to enable loading of the .env file into the global variables of the app.py module + -def load_env_into_module(module_name, prefix=''): +def load_env_into_module(module_name, prefix=""): load_dotenv() module = __import__(module_name) for key, value in os.environ.items(): if key.startswith(prefix): - setattr(module, key[len(prefix):], value) + setattr(module, key[len(prefix) :], value) + load_env_into_module("app") -#some settings required in app.py +# some settings required in app.py app.SHOULD_STREAM = False app.SHOULD_USE_DATA = app.should_use_data() -#format: +# format: """ [ { @@ -40,71 +43,65 @@ def load_env_into_module(module_name, prefix=''): generated_data_path = r"path/to/qa_input_file.json" -with open(generated_data_path, 'r') as file: +with open(generated_data_path, "r") as file: data = json.load(file) """ Process a list of q(and a) pairs outputting to a file as we go. 
""" -async def process(data: list, file): - for qa_pairs_obj in data: - qa_pairs = qa_pairs_obj["qa_pairs"] - for qa_pair in qa_pairs: - question = qa_pair["question"] - messages = [{"role":"user", "content":question}] - print("processing question "+question) - request = {"messages":messages, "id":"1"} - - response = await app.complete_chat_request(request) - - #print(json.dumps(response)) - - messages = response["choices"][0]["messages"] - - tool_message = None - assistant_message = None - - for message in messages: - if message["role"] == "tool": - tool_message = message["content"] - elif message["role"] == "assistant": - assistant_message = message["content"] - else: - raise ValueError("unknown message role") - - #construct data for ai studio evaluation +async def process(data: list, file): + for qa_pairs_obj in data: + qa_pairs = qa_pairs_obj["qa_pairs"] + for qa_pair in qa_pairs: + question = qa_pair["question"] + messages = [{"role": "user", "content": question}] - user_message = {"role":"user", "content":question} - assistant_message = {"role":"assistant", "content":assistant_message} + print("processing question " + question) - #prepare citations - citations = json.loads(tool_message) - assistant_message["context"] = citations + request = {"messages": messages, "id": "1"} - #create output - messages = [] - messages.append(user_message) - messages.append(assistant_message) + response = await app.complete_chat_request(request) - evaluation_data = {"messages":messages} + # print(json.dumps(response)) - #incrementally write out to the jsonl file - file.write(json.dumps(evaluation_data)+"\n") - file.flush() + messages = response["choices"][0]["messages"] + tool_message = None + assistant_message = None -evaluation_data_file_path = r"path/to/output_file.jsonl" + for message in messages: + if message["role"] == "tool": + tool_message = message["content"] + elif message["role"] == "assistant": + assistant_message = message["content"] + else: + raise ValueError("unknown message role") -with open(evaluation_data_file_path, "w") as file: - asyncio.run(process(data, file)) + # construct data for ai studio evaluation + user_message = {"role": "user", "content": question} + assistant_message = {"role": "assistant", "content": assistant_message} + # prepare citations + citations = json.loads(tool_message) + assistant_message["context"] = citations + # create output + messages = [] + messages.append(user_message) + messages.append(assistant_message) + evaluation_data = {"messages": messages} + # incrementally write out to the jsonl file + file.write(json.dumps(evaluation_data) + "\n") + file.flush() +evaluation_data_file_path = r"path/to/output_file.jsonl" +with open(evaluation_data_file_path, "w") as file: + asyncio.run(process(data, file)) diff --git a/ResearchAssistant/App/.flake8 b/ResearchAssistant/App/.flake8 new file mode 100644 index 00000000..c462975a --- /dev/null +++ b/ResearchAssistant/App/.flake8 @@ -0,0 +1,4 @@ +[flake8] +max-line-length = 88 +extend-ignore = E501, E203 +exclude = .venv, frontend, \ No newline at end of file diff --git a/ResearchAssistant/App/app.py b/ResearchAssistant/App/app.py index 63ac092a..a50590ea 100644 --- a/ResearchAssistant/App/app.py +++ b/ResearchAssistant/App/app.py @@ -1,33 +1,35 @@ -import json -import os -import logging -import requests +import asyncio import copy -from flask import Flask, Response, request, jsonify, send_from_directory -from dotenv import load_dotenv -import urllib.request import json +import logging import os +import urllib.request 
-import asyncio +import requests +from dotenv import load_dotenv +from flask import Flask, Response, jsonify, request, send_from_directory load_dotenv() app = Flask(__name__, static_folder="static") + # Static Files @app.route("/") def index(): return app.send_static_file("index.html") + @app.route("/favicon.ico") def favicon(): - return app.send_static_file('favicon.ico') + return app.send_static_file("favicon.ico") + @app.route("/assets/") def assets(path): return send_from_directory("static/assets", path) + # Debug settings DEBUG = os.environ.get("DEBUG", "false") DEBUG_LOGGING = DEBUG.lower() == "true" @@ -43,17 +45,25 @@ def assets(path): # ACS Integration Settings AZURE_SEARCH_SERVICE = os.environ.get("AZURE_SEARCH_SERVICE") AZURE_SEARCH_KEY = os.environ.get("AZURE_SEARCH_KEY") -AZURE_SEARCH_USE_SEMANTIC_SEARCH = os.environ.get("AZURE_SEARCH_USE_SEMANTIC_SEARCH", "false") -AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG = os.environ.get("AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG", "default") +AZURE_SEARCH_USE_SEMANTIC_SEARCH = os.environ.get( + "AZURE_SEARCH_USE_SEMANTIC_SEARCH", "false" +) +AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG = os.environ.get( + "AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG", "default" +) AZURE_SEARCH_TOP_K = os.environ.get("AZURE_SEARCH_TOP_K", SEARCH_TOP_K) -AZURE_SEARCH_ENABLE_IN_DOMAIN = os.environ.get("AZURE_SEARCH_ENABLE_IN_DOMAIN", SEARCH_ENABLE_IN_DOMAIN) +AZURE_SEARCH_ENABLE_IN_DOMAIN = os.environ.get( + "AZURE_SEARCH_ENABLE_IN_DOMAIN", SEARCH_ENABLE_IN_DOMAIN +) AZURE_SEARCH_CONTENT_COLUMNS = os.environ.get("AZURE_SEARCH_CONTENT_COLUMNS") AZURE_SEARCH_FILENAME_COLUMN = os.environ.get("AZURE_SEARCH_FILENAME_COLUMN") AZURE_SEARCH_TITLE_COLUMN = os.environ.get("AZURE_SEARCH_TITLE_COLUMN") AZURE_SEARCH_URL_COLUMN = os.environ.get("AZURE_SEARCH_URL_COLUMN") AZURE_SEARCH_VECTOR_COLUMNS = os.environ.get("AZURE_SEARCH_VECTOR_COLUMNS") AZURE_SEARCH_QUERY_TYPE = os.environ.get("AZURE_SEARCH_QUERY_TYPE") -AZURE_SEARCH_PERMITTED_GROUPS_COLUMN = os.environ.get("AZURE_SEARCH_PERMITTED_GROUPS_COLUMN") +AZURE_SEARCH_PERMITTED_GROUPS_COLUMN = os.environ.get( + "AZURE_SEARCH_PERMITTED_GROUPS_COLUMN" +) AZURE_SEARCH_STRICTNESS = os.environ.get("AZURE_SEARCH_STRICTNESS", SEARCH_STRICTNESS) AZURE_SEARCH_INDEX_GRANTS = os.environ.get("AZURE_SEARCH_INDEX_GRANTS") AZURE_SEARCH_INDEX_ARTICLES = os.environ.get("AZURE_SEARCH_INDEX_ARTICLES") @@ -67,10 +77,17 @@ def assets(path): AZURE_OPENAI_TOP_P = os.environ.get("AZURE_OPENAI_TOP_P", 1.0) AZURE_OPENAI_MAX_TOKENS = os.environ.get("AZURE_OPENAI_MAX_TOKENS", 1000) AZURE_OPENAI_STOP_SEQUENCE = os.environ.get("AZURE_OPENAI_STOP_SEQUENCE") -AZURE_OPENAI_SYSTEM_MESSAGE = os.environ.get("AZURE_OPENAI_SYSTEM_MESSAGE", "You are an AI assistant that helps people find information.") -AZURE_OPENAI_PREVIEW_API_VERSION = os.environ.get("AZURE_OPENAI_PREVIEW_API_VERSION", "2023-08-01-preview") +AZURE_OPENAI_SYSTEM_MESSAGE = os.environ.get( + "AZURE_OPENAI_SYSTEM_MESSAGE", + "You are an AI assistant that helps people find information.", +) +AZURE_OPENAI_PREVIEW_API_VERSION = os.environ.get( + "AZURE_OPENAI_PREVIEW_API_VERSION", "2023-08-01-preview" +) AZURE_OPENAI_STREAM = os.environ.get("AZURE_OPENAI_STREAM", "true") -AZURE_OPENAI_MODEL_NAME = os.environ.get("AZURE_OPENAI_MODEL_NAME", "gpt-35-turbo-16k") # Name of the model, e.g. 'gpt-35-turbo-16k' or 'gpt-4' +AZURE_OPENAI_MODEL_NAME = os.environ.get( + "AZURE_OPENAI_MODEL_NAME", "gpt-35-turbo-16k" +) # Name of the model, e.g. 
'gpt-35-turbo-16k' or 'gpt-4' AZURE_OPENAI_EMBEDDING_ENDPOINT = os.environ.get("AZURE_OPENAI_EMBEDDING_ENDPOINT") AZURE_OPENAI_EMBEDDING_KEY = os.environ.get("AZURE_OPENAI_EMBEDDING_KEY") AZURE_OPENAI_EMBEDDING_NAME = os.environ.get("AZURE_OPENAI_EMBEDDING_NAME", "") @@ -82,60 +99,67 @@ def assets(path): # Frontend Settings via Environment Variables AUTH_ENABLED = os.environ.get("AUTH_ENABLED", "true").lower() -frontend_settings = { "auth_enabled": AUTH_ENABLED } +frontend_settings = {"auth_enabled": AUTH_ENABLED} loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) + def is_chat_model(): - if 'gpt-4' in AZURE_OPENAI_MODEL_NAME.lower() or AZURE_OPENAI_MODEL_NAME.lower() in ['gpt-35-turbo-4k', 'gpt-35-turbo-16k']: + if ( + "gpt-4" in AZURE_OPENAI_MODEL_NAME.lower() + or AZURE_OPENAI_MODEL_NAME.lower() in ["gpt-35-turbo-4k", "gpt-35-turbo-16k"] + ): return True return False + def should_use_data(): if AZURE_SEARCH_SERVICE and AZURE_SEARCH_KEY: if DEBUG_LOGGING: logging.debug("Using Azure Cognitive Search") return True - + return False + def format_as_ndjson(obj: dict) -> str: return json.dumps(obj, ensure_ascii=False) + "\n" + def parse_multi_columns(columns: str) -> list: if "|" in columns: return columns.split("|") else: return columns.split(",") + def fetchUserGroups(userToken, nextLink=None): # Recursively fetch group membership if nextLink: endpoint = nextLink else: endpoint = "https://graph.microsoft.com/v1.0/me/transitiveMemberOf?$select=id" - - headers = { - 'Authorization': "bearer " + userToken - } - try : + + headers = {"Authorization": "bearer " + userToken} + try: r = requests.get(endpoint, headers=headers) if r.status_code != 200: if DEBUG_LOGGING: logging.error(f"Error fetching user groups: {r.status_code} {r.text}") return [] - + r = r.json() if "@odata.nextLink" in r: nextLinkData = fetchUserGroups(userToken, r["@odata.nextLink"]) - r['value'].extend(nextLinkData) - - return r['value'] + r["value"].extend(nextLinkData) + + return r["value"] except Exception as e: logging.error(f"Exception in fetchUserGroups: {e}") return [] + def generateFilterString(userToken): # Get list of groups user is a member of userGroups = fetchUserGroups(userToken) @@ -144,9 +168,10 @@ def generateFilterString(userToken): if not userGroups: logging.debug("No user groups found") - group_ids = ", ".join([obj['id'] for obj in userGroups]) + group_ids = ", ".join([obj["id"] for obj in userGroups]) return f"{AZURE_SEARCH_PERMITTED_GROUPS_COLUMN}/any(g:search.in(g, '{group_ids}'))" + def prepare_body_headers_with_data(request): request_messages = request.json["messages"] @@ -155,9 +180,13 @@ def prepare_body_headers_with_data(request): "temperature": float(AZURE_OPENAI_TEMPERATURE), "max_tokens": int(AZURE_OPENAI_MAX_TOKENS), "top_p": float(AZURE_OPENAI_TOP_P), - "stop": AZURE_OPENAI_STOP_SEQUENCE.split("|") if AZURE_OPENAI_STOP_SEQUENCE else None, + "stop": ( + AZURE_OPENAI_STOP_SEQUENCE.split("|") + if AZURE_OPENAI_STOP_SEQUENCE + else None + ), "stream": SHOULD_STREAM, - "dataSources": [] + "dataSources": [], } if DATASOURCE_TYPE == "AzureCognitiveSearch": @@ -165,16 +194,21 @@ def prepare_body_headers_with_data(request): query_type = "simple" if AZURE_SEARCH_QUERY_TYPE: query_type = AZURE_SEARCH_QUERY_TYPE - elif AZURE_SEARCH_USE_SEMANTIC_SEARCH.lower() == "true" and AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG: + elif ( + AZURE_SEARCH_USE_SEMANTIC_SEARCH.lower() == "true" + and AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG + ): query_type = "semantic" # Set filter filter = None userToken = None if 
AZURE_SEARCH_PERMITTED_GROUPS_COLUMN: - userToken = request.headers.get('X-MS-TOKEN-AAD-ACCESS-TOKEN', "") + userToken = request.headers.get("X-MS-TOKEN-AAD-ACCESS-TOKEN", "") if DEBUG_LOGGING: - logging.debug(f"USER TOKEN is {'present' if userToken else 'not present'}") + logging.debug( + f"USER TOKEN is {'present' if userToken else 'not present'}" + ) filter = generateFilterString(userToken) if DEBUG_LOGGING: @@ -186,32 +220,71 @@ def prepare_body_headers_with_data(request): "parameters": { "endpoint": f"https://{AZURE_SEARCH_SERVICE}.search.windows.net", "key": AZURE_SEARCH_KEY, - "indexName": AZURE_SEARCH_INDEX_GRANTS if request.json.get("index_name").lower() == "grants" else AZURE_SEARCH_INDEX_ARTICLES, + "indexName": ( + AZURE_SEARCH_INDEX_GRANTS + if request.json.get("index_name").lower() == "grants" + else AZURE_SEARCH_INDEX_ARTICLES + ), "fieldsMapping": { - "contentFields": parse_multi_columns(AZURE_SEARCH_CONTENT_COLUMNS) if AZURE_SEARCH_CONTENT_COLUMNS else [], - "titleField": AZURE_SEARCH_TITLE_COLUMN if AZURE_SEARCH_TITLE_COLUMN else None, - "urlField": AZURE_SEARCH_URL_COLUMN if AZURE_SEARCH_URL_COLUMN else None, - "filepathField": AZURE_SEARCH_FILENAME_COLUMN if AZURE_SEARCH_FILENAME_COLUMN else None, - "vectorFields": parse_multi_columns(AZURE_SEARCH_VECTOR_COLUMNS) if AZURE_SEARCH_VECTOR_COLUMNS else [] + "contentFields": ( + parse_multi_columns(AZURE_SEARCH_CONTENT_COLUMNS) + if AZURE_SEARCH_CONTENT_COLUMNS + else [] + ), + "titleField": ( + AZURE_SEARCH_TITLE_COLUMN + if AZURE_SEARCH_TITLE_COLUMN + else None + ), + "urlField": ( + AZURE_SEARCH_URL_COLUMN if AZURE_SEARCH_URL_COLUMN else None + ), + "filepathField": ( + AZURE_SEARCH_FILENAME_COLUMN + if AZURE_SEARCH_FILENAME_COLUMN + else None + ), + "vectorFields": ( + parse_multi_columns(AZURE_SEARCH_VECTOR_COLUMNS) + if AZURE_SEARCH_VECTOR_COLUMNS + else [] + ), }, - "inScope": True if AZURE_SEARCH_ENABLE_IN_DOMAIN.lower() == "true" else False, + "inScope": ( + True + if AZURE_SEARCH_ENABLE_IN_DOMAIN.lower() == "true" + else False + ), "topNDocuments": AZURE_SEARCH_TOP_K, "queryType": query_type, - "semanticConfiguration": AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG if AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG else "", + "semanticConfiguration": ( + AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG + if AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG + else "" + ), "roleInformation": AZURE_OPENAI_SYSTEM_MESSAGE, "filter": filter, - "strictness": int(AZURE_SEARCH_STRICTNESS) - } - }) + "strictness": int(AZURE_SEARCH_STRICTNESS), + }, + } + ) else: - raise Exception(f"DATASOURCE_TYPE is not configured or unknown: {DATASOURCE_TYPE}") + raise Exception( + f"DATASOURCE_TYPE is not configured or unknown: {DATASOURCE_TYPE}" + ) if "vector" in query_type.lower(): if AZURE_OPENAI_EMBEDDING_NAME: - body["dataSources"][0]["parameters"]["embeddingDeploymentName"] = AZURE_OPENAI_EMBEDDING_NAME + body["dataSources"][0]["parameters"][ + "embeddingDeploymentName" + ] = AZURE_OPENAI_EMBEDDING_NAME else: - body["dataSources"][0]["parameters"]["embeddingEndpoint"] = AZURE_OPENAI_EMBEDDING_ENDPOINT - body["dataSources"][0]["parameters"]["embeddingKey"] = AZURE_OPENAI_EMBEDDING_KEY + body["dataSources"][0]["parameters"][ + "embeddingEndpoint" + ] = AZURE_OPENAI_EMBEDDING_ENDPOINT + body["dataSources"][0]["parameters"][ + "embeddingKey" + ] = AZURE_OPENAI_EMBEDDING_KEY if DEBUG_LOGGING: body_clean = copy.deepcopy(body) @@ -221,26 +294,29 @@ def prepare_body_headers_with_data(request): body_clean["dataSources"][0]["parameters"]["connectionString"] = "*****" if 
body_clean["dataSources"][0]["parameters"].get("embeddingKey"): body_clean["dataSources"][0]["parameters"]["embeddingKey"] = "*****" - + logging.debug(f"REQUEST BODY: {json.dumps(body_clean, indent=4)}") headers = { - 'Content-Type': 'application/json', - 'api-key': AZURE_OPENAI_KEY, - "x-ms-useragent": "GitHubSampleWebApp/PublicAPI/3.0.0" + "Content-Type": "application/json", + "api-key": AZURE_OPENAI_KEY, + "x-ms-useragent": "GitHubSampleWebApp/PublicAPI/3.0.0", } return body, headers + def stream_with_data(body, headers, endpoint, history_metadata={}): if USE_AZURE_AI_STUDIO.lower() == "true": endpoint = os.environ.get("AI_STUDIO_CHAT_FLOW_ENDPOINT") api_key = os.environ.get("AI_STUDIO_CHAT_FLOW_API_KEY") headers = { - 'Content-Type':'application/json', - 'Authorization':('Bearer '+ api_key), - 'azureml-model-deployment': os.environ.get("AI_STUDIO_CHAT_FLOW_DEPLOYMENT_NAME"), - 'Accept': 'text/event-stream' + "Content-Type": "application/json", + "Authorization": ("Bearer " + api_key), + "azureml-model-deployment": os.environ.get( + "AI_STUDIO_CHAT_FLOW_DEPLOYMENT_NAME" + ), + "Accept": "text/event-stream", } s = requests.Session() @@ -248,12 +324,14 @@ def stream_with_data(body, headers, endpoint, history_metadata={}): with s.post(endpoint, json=body, headers=headers, stream=True) as r: for line in r.iter_lines(chunk_size=10): try: - rawResponse = json.loads(line.lstrip(b'data:').decode('utf-8'))["answer"] + rawResponse = json.loads(line.lstrip(b"data:").decode("utf-8"))[ + "answer" + ] lineJson = json.loads(rawResponse) except json.decoder.JSONDecodeError: continue - if 'error' in lineJson: + if "error" in lineJson: yield format_as_ndjson(lineJson) yield format_as_ndjson(lineJson) @@ -269,113 +347,140 @@ def stream_with_data(body, headers, endpoint, history_metadata={}): "model": "", "created": 0, "object": "", - "choices": [{ - "messages": [] - }], + "choices": [{"messages": []}], "apim-request-id": "", - 'history_metadata': history_metadata + "history_metadata": history_metadata, } if line: - if AZURE_OPENAI_PREVIEW_API_VERSION == '2023-06-01-preview': - lineJson = json.loads(line.lstrip(b'data:').decode('utf-8')) + if AZURE_OPENAI_PREVIEW_API_VERSION == "2023-06-01-preview": + lineJson = json.loads(line.lstrip(b"data:").decode("utf-8")) else: try: - rawResponse = json.loads(line.lstrip(b'data:').decode('utf-8')) + rawResponse = json.loads( + line.lstrip(b"data:").decode("utf-8") + ) lineJson = formatApiResponseStreaming(rawResponse) except json.decoder.JSONDecodeError: continue - if 'error' in lineJson: - error_code_value = lineJson.get('error', {}).get('code', '') + if "error" in lineJson: + error_code_value = lineJson.get("error", {}).get("code", "") error_message = format_as_ndjson(lineJson) - inner_error_code_value = extract_value('code', error_message) - inner_error_status_value = extract_value('status', error_message) - if inner_error_code_value == 'content_filter' and inner_error_status_value == '400': - response["choices"][0]["messages"].append({ - "role": "assistant", - "content": "I am sorry, I don’t have this information in the knowledge repository. Please ask another question." 
- }) + inner_error_code_value = extract_value( + "code", error_message + ) + inner_error_status_value = extract_value( + "status", error_message + ) + if ( + inner_error_code_value == "content_filter" + and inner_error_status_value == "400" + ): + response["choices"][0]["messages"].append( + { + "role": "assistant", + "content": "I am sorry, I don’t have this information in the knowledge repository. Please ask another question.", + } + ) yield format_as_ndjson(response) - elif error_code_value == '429' or inner_error_code_value == '429': - yield format_as_ndjson({"error": "We're currently experiencing a high number of requests for the service you're trying to access. Please wait a moment and try again."}) + elif ( + error_code_value == "429" + or inner_error_code_value == "429" + ): + yield format_as_ndjson( + { + "error": "We're currently experiencing a high number of requests for the service you're trying to access. Please wait a moment and try again." + } + ) else: - yield format_as_ndjson({"error": "An error occurred. Please try again. If the problem persists, please contact the site administrator."}) + yield format_as_ndjson( + { + "error": "An error occurred. Please try again. If the problem persists, please contact the site administrator." + } + ) continue response["id"] = lineJson["id"] response["model"] = lineJson["model"] response["created"] = lineJson["created"] response["object"] = lineJson["object"] - response["apim-request-id"] = r.headers.get('apim-request-id') + response["apim-request-id"] = r.headers.get("apim-request-id") - role = lineJson["choices"][0]["messages"][0]["delta"].get("role") + role = lineJson["choices"][0]["messages"][0]["delta"].get( + "role" + ) if role == "tool": - response["choices"][0]["messages"].append(lineJson["choices"][0]["messages"][0]["delta"]) + response["choices"][0]["messages"].append( + lineJson["choices"][0]["messages"][0]["delta"] + ) yield format_as_ndjson(response) - elif role == "assistant": - if response['apim-request-id'] and DEBUG_LOGGING: - logging.debug(f"RESPONSE apim-request-id: {response['apim-request-id']}") - response["choices"][0]["messages"].append({ - "role": "assistant", - "content": "" - }) + elif role == "assistant": + if response["apim-request-id"] and DEBUG_LOGGING: + logging.debug( + f"RESPONSE apim-request-id: {response['apim-request-id']}" + ) + response["choices"][0]["messages"].append( + {"role": "assistant", "content": ""} + ) yield format_as_ndjson(response) else: - deltaText = lineJson["choices"][0]["messages"][0]["delta"]["content"] + deltaText = lineJson["choices"][0]["messages"][0]["delta"][ + "content" + ] if deltaText != "[DONE]": - response["choices"][0]["messages"].append({ - "role": "assistant", - "content": deltaText - }) + response["choices"][0]["messages"].append( + {"role": "assistant", "content": deltaText} + ) yield format_as_ndjson(response) except Exception as e: yield format_as_ndjson({"error" + str(e)}) - + def formatApiResponseNoStreaming(rawResponse): - if 'error' in rawResponse: + if "error" in rawResponse: return {"error": rawResponse["error"]} response = { "id": rawResponse["id"], "model": rawResponse["model"], "created": rawResponse["created"], "object": rawResponse["object"], - "choices": [{ - "messages": [] - }], + "choices": [{"messages": []}], } toolMessage = { "role": "tool", - "content": rawResponse["choices"][0]["message"]["context"]["messages"][0]["content"] + "content": rawResponse["choices"][0]["message"]["context"]["messages"][0][ + "content" + ], } assistantMessage = { "role": 
"assistant", - "content": rawResponse["choices"][0]["message"]["content"] + "content": rawResponse["choices"][0]["message"]["content"], } response["choices"][0]["messages"].append(toolMessage) response["choices"][0]["messages"].append(assistantMessage) return response + def formatApiResponseStreaming(rawResponse): - if 'error' in rawResponse: + if "error" in rawResponse: return {"error": rawResponse["error"]} response = { "id": rawResponse["id"], "model": rawResponse["model"], "created": rawResponse["created"], "object": rawResponse["object"], - "choices": [{ - "messages": [] - }], + "choices": [{"messages": []}], } if rawResponse["choices"][0]["delta"].get("context"): messageObj = { "delta": { "role": "tool", - "content": rawResponse["choices"][0]["delta"]["context"]["messages"][0]["content"] + "content": rawResponse["choices"][0]["delta"]["context"]["messages"][0][ + "content" + ], } } response["choices"][0]["messages"].append(messageObj) @@ -404,9 +509,14 @@ def formatApiResponseStreaming(rawResponse): return response + def conversation_with_data(request_body): body, headers = prepare_body_headers_with_data(request) - base_url = AZURE_OPENAI_ENDPOINT if AZURE_OPENAI_ENDPOINT else f"https://{AZURE_OPENAI_RESOURCE}.openai.azure.com/" + base_url = ( + AZURE_OPENAI_ENDPOINT + if AZURE_OPENAI_ENDPOINT + else f"https://{AZURE_OPENAI_RESOURCE}.openai.azure.com/" + ) endpoint = f"{base_url}openai/deployments/{AZURE_OPENAI_MODEL}/extensions/chat/completions?api-version={AZURE_OPENAI_PREVIEW_API_VERSION}" history_metadata = request_body.get("history_metadata", {}) @@ -418,21 +528,26 @@ def conversation_with_data(request_body): status_code = r.status_code r = r.json() if AZURE_OPENAI_PREVIEW_API_VERSION == "2023-06-01-preview": - r['history_metadata'] = history_metadata + r["history_metadata"] = history_metadata return Response(format_as_ndjson(r), status=status_code) else: result = formatApiResponseNoStreaming(r) - result['history_metadata'] = history_metadata + result["history_metadata"] = history_metadata return Response(format_as_ndjson(result), status=status_code) else: - return Response(stream_with_data(body, headers, endpoint, history_metadata), mimetype='text/event-stream') + return Response( + stream_with_data(body, headers, endpoint, history_metadata), + mimetype="text/event-stream", + ) + @app.route("/conversation", methods=["GET", "POST"]) def conversation(): request_body = request.json return conversation_internal(request_body) + def conversation_internal(request_body): try: return conversation_with_data(request_body) @@ -440,7 +555,8 @@ def conversation_internal(request_body): logging.exception("Exception in /conversation") return jsonify({"error": str(e)}), 500 -@app.route("/frontend_settings", methods=["GET"]) + +@app.route("/frontend_settings", methods=["GET"]) def get_frontend_settings(): try: return jsonify(frontend_settings), 200 @@ -448,26 +564,29 @@ def get_frontend_settings(): logging.exception("Exception in /frontend_settings") return jsonify({"error": str(e)}), 500 + def run_async(func): return loop.run_until_complete(func) + # Helper function to extract values safely -def extract_value(key, text, default='N/A'): +def extract_value(key, text, default="N/A"): try: - return text.split(f"'{key}': ")[1].split(',')[0].strip("'") + return text.split(f"'{key}': ")[1].split(",")[0].strip("'") except IndexError: return default + @app.route("/draft_document/generate_section", methods=["POST"]) def draft_document_generate(): request_body = request.json topic = 
request_body["grantTopic"] section = request_body["sectionTitle"] section_context = request_body["sectionContext"] - if(section_context != ""): - query = f'{section_context} ' + if section_context != "": + query = f"{section_context} " else: - query = f'Create {section} section of research grant application for - {topic}.' + query = f"Create {section} section of research grant application for - {topic}." data = { "chat_history": [], @@ -477,13 +596,19 @@ def draft_document_generate(): url = os.environ.get("AI_STUDIO_DRAFT_FLOW_ENDPOINT") api_key = os.environ.get("AI_STUDIO_DRAFT_FLOW_API_KEY") - headers = {'Content-Type': 'application/json', 'Authorization':('Bearer '+ api_key), 'azureml-model-deployment': os.environ.get("AI_STUDIO_DRAFT_FLOW_DEPLOYMENT_NAME") } + headers = { + "Content-Type": "application/json", + "Authorization": ("Bearer " + api_key), + "azureml-model-deployment": os.environ.get( + "AI_STUDIO_DRAFT_FLOW_DEPLOYMENT_NAME" + ), + } req = urllib.request.Request(url, body, headers) try: response = urllib.request.urlopen(req) result = response.read() - return jsonify({"content": json.loads(result)['reply']}), 200 + return jsonify({"content": json.loads(result)["reply"]}), 200 except urllib.error.HTTPError as error: # Read and parse the error response res = error.read() @@ -493,15 +618,19 @@ def draft_document_generate(): except json.JSONDecodeError: return "Failed to decode the error content." - error_message = error_json['error']['message'] + error_message = error_json["error"]["message"] # Extract specific parts of the error message - code_value = extract_value('code', error_json['error']['message']) - status_value = extract_value('status', error_json['error']['message']) - - if code_value == 'content_filter' and status_value == '400': - return jsonify({"The request failed with status code: ": str(error_message)}), 400 + code_value = extract_value("code", error_json["error"]["message"]) + status_value = extract_value("status", error_json["error"]["message"]) + + if code_value == "content_filter" and status_value == "400": + return ( + jsonify({"The request failed with status code: ": str(error_message)}), + 400, + ) else: - return ("The request failed with status code: " + str(error.code)) - + return "The request failed with status code: " + str(error.code) + + if __name__ == "__main__": - app.run() \ No newline at end of file + app.run() diff --git a/ResearchAssistant/App/requirements.txt b/ResearchAssistant/App/requirements.txt index 7185c4d5..c9dc23a7 100644 --- a/ResearchAssistant/App/requirements.txt +++ b/ResearchAssistant/App/requirements.txt @@ -1,7 +1,13 @@ azure-identity==1.14.0 -Flask==2.3.2 +Flask==3.0.0 openai==1.6.1 azure-search-documents==11.4.0b6 azure-storage-blob==12.17.0 python-dotenv==1.0.0 -azure-cosmos==4.5.0 \ No newline at end of file +azure-cosmos==4.5.0 +pytest-asyncio==0.24.0 +pytest-cov==5.0.0 +flake8==7.1.1 +black==24.8.0 +autoflake==2.3.1 +isort==5.13.2 \ No newline at end of file
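Editor's note: the extract_value helper reworked in ResearchAssistant/App/app.py above does plain string splitting rather than JSON parsing, so it is easiest to sanity-check in isolation. Below is a minimal, self-contained sketch; the sample_error text is illustrative only and is not taken from the service.

# Illustrative check of the extract_value helper shown in the diff above.
# sample_error is an assumed example string, not real service output.


def extract_value(key, text, default="N/A"):
    try:
        return text.split(f"'{key}': ")[1].split(",")[0].strip("'")
    except IndexError:
        return default


sample_error = "Policy violation: {'code': 'content_filter', 'status': '400', 'message': 'blocked'}"

print(extract_value("code", sample_error))     # content_filter
print(extract_value("status", sample_error))   # 400
print(extract_value("missing", sample_error))  # N/A (falls back to the default)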
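Editor's note: a rough usage sketch for the /draft_document/generate_section route reformatted above. Only the JSON keys (grantTopic, sectionTitle, sectionContext) and the {"content": ...} success shape come from the route itself; the localhost URL, port, and payload values are placeholder assumptions based on Flask dev-server defaults.

# Hypothetical client call against the /draft_document/generate_section route.
# URL and payload values are placeholders; only the key names mirror the route.
import requests

payload = {
    "grantTopic": "coastal erosion monitoring",  # placeholder topic
    "sectionTitle": "Project Aims",              # placeholder section title
    "sectionContext": "",                        # empty string lets the server compose its own query
}

resp = requests.post(
    "http://localhost:5000/draft_document/generate_section",  # assumes the Flask dev server
    json=payload,
    timeout=120,
)

if resp.ok:
    # Successful responses carry the generated section text under "content".
    print(resp.json()["content"])
else:
    # Content-filter rejections come back as 400 with an explanatory body.
    print(resp.status_code, resp.text)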