2 changes: 1 addition & 1 deletion .python-version
@@ -1 +1 @@
-3.12
+3.11
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -11,7 +11,7 @@ license-files = [
authors = [
{ name = "NVIDIA RAG", email = "[email protected]" }
]
-requires-python = ">=3.12"
+requires-python = ">=3.11"

dependencies = [
"bleach==6.2.0",
@@ -111,7 +111,7 @@ Documentation = "https://github.com/NVIDIA-AI-Blueprints/rag/blob/main/README.md

# Linting and formatting configuration
[tool.ruff]
-target-version = "py312"
+target-version = "py311"
line-length = 88

[tool.ruff.lint]
4 changes: 1 addition & 3 deletions src/nvidia_rag/rag_server/response_generator.py
@@ -735,9 +735,7 @@ async def retrieve_summary(
# If summary not found and wait=False, return immediately
if not wait:
return {
"message": f"Summary for {
file_name
} not found. Ensure the file name and collection name are correct. Set wait=true to wait for generation.",
"message": f"Summary for {file_name} not found. Ensure the file name and collection name are correct. Set wait=true to wait for generation.",
"status": "FAILED",
}

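Note: the single-line f-string rewrites in this diff go hand in hand with the Python downgrade above. Replacement fields that span multiple lines inside a single-quoted f-string are a PEP 701 feature available only on Python 3.12+, so these literals have to be collapsed onto one line to parse on 3.11. A minimal sketch of the difference (the file_name value is hypothetical, for illustration only):

    # On Python 3.11, a replacement field cannot span lines inside a
    # single-quoted f-string; this form fails with
    # "SyntaxError: unterminated string literal":
    #
    #   message = f"Summary for {
    #       file_name
    #   } not found."
    #
    # Collapsing the expression onto one line keeps the literal valid on 3.11:
    file_name = "report.pdf"  # hypothetical value, for illustration
    message = f"Summary for {file_name} not found."
    print(message)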
6 changes: 1 addition & 5 deletions src/nvidia_rag/utils/llm.py
@@ -144,11 +144,7 @@ def get_llm(config: NvidiaRAGConfig | None = None, **kwargs) -> LLM | SimpleChat
stop=kwargs.get("stop", []),
)
except (requests.RequestException, requests.ConnectionError) as e:
error_msg = f"Failed to connect to guardrails service at {
guardrails_url
}: {
str(e)
} Make sure the guardrails service is running and accessible."
error_msg = f"Failed to connect to guardrails service at {guardrails_url}: {str(e)} Make sure the guardrails service is running and accessible."
logger.error(error_msg)
raise RuntimeError(error_msg) from e

16 changes: 4 additions & 12 deletions src/nvidia_rag/utils/vdb/milvus/milvus_vdb.py
@@ -469,9 +469,7 @@ def delete_documents(
for source_value in source_values:
# Delete Milvus Entities
logger.info(
f"Deleting document {source_value} from collection {
collection_name
} at {self.vdb_endpoint}"
f"Deleting document {source_value} from collection {collection_name} at {self.vdb_endpoint}"
)
try:
resp = collection.delete(f"source['source_name'] == '{source_value}'")
@@ -481,9 +479,7 @@
)
except MilvusException:
logger.debug(
f"Failed to delete document {
source_value
}, source name might be available in the source field"
f"Failed to delete document {source_value}, source name might be available in the source field"
)
resp = collection.delete(f"source == '{source_value}'")
deleted = True
@@ -561,9 +557,7 @@ def add_metadata_schema(
}
client.insert(collection_name=DEFAULT_METADATA_SCHEMA_COLLECTION, data=data)
logger.info(
f"Metadata schema added to the collection {
collection_name
}. Metadata schema: {metadata_schema}"
f"Metadata schema added to the collection {collection_name}. Metadata schema: {metadata_schema}"
)

def get_metadata_schema(
@@ -680,9 +674,7 @@ def add_document_info(
}
client.insert(collection_name=DEFAULT_DOCUMENT_INFO_COLLECTION, data=data)
logger.info(
f"Document info added to the collection {
collection_name
}. Document info: {info_type}, {document_name}, {info_value}"
f"Document info added to the collection {collection_name}. Document info: {info_type}, {document_name}, {info_value}"
)

def get_document_info(