diff --git a/.pylintrc b/.pylintrc index 8c13acbca..d82ad6195 100644 --- a/.pylintrc +++ b/.pylintrc @@ -3,7 +3,7 @@ ignore-patterns= ignore= [MESSAGES CONTROL] -disable=C0114,C0115,C0116,C0301,C0103,W0603,R1715,W0621,R0903,W0237,W0511,W0622,R0913,R0902,W0221,C0302,R0801,C0411,C0412 +disable=C0114,C0115,C0116,C0301,C0103,W0603,R1715,W0621,R0903,W0237,W0511,W0622,R0913,R0902,W0221,C0302,R0801,C0411,C0412,W0719,W0718,R0914,R0916,R0912,R0911,W0102 # To discuss # W0621: redefined-outer-name diff --git a/README.md b/README.md index b99ddf618..cb1ff1079 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,10 @@
-
-Dashboard |
+Try on Dashboard |
Homepage |
@@ -59,10 +61,10 @@
- [📋 Table of contents](#-table-of-contents)
- [🤔 Why Composio?](#-why-composio)
+- [🔥 Key Features](#-key-features)
- [🚀 Getting Started](#-getting-started)
- [1. Installation](#1-installation)
- [2. Testing Composio in Action](#2-testing-composio-in-action)
-- [🔥 Key Features](#-key-features)
- [💡 Examples](#-examples)
- [Competitor Researcher](#competitor-researcher)
- [Todolist to Calendar](#todolist-to-calendar)
@@ -78,7 +80,24 @@
We believe AI Based Agents/Workflows are the future.
Composio is the best toolset to integrate AI Agents to best Agentic Tools and use them to accomplish tasks.
-
+
+
+## 🔥 Key Features
+
+- **100+ Tools**: Support for a range of different categories
+
+  - **Software**: Do anything on GitHub, Notion, Linear, Gmail, Slack, Hubspot, Salesforce, & 90 more.
+ - **OS**: Click anywhere, Type anything, Copy to Clipboard, & more.
+ - **Browser**: Smart Search, Take a screenshot, MultiOn, Download, Upload, & more.
+ - **Search**: Google Search, Perplexity Search, Tavily, Exa & more.
+ - **SWE**: Ngrok, Database, Redis, Vercel, Git, etc.
+ - **RAG**: Agentic RAG for any type of data on the fly!
+
+- **Frameworks**: Use tools with agent frameworks like **OpenAI, Claude, LlamaIndex, Langchain, CrewAI, Autogen, Gemini, Julep, Lyzr**, and more in a single line of code.
+- **Managed Authorisation**: Supports six different auth protocols. _Access Token, Refresh token, OAuth, API Keys, JWT, and more_ abstracted out so you can focus on building agents.
+- **Accuracy**: Get _up to 40% better agentic accuracy_ in your tool calls due to better tool designs.
+- **Embeddable**: Whitelabel it in the backend of your applications, managing Auth & Integrations for all your users & agents while maintaining a consistent experience.
+- **Pluggable**: Designed to be extended with additional Tools, Frameworks and Authorisation Protocols very easily.
## 🚀 Getting Started
@@ -156,23 +175,6 @@ response_after_tool_calls = composio_tool_set.wait_and_handle_assistant_tool_cal
print(response_after_tool_calls)
```
-## 🔥 Key Features
-
-- **100+ Tools**: Support for a range of different categories
-
- - **Softwares**: Do anything on GitHub, Notion, Linear, Gmail, Slack, Hubspot, Salesforce, & 90 more.
- - **OS**: Click anywhere, Type anything, Copy to Clipboard, & more.
- - **Browser**: Smart Search, Take a screenshot, MultiOn, Download, Upload, & more.
- - **Search**: Google Search, Perplexity Search, Tavily, Exa & more.
- - **SWE**: Ngrok, Database, Redis, Vercel, Git, etc.
- - **RAG**: Agentic RAG for any type of data on the fly!
-
-- **Frameworks**: Use tools with agent frameworks like **OpenAI, Claude, LlamaIndex, Langchain, CrewAI, Autogen, Gemini, Julep, Lyzr**, and more in a single line of code.
-- **Managed Authorisation**: Supports six different auth protocols. _Access Token, Refresh token, OAuth, API Keys, JWT, and more_ abstracted out so you can focus on the building agents.
-- **Accuracy**: Get _upto 40% better agentic accuracy_ in your tool calls due to better tool designs.
-- **Embeddable**: Whitelabel in the backend of your applications managing Auth & Integrations for all your users & agents and maintain a consistent experience.
-- **Pluggable**: Designed to be extended with additional Tools, Frameworks and Authorisation Protocols very easily.
-
## 💡 Examples
### [Competitor Researcher](https://docs.composio.dev/guides/examples/CompetitorResearcher)
diff --git a/composio/cli/__init__.py b/composio/cli/__init__.py
index 72061cdaa..dab3dab0f 100644
--- a/composio/cli/__init__.py
+++ b/composio/cli/__init__.py
@@ -16,6 +16,7 @@
from composio.cli.triggers import _triggers
from composio.cli.whoami import _whoami
from composio.core.cls.did_you_mean import DYMGroup
+from composio.cli.utils import HelpfulCmdBase
class HelpDYMGroup(DYMGroup):
diff --git a/composio/cli/apps.py b/composio/cli/apps.py
index 6d8976781..3682cc1fe 100755
--- a/composio/cli/apps.py
+++ b/composio/cli/apps.py
@@ -197,7 +197,6 @@ def _update(context: Context, beta: bool = False) -> None:
key=lambda x: x.appKey,
)
if not beta:
- c = []
def filter_non_beta_items(items):
filtered_items = []
diff --git a/composio/cli/utils/__init__.py b/composio/cli/utils/__init__.py
new file mode 100644
index 000000000..4eef8db68
--- /dev/null
+++ b/composio/cli/utils/__init__.py
@@ -0,0 +1 @@
+from .helpfulcmd import HelpfulCmdBase
diff --git a/composio/client/__init__.py b/composio/client/__init__.py
index ae44035f2..f7f9472fc 100644
--- a/composio/client/__init__.py
+++ b/composio/client/__init__.py
@@ -6,9 +6,10 @@
import time
import typing as t
import warnings
-from datetime import datetime
-
+import base64
import requests
+
+from datetime import datetime
from pydantic import BaseModel, ConfigDict
from composio.client.endpoints import Endpoint, v1
@@ -521,17 +522,26 @@ class ActiveTriggers(Collection[ActiveTriggerModel]):
def get( # type: ignore
self,
trigger_ids: t.Optional[t.List[str]] = None,
+ connected_account_ids: t.Optional[t.List[str]] = None,
+ integration_ids: t.Optional[t.List[str]] = None,
+ trigger_names: t.Optional[t.List[str]] = None,
) -> t.List[ActiveTriggerModel]:
"""List active triggers."""
trigger_ids = trigger_ids or []
+ connected_account_ids = connected_account_ids or []
+ integration_ids = integration_ids or []
+ trigger_names = trigger_names or []
+ queries = {}
+ if len(trigger_ids) > 0:
+ queries["triggerIds"] = ",".join(trigger_ids)
+ if len(connected_account_ids) > 0:
+ queries["connectedAccountIds"] = ",".join(connected_account_ids)
+ if len(integration_ids) > 0:
+ queries["integrationIds"] = ",".join(integration_ids)
+ if len(trigger_names) > 0:
+ queries["triggerNames"] = ",".join(trigger_names)
return self._raise_if_empty(
- super().get(
- queries=(
- {"triggerIds": ",".join(trigger_ids)}
- if len(trigger_ids) > 0
- else {}
- )
- )
+ super().get(queries=queries)
)
@@ -542,6 +552,8 @@ class ActionParameterPropertyModel(BaseModel):
description: t.Optional[str] = None
title: t.Optional[str] = None
type: t.Optional[str] = None
+ oneOf: t.Optional[t.List["ActionParameterPropertyModel"]] = None
+ file_readable: t.Optional[bool] = False
class ActionParametersModel(BaseModel):
@@ -744,13 +756,32 @@ def execute(
action=action,
request_data=params,
)
+ actionsResp = self.client.actions.get(actions=[action])
+ if len(actionsResp) == 0:
+ raise ComposioClientError(f"Action {action} not found")
+ action_model = actionsResp[0]
+ action_req_schema = action_model.parameters.properties
+ modified_params = {}
+ for param, value in params.items():
+ file_readable = action_req_schema[param].file_readable or False
+ if file_readable and isinstance(value, str) and os.path.isfile(value):
+ with open(value, 'rb') as file:
+ file_content = file.read()
+ try:
+ file_content.decode('utf-8') # Try decoding as UTF-8 to check if it's normal text
+ modified_params[param] = file_content.decode('utf-8')
+ except UnicodeDecodeError:
+ # If decoding fails, treat as binary and encode in base64
+ modified_params[param] = base64.b64encode(file_content).decode('utf-8')
+ else:
+ modified_params[param] = value
if action.no_auth:
return self._raise_if_required(
self.client.http.post(
url=str(self.endpoint / action.action / "execute"),
json={
"appName": action.app,
- "input": params,
+ "input": modified_params,
"entityId": entity_id,
},
)
@@ -767,7 +798,7 @@ def execute(
url=str(self.endpoint / action.action / "execute"),
json={
"connectedAccountId": connected_account,
- "input": params,
+ "input": modified_params,
"entityId": entity_id,
},
)
@@ -975,13 +1006,13 @@ def execute(
def get_connection(
self,
- app: t.Optional[str] = None,
+ app: t.Optional[t.Union[str, App]] = None,
connected_account_id: t.Optional[str] = None,
) -> ConnectedAccountModel:
"""
Get connected account for an action.
- :param action: Action type enum
+ :param app: App name
:param connected_account_id: Connected account ID to use as filter
:return: Connected account object
:raises: If no connected account found for given entity ID
@@ -1013,6 +1044,42 @@ def get_connection(
)
return latest_account
+ def get_connections(self) -> t.List[ConnectedAccountModel]:
+ """
+ Get all connections for an entity.
+ """
+ return self.client.connected_accounts.get(entity_ids=[self.id], active=True)
+
+ def enable_trigger(self, app: t.Union[str, App], trigger_name: str, config: t.Dict[str, t.Any]) -> t.Dict:
+ """
+ Enable a trigger for an entity.
+
+ :param app: App name
+ :param trigger_name: Trigger name
+ :param config: Trigger config
+ """
+ connected_account = self.get_connection(app=app)
+ return self.client.triggers.enable(
+ name=trigger_name,
+ connected_account_id=connected_account.id,
+ config=config,
+ )
+
+ def disable_trigger(self, trigger_id: str) -> t.Dict:
+ """
+ Disable a trigger for an entity.
+
+ :param trigger_id: Trigger ID
+ """
+ return self.client.triggers.disable(id=trigger_id)
+
+ def get_active_triggers(self) -> t.List[ActiveTriggerModel]:
+ """
+ Get all active triggers for an entity.
+ """
+ connected_accounts = self.get_connections()
+ return self.client.active_triggers.get(connected_account_ids=[connected_account.id for connected_account in connected_accounts])
+
def initiate_connection(
self,
app_name: t.Union[str, App],
diff --git a/composio/client/enums.py b/composio/client/enums.py
index 4f7faf701..3a7283d71 100644
--- a/composio/client/enums.py
+++ b/composio/client/enums.py
@@ -10,11 +10,14 @@
class Tag(tuple, Enum):
"""App tags."""
+ # pylint: disable=function-redefined,invalid-overridden-method
@property
def name(self) -> str:
"""Returns trigger name."""
return self.value[0]
+ # pylint: enable=function-redefined,invalid-overridden-method
+
IMPORTANT = ("default", "important")
ASANA_JOBS = ("asana", "Jobs")
ASANA_TEAM_MEMBERSHIPS = ("asana", "Team memberships")
diff --git a/composio/client/local_handler.py b/composio/client/local_handler.py
index e5c62cb9d..cd2b6ff4c 100644
--- a/composio/client/local_handler.py
+++ b/composio/client/local_handler.py
@@ -57,11 +57,9 @@ def get_list_of_action_schemas(self, apps=[], actions=[], tags=[]):
action_obj = tool_obj.get_actions_dict()[action.value[1]]
all_action_objs.append(action_obj)
- # all_action_objs = list(set(all_action_objs))
all_action_schemas = [
action_obj.get_action_schema() for action_obj in all_action_objs
]
- # all_action_schemas = list(set(all_action_schemas))
all_action_schemas = list(
{
diff --git a/composio/core/local/action.py b/composio/core/local/action.py
index faf58ac01..3aee5c5a7 100644
--- a/composio/core/local/action.py
+++ b/composio/core/local/action.py
@@ -1,10 +1,12 @@
import hashlib
import json
-from abc import ABC, abstractmethod
-from typing import List
-
import inflection
import jsonref
+import os
+import base64
+
+from abc import ABC, abstractmethod
+from typing import List
from pydantic import BaseModel
@@ -82,6 +84,16 @@ def get_tool_merged_action_name(self) -> str:
return f"{self._tool_name}_{inflection.underscore(self.action_name)}"
def get_action_schema(self):
+ request_schema_json = self.request_schema.model_json_schema(by_alias=False)
+ modified_properties = request_schema_json.get('properties', {})
+ for param, details in modified_properties.items():
+ if details.get('file_readable', False):
+ details['oneOf'] = [
+ {'type': details.get('type'), 'description': details.get('description', '')},
+ {'type': 'string', 'format': 'file-path', 'description': f"File path to {details.get('description', '')}"}
+ ]
+ del details['type'] # Remove original type to avoid conflict in oneOf
+ request_schema_json['properties'] = modified_properties
action_schema = {
"appKey": self._tool_name,
"appName": self._tool_name,
@@ -92,14 +104,11 @@ def get_action_schema(self):
"tags": self.tags, # type: ignore
"enabled": True,
"description": self.__class__.__doc__ if self.__class__.__doc__ else self.action_name, # type: ignore
- "parameters": jsonref.loads(
- json.dumps(self.request_schema.model_json_schema(by_alias=False))
- ),
+ "parameters": jsonref.loads(json.dumps(request_schema_json)),
"response": jsonref.loads(
json.dumps(self.response_schema.model_json_schema())
),
}
-
return action_schema
def execute_action(self, request_data: dict, metadata: dict):
@@ -108,7 +117,24 @@ def execute_action(self, request_data: dict, metadata: dict):
# print(f"Executing {self.__class__.__name__} on Tool: {self.tool_name} with request data {request_data} and meta data {metadata}")
try:
request_schema = self.request_schema # type: ignore
- req = request_schema.model_validate_json(json_data=json.dumps(request_data))
+ modified_request_data = {}
+
+ for param, value in request_data.items():
+ annotations = request_schema.model_fields[param].json_schema_extra
+ file_readable = annotations is not None and annotations.get('file_readable', False)
+ if file_readable and isinstance(value, str) and os.path.isfile(value):
+ with open(value, 'rb') as file:
+ file_content = file.read()
+ try:
+ file_content.decode('utf-8') # Try decoding as UTF-8 to check if it's normal text
+ modified_request_data[param] = file_content.decode('utf-8')
+ except UnicodeDecodeError:
+ # If decoding fails, treat as binary and encode in base64
+ modified_request_data[param] = base64.b64encode(file_content).decode('utf-8')
+ else:
+ modified_request_data[param] = value
+
+ req = request_schema.model_validate_json(json_data=json.dumps(modified_request_data))
return self.execute(req, metadata) # type: ignore
except json.JSONDecodeError as e:
# logger.error(f"Error executing {action.__name__} on Tool: {tool_name}: {e}\n{traceback.format_exc()}")
@@ -116,8 +142,8 @@ def execute_action(self, request_data: dict, metadata: dict):
"status": "failure",
"details": f"Could not parse response with error: {e}. Please contact the tool developer.",
}
- except Exception as e:
# logger.error(f"Error executing {action.__name__} on Tool: {tool_name}: {e}\n{traceback.format_exc()}")
+ except Exception as e:
return {
"status": "failure",
"details": "Error executing action with error: " + str(e),
diff --git a/composio/core/local/tool.py b/composio/core/local/tool.py
index 4d53ed00d..66d9b733a 100644
--- a/composio/core/local/tool.py
+++ b/composio/core/local/tool.py
@@ -1,4 +1,4 @@
-from typing import List, Optional
+from typing import Optional, Type
from composio.local_tools.local_workspace.commons.history_processor import (
HistoryProcessor,
@@ -15,7 +15,7 @@ class Tool:
def tool_name(self) -> str:
return self.__class__.__name__.lower()
- def actions(self) -> List[Action]:
+ def actions(self) -> list[Type[Action]]:
raise NotImplementedError("This method should be overridden by subclasses.")
def get_workspace_factory(self) -> Optional[WorkspaceManagerFactory]:
diff --git a/composio/local_tools/README.md b/composio/local_tools/README.md
new file mode 100644
index 000000000..180d33c4c
--- /dev/null
+++ b/composio/local_tools/README.md
@@ -0,0 +1,4 @@
+Composio is your one-stop solution for all kinds of LLM tools and functionality. A very important step on that front is the ability to add your own custom tools (or Actions).
+
+
+This page contains necessary details for using Local Tools: [Local Tools Page](https://docs.composio.dev/sdk/python/local_tools)
diff --git a/composio/local_tools/file/actions/read_file.py b/composio/local_tools/file/actions/read_file.py
index d7016de9f..ec3cbe9d7 100644
--- a/composio/local_tools/file/actions/read_file.py
+++ b/composio/local_tools/file/actions/read_file.py
@@ -1,4 +1,3 @@
-import typing as t # noqa: F401
from pathlib import Path
from pydantic import BaseModel, Field
diff --git a/composio/local_tools/file/actions/write_file.py b/composio/local_tools/file/actions/write_file.py
index 8121e9fcd..74e6c7db4 100644
--- a/composio/local_tools/file/actions/write_file.py
+++ b/composio/local_tools/file/actions/write_file.py
@@ -1,4 +1,3 @@
-import typing as t # noqa: F401
from pathlib import Path
from pydantic import BaseModel, Field
diff --git a/composio/local_tools/greptile/actions/codequery.py b/composio/local_tools/greptile/actions/codequery.py
index 875b1de65..e2771070d 100644
--- a/composio/local_tools/greptile/actions/codequery.py
+++ b/composio/local_tools/greptile/actions/codequery.py
@@ -115,12 +115,11 @@ def execute(
# Check if the request was successful
if response.status_code == 200:
return response.json()
- else:
- logger.error(
- "Failed to fetch data from Greptile API, status code: %s",
- response.status_code,
- )
- return {
- "error": "Failed to fetch data from Greptile API",
- "status_code": response.status_code,
- }
+ logger.error(
+ "Failed to fetch data from Greptile API, status code: %s",
+ response.status_code,
+ )
+ return {
+ "error": "Failed to fetch data from Greptile API",
+ "status_code": response.status_code,
+ }
diff --git a/composio/local_tools/local_workspace/cmd_manager/actions/run_cmd.py b/composio/local_tools/local_workspace/cmd_manager/actions/run_cmd.py
index 746260177..ca9bafdb1 100644
--- a/composio/local_tools/local_workspace/cmd_manager/actions/run_cmd.py
+++ b/composio/local_tools/local_workspace/cmd_manager/actions/run_cmd.py
@@ -107,7 +107,8 @@ def run_command(self, action: str, timeout: int) -> Tuple[str, int]:
return "\nEXECUTION TIMED OUT", 1
except RuntimeError as e:
logger.warning(
- f"Failed to interrupt container: {e}\nRESTARTING PROCESS."
+ "Failed to interrupt container: %s\nRESTARTING PROCESS.",
+ e,
)
self.close_container()
return (
@@ -115,15 +116,15 @@ def run_command(self, action: str, timeout: int) -> Tuple[str, int]:
1,
)
except RuntimeError as e:
- logger.warning(f"Failed to execute command: {e}\nRESTARTING PROCESS.")
+ logger.warning("Failed to execute command: %s\nRESTARTING PROCESS.", e)
self.close_container()
return "\nCOMMAND FAILED TO EXECUTE. RESTARTING PROCESS.", 1
except BrokenPipeError as e:
- logger.error(f"Broken pipe error: {e}\nRESTARTING PROCESS.")
+ logger.error("Broken pipe error: %s\nRESTARTING PROCESS.", e)
self.close_container()
return "\nBROKEN PIPE ERROR. RESTARTING PROCESS.", 1
except Exception as e:
- logger.error(f"cmd failed with exception: {e}")
+ logger.error("cmd failed with exception: %s", e)
return "\nEXECUTION FAILED OR COMMAND MALFORMED", 1
def close_container(self) -> None:
diff --git a/composio/local_tools/local_workspace/commons/history_processor.py b/composio/local_tools/local_workspace/commons/history_processor.py
index 72f1503c0..027f2e994 100644
--- a/composio/local_tools/local_workspace/commons/history_processor.py
+++ b/composio/local_tools/local_workspace/commons/history_processor.py
@@ -11,7 +11,6 @@
logger = get_logger()
script_path = Path(__file__)
script_dir = script_path.parent
-submit_logs_dir = script_dir / Path("../../../examples/swe/submit_logs/")
class HistoryProcessor:
diff --git a/composio/local_tools/local_workspace/commons/local_docker_workspace.py b/composio/local_tools/local_workspace/commons/local_docker_workspace.py
index fec9eed4d..7fb461d92 100644
--- a/composio/local_tools/local_workspace/commons/local_docker_workspace.py
+++ b/composio/local_tools/local_workspace/commons/local_docker_workspace.py
@@ -83,7 +83,7 @@ def _reset_container(self) -> None:
logger.error("handling keyboard interrupt")
raise
except Exception as e:
- logger.error(f"reset container exception: {e}")
+ logger.error("reset container exception: %s", e)
self._init_container()
self._init_scripts()
@@ -170,9 +170,9 @@ def communicate_with_handling(
timeout_duration=timeout_duration,
)
if self.returncode != 0:
- self.logger.error(f"{error_msg}: {logs}")
+ self.logger.error("%s: %s", error_msg, logs)
self.close()
- raise RuntimeError(f"{error_msg}: {logs}")
+ raise RuntimeError("%s: %s", error_msg, logs)
return logs
def communicate(self, input: str, timeout_duration=25) -> Tuple[str, int]:
@@ -205,9 +205,7 @@ def interrupt(self):
except TimeoutError:
pass
try:
- output, return_code = self.communicate(
- input="echo 'interrupted'", timeout_duration=5
- )
+ output, _ = self.communicate(input="echo 'interrupted'", timeout_duration=5)
assert output.strip().endswith(
"interrupted"
), "container health check failed"
@@ -249,7 +247,7 @@ def close(self):
logger.error("handling keyboard interrupt")
raise
except Exception as e:
- logger.error(f"docker close exception: {e}")
+ logger.error("docker close exception: %s", e)
assert self.container is not None
assert self.container_obj is not None
self.container.terminate()
@@ -258,7 +256,9 @@ def close(self):
self.container_obj.pause()
self.logger.info("Agent container paused")
else:
- self.logger.info(f"Agent container status: {self.container_obj.status}")
+ self.logger.info(
+ "Agent container status: %s", self.container_obj.status
+ )
else:
try:
self.container_obj.remove(force=True)
@@ -266,7 +266,7 @@ def close(self):
logger.error("handling keyboard interrupt")
raise
except Exception as e:
- logger.error(f"docker close exception: {e}")
+ logger.error("docker close exception: %s", e)
self.logger.info("Agent container stopped")
# todo: implement these hooks
for hook in self.hooks:
@@ -310,7 +310,7 @@ def get_workspace_state(self, workspace_id: str):
container_process = get_container_process(workspace_meta[KEY_WORKSPACE_MANAGER])
container_obj = get_container_by_container_name(container_name, image_name)
parent_pids = workspace_meta[KEY_PARENT_PIDS]
- output, return_code = communicate(
+ output, _ = communicate(
container_process, container_obj, state_cmd, parent_pids
)
return output
diff --git a/composio/local_tools/local_workspace/commons/parsing.py b/composio/local_tools/local_workspace/commons/parsing.py
index 38e9d0a00..82caf9559 100644
--- a/composio/local_tools/local_workspace/commons/parsing.py
+++ b/composio/local_tools/local_workspace/commons/parsing.py
@@ -103,7 +103,7 @@ def parse_command_file(self, path: str) -> List[Command]:
)
return commands
- def parse_bash_functions(self, path, contents) -> List[Command]:
+ def parse_bash_functions(self, _path, contents) -> List[Command]:
"""
Simple logic for parsing a bash file and segmenting it into functions.
diff --git a/composio/local_tools/local_workspace/commons/utils.py b/composio/local_tools/local_workspace/commons/utils.py
index dd295b11c..3f9b2f323 100644
--- a/composio/local_tools/local_workspace/commons/utils.py
+++ b/composio/local_tools/local_workspace/commons/utils.py
@@ -70,13 +70,17 @@ def get_container(
)
raise RuntimeError(msg)
if len(filtered_images) > 1:
- logger.warning(f"Multiple images found for {image_name}, that's weird.")
+ logger.warning("Multiple images found for %s, that's weird.", image_name)
attrs = filtered_images[0].attrs
if attrs:
logger.info(
- f"Found image {image_name} with tags: {attrs['RepoTags']}, created: {attrs['Created']} "
- f"for {attrs['Os']} {attrs['Architecture']}."
+ "Found image %s with tags: %s, created: %s " "for %s %s.",
+ image_name,
+ attrs["RepoTags"],
+ attrs["Created"],
+ attrs["Os"],
+ attrs["Architecture"],
)
if persistent:
@@ -97,12 +101,16 @@ def get_container_by_container_name(cls, container_name: str, image_name: str):
)
raise RuntimeError(msg)
if len(filtered_images) > 1:
- logger.warning(f"Multiple images found for {image_name}, that's weird.")
+ logger.warning("Multiple images found for %s, that's weird.", image_name)
attrs = filtered_images[0].attrs
if attrs is not None:
logger.info(
- f"Found image {image_name} with tags: {attrs['RepoTags']}, created: {attrs['Created']} "
- f"for {attrs['Os']} {attrs['Architecture']}."
+ "Found image %s with tags: %s, created: %s " "for %s %s.",
+ image_name,
+ attrs["RepoTags"],
+ attrs["Created"],
+ attrs["Os"],
+ attrs["Architecture"],
)
max_attempts = 5
attempt = 0
@@ -160,7 +168,7 @@ def _get_persistent_container(
"-l",
"-m",
]
- logger.debug(f"Starting container with command: {shlex.join(startup_cmd)}")
+ logger.debug("Starting container with command: %s", shlex.join(startup_cmd))
container = subprocess.Popen(
startup_cmd,
stdin=PIPE,
@@ -175,7 +183,7 @@ def _get_persistent_container(
container, None, lambda arge1, arg2: [], [], timeout_duration=2
)
if output:
- logger.error(f"Unexpected container setup output: {output}")
+ logger.error("Unexpected container setup output: %s", output)
# Get the process IDs of the container
# There should be at least a head process and possibly one child bash process
bash_pids, other_pids = get_background_pids(container_obj)
@@ -211,7 +219,7 @@ def _get_non_persistent_container(
"-l",
"-m",
]
- logger.debug(f"Starting container with command: {shlex.join(startup_cmd)}")
+ logger.debug("Starting container with command: %s", shlex.join(startup_cmd))
container = subprocess.Popen(
startup_cmd,
stdin=PIPE,
@@ -226,7 +234,7 @@ def _get_non_persistent_container(
container, None, lambda arg1, arg2: [], [], timeout_duration=2
)
if output:
- logger.error(f"Unexpected container setup output: {output}")
+ logger.error("Unexpected container setup output: %s", output)
return container, {
"1",
} # bash PID is always 1 for non-persistent containers
@@ -345,7 +353,7 @@ def copy_file_to_container(container_obj, contents, container_path):
)
except Exception as e:
- logger.error(f"An error occurred: {e}")
+ logger.error("An error occurred: %s", e)
logger.error(traceback.format_exc())
finally:
# Cleanup: Remove the temporary file if it was created
@@ -405,7 +413,7 @@ def communicate_with_handling(
timeout_duration=timeout_duration,
)
if return_code != 0:
- logger.error(f"{error_msg}: {logs}")
+ logger.error("%s: %s", error_msg, logs)
# call close container here in future
# self.close()
raise RuntimeError(f"{error_msg}: {logs}")
@@ -448,7 +456,7 @@ def _communicate(
container, container_obj, get_pids, parent_pids, 5
).strip()
except Exception as e:
- logger.error(f"Read with timeout failed on input:\n---\n{input}\n---")
+ logger.error("Read with timeout failed on input:\n---\n%s\n---", input)
raise e
if not exit_code.isdigit():
raise RuntimeError(
diff --git a/composio/local_tools/mathematical/actions/calculator.py b/composio/local_tools/mathematical/actions/calculator.py
index 70bfaf022..dee38bf2c 100644
--- a/composio/local_tools/mathematical/actions/calculator.py
+++ b/composio/local_tools/mathematical/actions/calculator.py
@@ -7,6 +7,7 @@ class CalculatorRequest(BaseModel):
operation: str = Field(
...,
description="A mathematical expression, a couple examples are `200*7` or `5000/2*10`",
+ json_schema_extra={"file_readable": True}
)
@@ -30,7 +31,9 @@ def execute(
) -> dict:
operation_str = request_data.dict()["operation"]
try:
+ # pylint: disable=eval-used
result = eval(operation_str)
+ # pylint: enable=eval-used
execution_details = {"executed": True}
response_data = result
except Exception as e:
diff --git a/composio/local_tools/ragtool/actions/rag_add_request.py b/composio/local_tools/ragtool/actions/rag_add_request.py
index 8d766a3ae..671a504d0 100644
--- a/composio/local_tools/ragtool/actions/rag_add_request.py
+++ b/composio/local_tools/ragtool/actions/rag_add_request.py
@@ -4,7 +4,7 @@
class RagToolAddRequest(BaseModel):
- content: str = Field(..., description="Content to add to the knowledge base")
+ content: str = Field(..., description="Content to add to the knowledge base", json_schema_extra={"file_readable": True})
class RagToolAddResponse(BaseModel):
@@ -22,16 +22,21 @@ class AddContentToRagTool(Action):
_tags = ["Knowledge Base"]
_tool_name = "ragtool"
- def execute(self, request: RagToolAddRequest, authorisation_data: dict = {}):
+ def execute(self, request: RagToolAddRequest, authorisation_data: dict = None):
"""Add content to the knowledge base"""
+ if authorisation_data is None:
+ authorisation_data = {}
try:
+ # pylint: disable=import-outside-toplevel
from embedchain import App
+
+ # pylint: enable=import-outside-toplevel
except ImportError as e:
- raise ImportError(f"Failed to import App from embedchain: {e}")
+ raise ImportError(f"Failed to import App from embedchain: {e}") from e
try:
embedchain_app = App()
content = request.content
embedchain_app.add(content)
return "Content added successfully"
except Exception as e:
- raise Exception(f"Error adding content: {e}")
+ raise Exception(f"Error adding content: {e}") from e
diff --git a/composio/local_tools/ragtool/actions/rag_query.py b/composio/local_tools/ragtool/actions/rag_query.py
index 3599e6545..ec68fc922 100644
--- a/composio/local_tools/ragtool/actions/rag_query.py
+++ b/composio/local_tools/ragtool/actions/rag_query.py
@@ -25,24 +25,29 @@ class RagToolQuery(Action):
_tags = ["Knowledge Base"]
_tool_name = "ragtool"
- def execute(self, request: RagToolQueryRequest, authorisation_data: dict = {}):
+ def execute(self, request: RagToolQueryRequest, authorisation_data: dict = None):
"""Query the knowledge base and return the response"""
+ if authorisation_data is None:
+ authorisation_data = {}
try:
+ # pylint: disable=import-outside-toplevel
from embedchain import App
+
+ # pylint: enable=import-outside-toplevel
except ImportError as e:
- raise ImportError(f"Failed to import App from embedchain: {e}")
+ raise ImportError(f"Failed to import App from embedchain: {e}") from e
embedchain_app = None
try:
embedchain_app = App()
except Exception as e:
print(f"Failed to initialize App: {e}")
- raise Exception(f"Failed to initialize App: {e}")
+ raise Exception(f"Failed to initialize App: {e}") from e
query = request.query
if embedchain_app:
try:
- result, sources = embedchain_app.query(query, citations=True)
+ _, sources = embedchain_app.query(query, citations=True)
response = "\n\n".join([source[0] for source in sources])
return response
except Exception as e:
diff --git a/composio/local_tools/webtool/actions/scrape_website_content.py b/composio/local_tools/webtool/actions/scrape_website_content.py
index 4fae0a4e5..0e787f3d7 100644
--- a/composio/local_tools/webtool/actions/scrape_website_content.py
+++ b/composio/local_tools/webtool/actions/scrape_website_content.py
@@ -31,9 +31,12 @@ def execute(self, request: ScrapeWebsiteToolRequest, authorisation_data: dict =
"""Scrape the website and return the content"""
url = request.website_url
try:
+ # pylint: disable=import-outside-toplevel
from bs4 import BeautifulSoup
+
+ # pylint: enable=import-outside-toplevel
except ImportError as e:
- raise ImportError("Failed to import BeautifulSoup:", e)
+ raise ImportError("Failed to import BeautifulSoup:", e) from e
try:
# Adding headers to mimic a browser request
headers = {
diff --git a/composio/local_tools/webtool/actions/scrape_website_element.py b/composio/local_tools/webtool/actions/scrape_website_element.py
index 7aca0cc9e..885759a11 100644
--- a/composio/local_tools/webtool/actions/scrape_website_element.py
+++ b/composio/local_tools/webtool/actions/scrape_website_element.py
@@ -29,15 +29,20 @@ class ScrapeWebsiteElement(Action):
_tool_name = "webtool"
def execute(
- self, request: ScrapeWebsiteElementToolRequest, authorisation_data: dict = {}
+ self, request: ScrapeWebsiteElementToolRequest, authorisation_data: dict = None
):
"""Scrape a specific element from the website and return its content"""
+ if authorisation_data is None:
+ authorisation_data = {}
url = request.website_url
selector = request.element_selector
try:
+ # pylint: disable=import-outside-toplevel
from bs4 import BeautifulSoup
+
+ # pylint: enable=import-outside-toplevel
except ImportError as e:
- raise ImportError("Failed to import BeautifulSoup:", e)
+ raise ImportError("Failed to import BeautifulSoup:", e) from e
try:
# Adding headers to mimic a browser request
headers = {
@@ -54,7 +59,6 @@ def execute(
element = soup.select_one(selector)
if element:
return str(element)
- else:
- return "Element not found"
+ return "Element not found"
except Exception as e:
return f"Error scraping element: {e}"
diff --git a/composio/utils/shared.py b/composio/utils/shared.py
index 2f2b19bac..26e0baa15 100644
--- a/composio/utils/shared.py
+++ b/composio/utils/shared.py
@@ -9,7 +9,7 @@
from pydantic.v1.fields import FieldInfo
-SCHEMA_TYPE_TO_PYTHON_TYPE = {
+SCHEMA_TYPE_TO_PYTHON_TYPE: t.Dict[str, t.Type] = {
"string": str,
"number": float,
"boolean": bool,
@@ -60,6 +60,18 @@ def json_schema_to_pydantic_type(
nested_model = json_schema_to_model(json_schema)
return nested_model
return t.Dict
+
+ if type_ is None and "oneOf" in json_schema:
+ one_of_options = json_schema["oneOf"]
+ pydantic_types: t.List[t.Type] = [json_schema_to_pydantic_type(option) for option in one_of_options]
+ if len(pydantic_types) == 1:
+ return pydantic_types[0]
+ elif len(pydantic_types) == 2:
+ return t.Union[t.cast(t.Type, pydantic_types[0]), t.cast(t.Type, pydantic_types[1])]
+ elif len(pydantic_types) == 3 :
+ return t.Union[t.cast(t.Type, pydantic_types[0]), t.cast(t.Type, pydantic_types[1]), t.cast(t.Type, pydantic_types[2])]
+ else:
+ raise ValueError("Invalid 'oneOf' schema")
pytype = PYDANTIC_TYPE_TO_PYTHON_TYPE.get(type_)
if pytype is not None:
@@ -82,6 +94,10 @@ def json_schema_to_pydantic_field(
:return: A Pydantic field definition.
"""
description = json_schema.get("description")
+ if 'oneOf' in json_schema:
+ description = " | ".join([option.get("description", "") for option in json_schema['oneOf']])
+ description = f"Any of the following options(separated by |): {description}"
+
examples = json_schema.get("examples", [])
return (
t.cast(
@@ -193,13 +209,31 @@ def get_signature_format_from_schema_params(schema_params: t.Dict) -> t.List[Par
required_params = schema_params.get("required", [])
schema_params_object = schema_params.get("properties", {})
for param_name, param_schema in schema_params_object.items():
- param_type = param_schema["type"]
- if param_type in SCHEMA_TYPE_TO_PYTHON_TYPE:
+ param_type = param_schema.get("type", None)
+ param_oneOf = param_schema.get("oneOf", None)
+ if param_oneOf is not None:
+ param_types = [ptype.get("type") for ptype in param_oneOf]
+ if len(param_types) == 1:
+ signature_param_type = SCHEMA_TYPE_TO_PYTHON_TYPE[param_types[0]]
+ elif len(param_types) == 2:
+ t1: t.Type = SCHEMA_TYPE_TO_PYTHON_TYPE[param_types[0]]
+ t2: t.Type = SCHEMA_TYPE_TO_PYTHON_TYPE[param_types[1]]
+ signature_param_type = t.Union[t1, t2]
+ elif len(param_types) == 3:
+ t1: t.Type = SCHEMA_TYPE_TO_PYTHON_TYPE[param_types[0]]
+ t2: t.Type = SCHEMA_TYPE_TO_PYTHON_TYPE[param_types[1]]
+ t3: t.Type = SCHEMA_TYPE_TO_PYTHON_TYPE[param_types[2]]
+ signature_param_type = t.Union[t1, t2, t3]
+ else:
+ raise ValueError("Invalid 'oneOf' schema")
+ param_default = param_schema.get("default", '')
+ elif param_type in SCHEMA_TYPE_TO_PYTHON_TYPE:
signature_param_type = SCHEMA_TYPE_TO_PYTHON_TYPE[param_type]
+ param_default = param_schema.get("default", FALLBACK_VALUES[param_type])
else:
signature_param_type = pydantic_model_from_param_schema(param_schema)
+ param_default = param_schema.get("default", FALLBACK_VALUES[param_type])
- param_default = param_schema.get("default", FALLBACK_VALUES[param_type])
param_annotation = signature_param_type
param = Parameter(
name=param_name,
diff --git a/docs/imgs/banner.gif b/docs/imgs/banner.gif
new file mode 100644
index 000000000..6cd15e658
Binary files /dev/null and b/docs/imgs/banner.gif differ
diff --git a/docs/imgs/composio_black_font.svg b/docs/imgs/composio_black_font.svg
new file mode 100644
index 000000000..bfaab1b33
--- /dev/null
+++ b/docs/imgs/composio_black_font.svg
@@ -0,0 +1,18 @@
+
diff --git a/docs/imgs/composio_white_font.svg b/docs/imgs/composio_white_font.svg
new file mode 100644
index 000000000..54e0ed825
--- /dev/null
+++ b/docs/imgs/composio_white_font.svg
@@ -0,0 +1,18 @@
+
diff --git a/examples/local_tools/autogen_math.py b/examples/local_tools/autogen_math.py
new file mode 100644
index 000000000..709d0fdb6
--- /dev/null
+++ b/examples/local_tools/autogen_math.py
@@ -0,0 +1,48 @@
+import os
+
+import dotenv
+from autogen import AssistantAgent, UserProxyAgent
+from composio_autogen import App, ComposioToolSet
+
+
+# Load environment variables from .env
+dotenv.load_dotenv()
+
+
+# Initialize tools.
+chatbot = AssistantAgent(
+ "chatbot",
+ system_message="Reply TERMINATE when the task is done or when user's content is empty",
+ llm_config={
+ "config_list": [
+ {"model": "gpt-4", "api_key": os.environ["OPENAI_API_KEY"]},
+ ]
+ },
+)
+composio_toolset = ComposioToolSet()
+
+
+def is_termination_msg(content: dict) -> bool:
+ """Check if a message contains termination message."""
+ return "TERMINATE" in (content.get("content", "") or "")
+
+
+# Create a user proxy agent
+user_proxy = UserProxyAgent(
+ "user_proxy",
+ is_termination_msg=is_termination_msg,
+ human_input_mode="NEVER",
+ code_execution_config={"use_docker": False},
+)
+
+# Register the preferred Applications, with right executor.
+composio_toolset.register_tools(tools=[App.MATHEMATICAL], caller=chatbot, executor=user_proxy)
+
+# Define task.
+task = "What is 230 multiplied by 52 and added with 233 divided by 91?"
+
+# Execute task.
+response = user_proxy.initiate_chat(chatbot, message=task)
+
+# Print response
+print(response.chat_history)
diff --git a/examples/local_tools/langchain_math.py b/examples/local_tools/langchain_math.py
index da16bb3a3..73a0be1a9 100644
--- a/examples/local_tools/langchain_math.py
+++ b/examples/local_tools/langchain_math.py
@@ -16,7 +16,7 @@
print(tools)
-task = "Calculate 5*30*330"
+task = "Calculate the formula as mentioned in the file /Users/karanvaidya/codes/composio_sdk/eq.txt"
agent = create_openai_functions_agent(llm, tools, prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
diff --git a/plugins/autogen/autogen_demo.py b/plugins/autogen/autogen_demo.py
index 21e7e6b31..0d947792f 100644
--- a/plugins/autogen/autogen_demo.py
+++ b/plugins/autogen/autogen_demo.py
@@ -19,30 +19,33 @@
]
},
)
-composio_toolset = ComposioToolSet()
-
def is_termination_msg(content: dict) -> bool:
"""Check if a message contains termination message."""
return "TERMINATE" in (content.get("content", "") or "")
-# Create a user proxy agent
-user_proxy = UserProxyAgent(
- "user_proxy",
- is_termination_msg=is_termination_msg,
- human_input_mode="NEVER",
- code_execution_config={"use_docker": False},
-)
+def main():
+ composio_toolset = ComposioToolSet()
+ # Create a user proxy agent
+ user_proxy = UserProxyAgent(
+ "user_proxy",
+ is_termination_msg=is_termination_msg,
+ human_input_mode="NEVER",
+ code_execution_config={"use_docker": False},
+ )
+
+ # Register the preferred Applications, with right executor.
+ composio_toolset.register_tools(tools=[App.GITHUB], caller=chatbot, executor=user_proxy)
-# Register the preferred Applications, with right executor.
-composio_toolset.register_tools(tools=[App.GITHUB], caller=chatbot, executor=user_proxy)
+ # Define task.
+ task = "Star a repo SamparkAI/composio on GitHub"
-# Define task.
-task = "Star a repo SamparkAI/composio on GitHub"
+ # Execute task.
+ response = user_proxy.initiate_chat(chatbot, message=task)
-# Execute task.
-response = user_proxy.initiate_chat(chatbot, message=task)
+ # Print response
+ print(response.chat_history)
-# Print response
-print(response.chat_history)
+if __name__ == "__main__":
+ main()
diff --git a/plugins/autogen/setup.py b/plugins/autogen/setup.py
index 622a11a46..7d73a68e4 100644
--- a/plugins/autogen/setup.py
+++ b/plugins/autogen/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_autogen",
- version="0.3.9-rc.1",
+ version="0.3.9rc4",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Autogen agent.",
@@ -22,6 +22,6 @@
"Operating System :: OS Independent",
],
python_requires=">=3.9,<4",
- install_requires=["composio_core===0.3.9-rc.1", "pyautogen>=0.2.19"],
+ install_requires=["composio_core===0.3.9rc4", "pyautogen>=0.2.19"],
include_package_data=True,
)
diff --git a/plugins/claude/setup.py b/plugins/claude/setup.py
index 95829847c..ae0bb3d65 100644
--- a/plugins/claude/setup.py
+++ b/plugins/claude/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_claude",
- version="0.3.9-rc.1",
+ version="0.3.9rc4",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Claude LLMs.",
@@ -22,6 +22,6 @@
"Operating System :: OS Independent",
],
python_requires=">=3.9,<4",
- install_requires=["composio_openai===0.3.9-rc.1", "anthropic>=0.25.7"],
+ install_requires=["composio_openai===0.3.9rc4", "anthropic>=0.25.7"],
include_package_data=True,
)
diff --git a/plugins/crew_ai/setup.py b/plugins/crew_ai/setup.py
index 760670d8a..0a7e90a64 100644
--- a/plugins/crew_ai/setup.py
+++ b/plugins/crew_ai/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_crewai",
- version="0.3.9-rc.1",
+ version="0.3.9rc4",
author="Himanshu",
author_email="himanshu@composio.dev",
description="Use Composio to get an array of tools with your CrewAI agent.",
@@ -22,6 +22,6 @@
"Operating System :: OS Independent",
],
python_requires=">=3.9,<4",
- install_requires=["composio_langchain===0.3.9-rc.1"],
+ install_requires=["composio_langchain===0.3.9rc4"],
include_package_data=True,
)
diff --git a/plugins/griptape/setup.py b/plugins/griptape/setup.py
index e8e65d097..cda972385 100644
--- a/plugins/griptape/setup.py
+++ b/plugins/griptape/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_griptape",
- version="0.3.9-rc.1",
+ version="0.3.9rc4",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Griptape wokflow.",
@@ -22,6 +22,6 @@
"Operating System :: OS Independent",
],
python_requires=">=3.9,<4",
- install_requires=["composio_core===0.3.9-rc.1", "griptape>=0.24.2"],
+ install_requires=["composio_core===0.3.9rc4", "griptape>=0.24.2"],
include_package_data=True,
)
diff --git a/plugins/julep/setup.py b/plugins/julep/setup.py
index e4d619ac7..00ab87096 100644
--- a/plugins/julep/setup.py
+++ b/plugins/julep/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_julep",
- version="0.3.9-rc.1",
+ version="0.3.9rc4",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Julep wokflow.",
@@ -22,6 +22,6 @@
"Operating System :: OS Independent",
],
python_requires=">=3.9,<4",
- install_requires=["composio_openai===0.3.9-rc.1", "julep>=0.3.2"],
+ install_requires=["composio_openai===0.3.9rc4", "julep>=0.3.2"],
include_package_data=True,
)
diff --git a/plugins/langchain/langchain_demo.py b/plugins/langchain/langchain_demo.py
index 79b4eb87b..65cd1d11c 100644
--- a/plugins/langchain/langchain_demo.py
+++ b/plugins/langchain/langchain_demo.py
@@ -20,17 +20,22 @@
# Initialize tools.
openai_client = ChatOpenAI(api_key=os.environ["OPENAI_API_KEY"])
-composio_toolset = ComposioToolSet()
-# Get All the tools
-tools = composio_toolset.get_tools(apps=[App.MATHEMATICAL])
+def main():
+ composio_toolset = ComposioToolSet()
-# Define task
-task = "Star a repo SamparkAI/docs on GitHub"
+ # Get All the tools
+ tools = composio_toolset.get_tools(apps=[App.GITHUB])
-# Define agent
-agent = create_openai_functions_agent(openai_client, tools, prompt)
-agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
+ # Define task
+ task = "Star a repo SamparkAI/docs on GitHub"
-# Execute using agent_executor
-agent_executor.invoke({"input": task})
+ # Define agent
+ agent = create_openai_functions_agent(openai_client, tools, prompt)
+ agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
+
+ # Execute using agent_executor
+ agent_executor.invoke({"input": task})
+
+if __name__ == "__main__":
+ main()
diff --git a/plugins/langchain/setup.py b/plugins/langchain/setup.py
index 845d292fe..3ff206748 100644
--- a/plugins/langchain/setup.py
+++ b/plugins/langchain/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_langchain",
- version="0.3.9-rc.1",
+ version="0.3.9rc4",
author="Karan",
author_email="karan@composio.dev",
description="Use Composio to get an array of tools with your LangChain agent.",
@@ -27,7 +27,7 @@
"langchain-openai>=0.0.2.post1",
"pydantic>=2.6.4",
"langchainhub>=0.1.15",
- "composio_core===0.3.9-rc.1",
+ "composio_core===0.3.9rc4",
],
include_package_data=True,
)
diff --git a/plugins/lyzr/setup.py b/plugins/lyzr/setup.py
index 9a7fc70b9..5420f68d6 100644
--- a/plugins/lyzr/setup.py
+++ b/plugins/lyzr/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_lyzr",
- version="0.3.9-rc.1",
+ version="0.3.9rc4",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Lyzr workflow.",
@@ -25,7 +25,7 @@
install_requires=[
"lyzr-automata>=0.1.3",
"pydantic>=2.6.4",
- "composio_core===0.3.9-rc.1",
+ "composio_core===0.3.9rc4",
"langchain>=0.1.0",
],
include_package_data=True,
diff --git a/plugins/openai/setup.py b/plugins/openai/setup.py
index e70e118d3..621df0f5c 100644
--- a/plugins/openai/setup.py
+++ b/plugins/openai/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_openai",
- version="0.3.9-rc.1",
+ version="0.3.9rc4",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your OpenAI Function Call.",
@@ -22,6 +22,6 @@
"Operating System :: OS Independent",
],
python_requires=">=3.9,<4",
- install_requires=["composio_core===0.3.9-rc.1"],
+ install_requires=["composio_core===0.3.9rc4"],
include_package_data=True,
)
diff --git a/setup.py b/setup.py
index eafd5b01d..274b34d69 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@
setup(
name="composio_core",
- version="0.3.9-rc.1",
+ version="0.3.9rc4",
author="Utkarsh",
author_email="utkarsh@composio.dev",
description="Core package to act as a bridge between composio platform and other services.",
diff --git a/tests/core.spec.ts b/tests/core.spec.ts
deleted file mode 100644
index cb87a1ebc..000000000
--- a/tests/core.spec.ts
+++ /dev/null
@@ -1,40 +0,0 @@
-import { test, expect } from '@playwright/test';
-import fs from 'fs';
-import { execSync } from 'child_process';
-
-test.describe.serial('Python CLI Core Operations', () => {
- test.skip('add integration for github', async ({browser}) => {
- const { exec } = require('child_process');
- const command = `DISABLE_COMPOSIO_WEBBROWSER_OPEN=true python3 ./start_cli.py add github`;
- const context = await browser.newContext({ storageState: 'session.json' });
-
- const process = exec(command);
-
- process.stdout.on('data', async (data) => {
- // console.log('stdout:', data);
- const match = data.trim().match(/Please authenticate github in the browser and come back here. URL: (.+)/);
- if (match && match[1]) {
- const redirectURL = match[1];
- console.log(`Redirect URL: ${match[1]}`);
- const page = await context.newPage();
- await page.goto(redirectURL);
- }
- });
-
- process.stderr.on('data', (data) => {
- // console.error('stderr:', data);
- });
-
- await new Promise((resolve, reject) => {
- process.on('close', (code) => {
- if (code === 0) {
- resolve(true);
- } else {
- reject(`Process exited with code ${code}`);
- }
- });
- });
-
- expect(process).not.toBeNull();
- });
-});
\ No newline at end of file
diff --git a/tests/global.setup.ts b/tests/global.setup.ts
deleted file mode 100644
index f69852f67..000000000
--- a/tests/global.setup.ts
+++ /dev/null
@@ -1,25 +0,0 @@
-import { test as setup, expect } from '@playwright/test';
-import { execSync } from 'child_process';
-import fs from "fs";
-
-const userDataPath = `${process.env.HOME}/.composio`;
-
-setup('user session management', async ({ }) => {
- // Check if directory exists, delete it if it does
- if (fs.existsSync(userDataPath)) {
- execSync(`rm -rf ${userDataPath}`);
- console.log(`Existing directory '${userDataPath}' deleted successfully.`);
- }
-
- // Create directory and write file
- execSync(`mkdir -p ${userDataPath}`);
- fs.writeFileSync(
- `${userDataPath}/user_data.json`,
- JSON.stringify({ "api_key": "3kmtwhffkxvwebhnm7qwzj" }, null, 2)
- );
-
- // Read file and verify content
- const data = fs.readFileSync(`${userDataPath}/user_data.json`, 'utf8');
- expect(data).toContain('3kmtwhffkxvwebhnm7qwzj');
- console.log('user_data.json created and verified successfully.');
-});
\ No newline at end of file
diff --git a/tests/global.teardown.ts b/tests/global.teardown.ts
deleted file mode 100644
index 089f81700..000000000
--- a/tests/global.teardown.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import { test as teardown, expect } from '@playwright/test';
-import { execSync } from 'child_process';
-
-teardown('logout user session', async ({ }) => {
- const output = execSync(`python3 ./start_cli.py logout`).toString();
- await new Promise((resolve) => setTimeout(resolve, 1000));
- expect(output).not.toBeNull();
-});
\ No newline at end of file
diff --git a/tests/initial.spec.ts b/tests/initial.spec.ts
deleted file mode 100755
index 1159692e2..000000000
--- a/tests/initial.spec.ts
+++ /dev/null
@@ -1,21 +0,0 @@
-import { test, expect } from '@playwright/test';
-import fs from 'fs';
-import { execSync } from 'child_process';
-
-test.describe('Python CLI Operations', () => {
- const commands = [
- { command: 'whoami', description: 'Running whoami' },
- { command: 'show-apps', description: 'Running show-apps' },
- { command: 'show-connections github', description: 'Running show-connections github' },
- { command: 'list-triggers github', description: 'Running list-triggers github' },
- ];
-
- commands.forEach(({ command, description }) => {
- test(description, async () => {
- const output = execSync(`python3 ./start_cli.py ${command}`).toString();
- await new Promise((resolve) => setTimeout(resolve, 1000));
- console.log(description + ':', output);
- expect(output).not.toBeNull();
- });
- });
-});
\ No newline at end of file
diff --git a/tests/test_cli/test_login.py b/tests/test_cli/test_login.py
index 4fe7ce38c..f000bd7ac 100644
--- a/tests/test_cli/test_login.py
+++ b/tests/test_cli/test_login.py
@@ -18,5 +18,4 @@ def test_user_already_logged_in(self) -> None:
# Revert back the API Key value.
context.user_data.api_key = api_key
- assert result.exit_code == 1, result.stdout
- assert "Already logged in" in result.stderr
+ assert result.exit_code == 0, result.stdout
diff --git a/tests/test_examples/__init__.py b/tests/test_examples/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/test_examples/test_autogen.py b/tests/test_examples/test_autogen.py
new file mode 100644
index 000000000..160a18631
--- /dev/null
+++ b/tests/test_examples/test_autogen.py
@@ -0,0 +1,52 @@
+import json
+import sys
+import unittest.mock as mock
+import click
+import os
+
+import pytest
+
+from composio.cli import composio as composio_cli
+from composio.exceptions import ApiKeyNotProvidedError
+
+
+def run_autogen_script():
+ from plugins.autogen.autogen_demo import main
+ main()
+
+
+@pytest.fixture(scope="session", autouse=True)
+def pytest_sessionstart_autogen():
+ """
+ Called after the Session object has been created and
+ before performing collection and entering the run test loop.
+ """
+ original_argv = sys.argv # Backup the original arguments
+ sys.argv = [
+ "composio",
+ "logout",
+ ]
+ print("")
+ try:
+ # Clear any existing API key so the logout/CLI call runs unauthenticated
+ if 'COMPOSIO_API_KEY' in os.environ:
+ os.environ.pop('COMPOSIO_API_KEY', None)
+ composio_cli()
+ except SystemExit as e:
+ print(f"SystemExit ignored: {e}")
+ except Exception as e:
+ print(f"Error ignored: {e}")
+ finally:
+ sys.argv = original_argv # Restore original arguments
+
+def test_autogen_script_not_authorized_error():
+ with pytest.raises(ApiKeyNotProvidedError) as exc_info:
+ run_autogen_script()
+ assert "API Key not provided" in str(
+ exc_info.value
+ )
+
+def test_autogen_script_is_working():
+ import os
+ os.environ['COMPOSIO_API_KEY'] = 'kwrjjvgedmuw5jt1fet2'
+ run_autogen_script()
\ No newline at end of file
diff --git a/tests/test_examples/test_langchain.py b/tests/test_examples/test_langchain.py
new file mode 100644
index 000000000..9a9b10f76
--- /dev/null
+++ b/tests/test_examples/test_langchain.py
@@ -0,0 +1,50 @@
+import json
+import sys
+import unittest.mock as mock
+import click
+import os
+import pytest
+
+from composio.exceptions import ApiKeyNotProvidedError
+
+
+def run_langchain_script():
+ from plugins.langchain.langchain_demo import main
+ main()
+
+
+@pytest.fixture(scope="session", autouse=True)
+def pytest_sessionstart_langchain():
+ from composio.cli import composio as composio_cli
+ """
+ Called after the Session object has been created and
+ before performing collection and entering the run test loop.
+ """
+ original_argv = sys.argv # Backup the original arguments
+ sys.argv = [
+ "composio",
+ "logout",
+ ]
+ try:
+ # Clear any existing API key so the logout/CLI call runs unauthenticated
+ if 'COMPOSIO_API_KEY' in os.environ:
+ os.environ.pop('COMPOSIO_API_KEY', None)
+ composio_cli()
+ except SystemExit as e:
+ print(f"SystemExit ignored: {e}")
+ except Exception as e:
+ print(f"Error ignored: {e}")
+ finally:
+ sys.argv = original_argv # Restore original arguments
+
+def test_langchain_script_not_authorized_error():
+ with pytest.raises(ApiKeyNotProvidedError) as exc_info:
+ run_langchain_script()
+ assert "API Key not provided" in str(
+ exc_info.value
+ )
+
+def test_langchain_script_is_working():
+ import os
+ os.environ['COMPOSIO_API_KEY'] = 'kwrjjvgedmuw5jt1fet2'
+ run_langchain_script()
\ No newline at end of file
diff --git a/tox.ini b/tox.ini
index 4ffd0273e..b81f3c47b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -121,7 +121,9 @@ sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,PACKAGES,LOCALFOLDER
[flake8]
max_line_length = 200
exclude= **/build, **/dist
-per-file-ignores = __init__.py:F401
+per-file-ignores = __init__.py:F401,W503
+ignore = E231, W291, W503
+
[mypy]
strict_optional = True