Litellm #1265
Changes from 7 commits
@@ -4,16 +4,15 @@
 from rasa_sdk import Tracker
 from rasa_sdk.executor import CollectingDispatcher

 from kairon import Utility
 from kairon.actions.definitions.base import ActionsBase
 from kairon.shared.actions.data_objects import ActionServerLogs
 from kairon.shared.actions.exception import ActionFailure
 from kairon.shared.actions.models import ActionType, UserMessageType
 from kairon.shared.actions.utils import ActionUtility
 from kairon.shared.constants import FAQ_DISABLED_ERR, KaironSystemSlots, KAIRON_USER_MSG_ENTITY
 from kairon.shared.data.constant import DEFAULT_NLU_FALLBACK_RESPONSE
-from kairon.shared.llm.factory import LLMFactory
 from kairon.shared.models import LlmPromptType, LlmPromptSource
+from kairon.shared.llm.processor import LLMProcessor


 class ActionPrompt(ActionsBase):
@@ -62,14 +61,17 @@ async def execute(self, dispatcher: CollectingDispatcher, tracker: Tracker, doma
         time_taken_slots = 0
         final_slots = {"type": "slots_to_fill"}
         llm_response_log = {"type": "llm_response"}
+        llm_processor = None
         try:
             k_faq_action_config, bot_settings = self.retrieve_config()
             user_question = k_faq_action_config.get('user_question')
             user_msg = self.__get_user_msg(tracker, user_question)
+            llm_type = k_faq_action_config['llm_type']
             llm_params = await self.__get_llm_params(k_faq_action_config, dispatcher, tracker, domain)
-            llm = LLMFactory.get_instance("faq")(self.bot, bot_settings["llm_settings"])
-            llm_response, time_taken_llm_response = await llm.predict(user_msg, **llm_params)
+            llm_processor = LLMProcessor(self.bot)
+            llm_response, time_taken_llm_response = await llm_processor.predict(user_msg,
+                                                                                user=tracker.sender_id,
+                                                                                **llm_params)
             status = "FAILURE" if llm_response.get("is_failure", False) is True else status
             exception = llm_response.get("exception")
             bot_response = llm_response['content']
@@ -93,8 +95,8 @@ async def execute(self, dispatcher: CollectingDispatcher, tracker: Tracker, doma
             total_time_elapsed = time_taken_llm_response + time_taken_slots
             events_to_extend = [llm_response_log, final_slots]
             events.extend(events_to_extend)
-            if llm:
-                llm_logs = llm.logs
+            if llm_processor:
+                llm_logs = llm_processor.logs
             ActionServerLogs(
                 type=ActionType.prompt_action.value,
                 intent=tracker.get_intent_of_latest_message(skip_fallback_intent=False),
@@ -119,16 +121,6 @@ async def execute(self, dispatcher: CollectingDispatcher, tracker: Tracker, doma
         return slots_to_fill

     async def __get_llm_params(self, k_faq_action_config: dict, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]):
-        implementations = {
-            "GPT3_FAQ_EMBED": self.__get_gpt_params,
-        }
-
-        llm_type = Utility.environment['llm']["faq"]
-        if not implementations.get(llm_type):
-            raise ActionFailure(f'{llm_type} type LLM is not supported')
-        return await implementations[Utility.environment['llm']["faq"]](k_faq_action_config, dispatcher, tracker, domain)
-
     async def __get_gpt_params(self, k_faq_action_config: dict, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]):
         from kairon.actions.definitions.factory import ActionFactory

         system_prompt = None
@@ -147,7 +139,7 @@ async def __get_gpt_params(self, k_faq_action_config: dict, dispatcher: Collecti
                 history_prompt = ActionUtility.prepare_bot_responses(tracker, num_bot_responses)
             elif prompt['source'] == LlmPromptSource.bot_content.value and prompt['is_enabled']:
                 use_similarity_prompt = True
-                hyperparameters = prompt.get('hyperparameters', {})
+                hyperparameters = prompt.get("hyperparameters", {})
                 similarity_prompt.append({'similarity_prompt_name': prompt['name'],
                                           'similarity_prompt_instructions': prompt['instructions'],
                                           'collection': prompt['data'],
@@ -179,7 +171,7 @@ async def __get_gpt_params(self, k_faq_action_config: dict, dispatcher: Collecti
                 is_query_prompt_enabled = True
                 query_prompt_dict.update({'query_prompt': query_prompt, 'use_query_prompt': is_query_prompt_enabled})

-        params["hyperparameters"] = k_faq_action_config.get('hyperparameters', Utility.get_llm_hyperparameters())
+        params["hyperparameters"] = k_faq_action_config['hyperparameters']
         params["system_prompt"] = system_prompt
         params["context_prompt"] = context_prompt
         params["query_prompt"] = query_prompt_dict

Review comment (suggested change): Refactor method to reduce complexity. The __get_llm_params signature can be trimmed to the parameters it actually needs:

- async def __get_llm_params(self, k_faq_action_config: dict, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]):
+ async def __get_llm_params(self, k_faq_action_config: dict, tracker: Tracker):
+     # Simplified method focusing only on necessary parameters
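If that suggestion were adopted, the call site in execute() (second hunk above) would shrink with it. A minimal sketch, assuming dispatcher and domain are indeed unused inside the method; the first call is taken verbatim from the diff, the second is hypothetical and not part of this PR:

    # current call, as it appears in the diff
    llm_params = await self.__get_llm_params(k_faq_action_config, dispatcher, tracker, domain)
    # call under the reviewer's trimmed signature (hypothetical)
    llm_params = await self.__get_llm_params(k_faq_action_config, tracker)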
Review comment (suggested change): Remove unused variable. The variable llm_type is declared but not used anywhere in the method. Committable suggestion:

-            llm_type = k_faq_action_config['llm_type']

Tools: Ruff
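Applied to the execute() hunk above, this is a one-line deletion; a short sketch of where it lands, with the surrounding lines reproduced from the diff:

    user_msg = self.__get_user_msg(tracker, user_question)
    # llm_type = k_faq_action_config['llm_type']   dropped: the value is never read
    llm_params = await self.__get_llm_params(k_faq_action_config, dispatcher, tracker, domain)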