Skip to content

Commit

Permalink
Add Chat Settings to Generic Langchain Provider (#622)
Browse files Browse the repository at this point in the history
* Add Chat Settings to Generic Langchain Provider

* Update langchain.py

* Set default inputs to empty list

Suggestion from @willydouhard

* Do not require settings for stream event

* use chainlit 1.0.0 attrs

* remove unnecessary whitespace

* Update langchain.py

* Update backend/chainlit/playground/providers/langchain.py

Co-authored-by: Willy Douhard <[email protected]>

---------

Co-authored-by: Willy Douhard <[email protected]>
  • Loading branch information
tylertitsworth and willydouhard authored Jan 3, 2024
1 parent ba46705 commit cf23d47
Showing 1 changed file with 7 additions and 5 deletions.
12 changes: 7 additions & 5 deletions backend/chainlit/playground/providers/langchain.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from typing import Union
from typing import List, Union

from chainlit.input_widget import InputWidget
from chainlit.playground.provider import BaseProvider
from chainlit.sync import make_async
from chainlit_client import GenerationMessage
Expand All @@ -18,13 +19,14 @@ def __init__(
id: str,
name: str,
llm: Union[LLM, BaseChatModel],
inputs: List[InputWidget] = [],
is_chat: bool = False,
):
super().__init__(
id=id,
name=name,
env_vars={},
inputs=[],
inputs=inputs,
is_chat=is_chat,
)
self.llm = llm
Expand Down Expand Up @@ -65,10 +67,10 @@ async def create_completion(self, request):

messages = self.create_generation(request)

stream = make_async(self.llm.stream)

result = await stream(
# https://github.com/langchain-ai/langchain/issues/14980
result = await make_async(self.llm.stream)(
input=messages,
**request.generation.settings
)

def create_event_stream():
Expand Down

0 comments on commit cf23d47

Please sign in to comment.