Skip to content

Commit

Permalink
x
Browse files Browse the repository at this point in the history
  • Loading branch information
eyurtsev committed Oct 11, 2023
1 parent b2f13b8 commit 743d7b0
Show file tree
Hide file tree
Showing 2 changed files with 108 additions and 22 deletions.
8 changes: 2 additions & 6 deletions langserve/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,9 +186,7 @@ async def invoke(
# Request is first validated using InvokeRequest which takes into account
# config_keys as well as input_type.
config = _unpack_config(request.config, config_keys)
output = await runnable.ainvoke(
_unpack_input(request.input), config=config, **request.kwargs
)
output = await runnable.ainvoke(_unpack_input(request.input), config=config)

return InvokeResponse(output=simple_dumpd(output))

Expand All @@ -201,7 +199,7 @@ async def batch(request: Annotated[BatchRequest, BatchRequest]) -> BatchResponse
else:
config = _unpack_config(request.config, config_keys)
inputs = [_unpack_input(input_) for input_ in request.inputs]
output = await runnable.abatch(inputs, config=config, **request.kwargs)
output = await runnable.abatch(inputs, config=config)

return BatchResponse(output=simple_dumpd(output))

Expand All @@ -221,7 +219,6 @@ async def _stream() -> AsyncIterator[dict]:
async for chunk in runnable.astream(
input_,
config=config,
**request.kwargs,
):
yield {"data": simple_dumps(chunk), "event": "data"}
yield {"event": "end"}
Expand Down Expand Up @@ -251,7 +248,6 @@ async def _stream_log() -> AsyncIterator[dict]:
exclude_names=request.exclude_names,
exclude_types=request.exclude_types,
exclude_tags=request.exclude_tags,
**request.kwargs,
):
if request.diff: # Run log patch
if not isinstance(chunk, RunLogPatch):
Expand Down
122 changes: 106 additions & 16 deletions langserve/validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,24 @@ def create_invoke_request_model(
"""Create a pydantic model for the invoke request."""
invoke_request_type = create_model(
f"{namespace}InvokeRequest",
input=(input_type, ...),
config=(config, Field(default_factory=dict)),
kwargs=(dict, Field(default_factory=dict)),
input=(input_type, Field(..., description="The input to the runnable.")),
config=(
config,
Field(
default_factory=dict,
description=(
"Subset of RunnableConfig object in LangChain. "
"Useful for passing information like tags, metadata etc."
),
),
),
kwargs=(
dict,
Field(
default_factory=dict,
description="Keyword arguments to the runnable. Currently ignored.",
),
),
)
invoke_request_type.update_forward_refs()
return invoke_request_type
Expand All @@ -37,9 +52,24 @@ def create_stream_request_model(
"""Create a pydantic model for the invoke request."""
stream_request_model = create_model(
f"{namespace}StreamRequest",
input=(input_type, ...),
config=(config, Field(default_factory=dict)),
kwargs=(dict, Field(default_factory=dict)),
input=(input_type, Field(..., description="The input to the runnable.")),
config=(
config,
Field(
default_factory=dict,
description=(
"Subset of RunnableConfig object in LangChain. "
"Useful for passing information like tags, metadata etc."
),
),
),
kwargs=(
dict,
Field(
default_factory=dict,
description="Keyword arguments to the runnable. Currently ignored.",
),
),
)
stream_request_model.update_forward_refs()
return stream_request_model
Expand All @@ -54,8 +84,24 @@ def create_batch_request_model(
batch_request_type = create_model(
f"{namespace}BatchRequest",
inputs=(List[input_type], ...),
config=(Union[config, List[config]], Field(default_factory=dict)),
kwargs=(dict, Field(default_factory=dict)),
config=(
Union[config, List[config]],
Field(
default_factory=dict,
description=(
"Subset of RunnableConfig object in LangChain. Either specify one "
"config for all inputs or a list of configs with one per input. "
"Useful for passing information like tags, metadata etc."
),
),
),
kwargs=(
dict,
Field(
default_factory=dict,
description="Keyword arguments to the runnable. Currently ignored.",
),
),
)
batch_request_type.update_forward_refs()
return batch_request_type
Expand All @@ -72,12 +118,48 @@ def create_stream_log_request_model(
input=(input_type, ...),
config=(config, Field(default_factory=dict)),
diff=(Optional[bool], False),
include_names=(Optional[Sequence[str]], None),
include_types=(Optional[Sequence[str]], None),
include_tags=(Optional[Sequence[str]], None),
exclude_names=(Optional[Sequence[str]], None),
exclude_types=(Optional[Sequence[str]], None),
exclude_tags=(Optional[Sequence[str]], None),
include_names=(
Optional[Sequence[str]],
Field(
None,
description="If specified, filter to runnables with matching names",
),
),
include_types=(
Optional[Sequence[str]],
Field(
None,
description="If specified, filter to runnables with matching types",
),
),
include_tags=(
Optional[Sequence[str]],
Field(
None,
description="If specified, filter to runnables with matching tags",
),
),
exclude_names=(
Optional[Sequence[str]],
Field(
None,
description="If specified, exclude runnables with matching names",
),
),
exclude_types=(
Optional[Sequence[str]],
Field(
None,
description="If specified, exclude runnables with matching types",
),
),
exclude_tags=(
Optional[Sequence[str]],
Field(
None,
description="If specified, exclude runnables with matching tags",
),
),
kwargs=(dict, Field(default_factory=dict)),
)
stream_log_request.update_forward_refs()
Expand All @@ -91,7 +173,7 @@ def create_invoke_response_model(
"""Create a pydantic model for the invoke response."""
invoke_response_type = create_model(
f"{namespace}InvokeResponse",
output=(output_type, ...),
output=(output_type, Field(..., description="The output of the invocation.")),
)
invoke_response_type.update_forward_refs()
return invoke_response_type
Expand All @@ -104,7 +186,15 @@ def create_batch_response_model(
"""Create a pydantic model for the batch response."""
batch_response_type = create_model(
f"{namespace}BatchResponse",
output=(List[output_type], ...),
output=(
List[output_type],
Field(
...,
description=(
"The outputs corresponding to the inputs of the batch request."
),
),
),
)
batch_response_type.update_forward_refs()
return batch_response_type

0 comments on commit 743d7b0

Please sign in to comment.