
Commit ea14a19

refactor: encode_kwargs parameter of SentenceTransformersDocumentEmbedder and SentenceTransformersTextEmbedder made to be the last positional parameter for backward compatibility (part II.)
oroszgy committed Feb 5, 2025
1 parent 1f9ae33 commit ea14a19
Showing 2 changed files with 2 additions and 2 deletions.
```diff
@@ -160,8 +160,8 @@ def to_dict(self) -> Dict[str, Any]:
             model_kwargs=self.model_kwargs,
             tokenizer_kwargs=self.tokenizer_kwargs,
             config_kwargs=self.config_kwargs,
-            encode_kwargs=self.encode_kwargs,
             precision=self.precision,
+            encode_kwargs=self.encode_kwargs,
         )
         if serialization_dict["init_parameters"].get("model_kwargs") is not None:
             serialize_hf_model_kwargs(serialization_dict["init_parameters"]["model_kwargs"])
```
```diff
@@ -49,8 +49,8 @@ def __init__( # noqa: PLR0913 # pylint: disable=too-many-positional-arguments
         model_kwargs: Optional[Dict[str, Any]] = None,
         tokenizer_kwargs: Optional[Dict[str, Any]] = None,
         config_kwargs: Optional[Dict[str, Any]] = None,
-        encode_kwargs: Optional[Dict[str, Any]] = None,
         precision: Literal["float32", "int8", "uint8", "binary", "ubinary"] = "float32",
+        encode_kwargs: Optional[Dict[str, Any]] = None,
     ):
         """
         Create a SentenceTransformersTextEmbedder component.
```
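
Why the move matters (illustration added here, not part of the commit): inserting a new parameter anywhere but the end of a signature shifts the meaning of existing positional call sites, while appending it last leaves them intact. A minimal sketch with hypothetical, trimmed-down signatures (the real embedders take many more parameters):

```python
from typing import Any, Dict, Literal, Optional


def init_original(precision: Literal["float32", "int8"] = "float32"):
    """Signature as callers knew it before encode_kwargs existed (hypothetical)."""
    return {"precision": precision}


def init_inserted_before(
    encode_kwargs: Optional[Dict[str, Any]] = None,
    precision: Literal["float32", "int8"] = "float32",
):
    """encode_kwargs placed before precision -- the ordering this commit reverts."""
    return {"precision": precision, "encode_kwargs": encode_kwargs}


def init_appended_last(
    precision: Literal["float32", "int8"] = "float32",
    encode_kwargs: Optional[Dict[str, Any]] = None,
):
    """encode_kwargs appended as the last parameter -- the ordering after this commit."""
    return {"precision": precision, "encode_kwargs": encode_kwargs}


# An existing caller that passed precision positionally:
assert init_original("int8") == {"precision": "int8"}

# If encode_kwargs is inserted before precision, the same call silently binds
# "int8" to encode_kwargs and leaves precision at its default:
assert init_inserted_before("int8") == {"precision": "float32", "encode_kwargs": "int8"}

# With encode_kwargs appended last, the old positional call keeps its meaning:
assert init_appended_last("int8") == {"precision": "int8", "encode_kwargs": None}
```

The same reasoning applies to both embedders touched by this commit: keeping encode_kwargs last means calls written against earlier releases keep binding precision (and everything before it) to the intended parameters.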
