Skip to content

Commit

Permalink
Run black formatter.
Browse files Browse the repository at this point in the history
  • Loading branch information
madiator committed Dec 14, 2024
1 parent 574aee5 commit 419a275
Show file tree
Hide file tree
Showing 3 changed files with 34 additions and 28 deletions.
11 changes: 8 additions & 3 deletions examples/simple_poem.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
Please see the poem.py for more complex use cases.
"""

from bespokelabs import curator

# Use GPT-4o-mini for this example.
Expand All @@ -15,6 +16,10 @@
print(poem)

# Note that we can also pass a list of prompts to generate multiple responses.
# (The diff artifact that duplicated this call has been collapsed to the
# single, black-formatted form.)
poems = llm(
    [
        "Write a sonnet about the importance of data in AI.",
        "Write a haiku about the importance of data in AI.",
    ]
)
print(poems)
2 changes: 1 addition & 1 deletion src/bespokelabs/curator/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
from .dataset import Dataset
from .llm.llm import LLM
from .llm.simple_llm import SimpleLLM
from .llm.simple_llm import SimpleLLM
49 changes: 25 additions & 24 deletions src/bespokelabs/curator/llm/simple_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,29 +4,30 @@


class SimpleLLM:
    """A simpler interface for the LLM class.

    Usage:
        llm = SimpleLLM(model_name="gpt-4o-mini")
        llm("Do you know about the bitter lesson?")
        llm(["What is the capital of France?", "What is the capital of Germany?"])
    For more complex use cases (e.g. structured outputs and custom prompt
    functions), see the LLM class.
    """

    def __init__(self, model_name: str, backend: str = "openai"):
        # Only store configuration here; the underlying LLM object is
        # constructed fresh on every call.
        self._model_name = model_name
        self._backend = backend

    def __call__(self, prompt: Union[str, List[str]]) -> Union[str, List[str]]:
        """Generate completions for one prompt or a batch of prompts.

        A single string in yields a single string out; a list of strings
        yields a list of responses in the same order.
        """
        single = isinstance(prompt, str)
        # Normalize to a list so a uniform Dataset can be built either way.
        prompts = [prompt] if single else prompt
        prompt_dataset: Dataset = Dataset.from_dict({"prompt": prompts})

        engine = LLM(
            prompt_func=lambda row: row["prompt"],
            model_name=self._model_name,
            response_format=None,
            backend=self._backend,
        )
        result = engine(prompt_dataset)
        responses = result["response"]
        # Unwrap the singleton for the plain-string calling convention.
        return responses[0] if single else responses

0 comments on commit 419a275

Please sign in to comment.