diff --git a/pylib/llm_wrapper.py b/pylib/llm_wrapper.py
index 9f2f9ef..6c97cf7 100644
--- a/pylib/llm_wrapper.py
+++ b/pylib/llm_wrapper.py
@@ -190,8 +190,7 @@ async def __call__(self, prompt, api_func=None, **kwargs):
             kwargs (dict, optional): Extra parameters to pass to the model via API.
                 See Completions.create in OpenAI API, but in short, these:
                 best_of, echo, frequency_penalty, logit_bias, logprobs, max_tokens, n
-                presence_penalty, seed, stop, stream, suffix, temperature, top_p, user
-q
+                presence_penalty, seed, stop, stream, suffix, temperature, top_p, user
         Returns:
             dict: JSON response from the LLM
         '''
diff --git a/pyproject.toml b/pyproject.toml
index 749bec8..4f80c89 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,12 +21,12 @@ classifiers = [
   "Programming Language :: Python",
   "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
+  "Programming Language :: Python :: 3.12",
   "Programming Language :: Python :: Implementation :: CPython",
   "Programming Language :: Python :: Implementation :: PyPy",
 ]
 dependencies = [
   "openai>=1.1.0",
-  "python-dotenv",
   "tomli",
   "amara3.iri"
 ]
@@ -78,7 +78,7 @@ cov = [
 ]
 
 [[tool.hatch.envs.all.matrix]]
-python = ["3.10", "3.11"]
+python = ["3.10", "3.11", "3.12"]
 
 [tool.hatch.envs.lint]
 detached = true
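
For reviewers, a minimal usage sketch of the kwargs pass-through documented in the llm_wrapper.py hunk above. The class name openai_api, its constructor arguments, and the import path are assumptions for illustration only; the diff itself only confirms that extra keyword arguments to the async __call__ are forwarded to the OpenAI Completions.create call.

    # Hedged sketch only: `openai_api`, its constructor arguments, and the
    # import path are assumed names, not confirmed by this diff.
    import asyncio
    from ogbujipt import llm_wrapper  # assumed package layout for pylib/llm_wrapper.py

    async def main():
        llm = llm_wrapper.openai_api(model='gpt-3.5-turbo-instruct')  # hypothetical setup
        # Extra parameters (temperature, max_tokens, user, ...) ride along in
        # **kwargs and are passed through to Completions.create, per the docstring above
        resp = await llm('Say hello in one line', temperature=0.7, max_tokens=32, user='reviewer-demo')
        print(resp)

    asyncio.run(main())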