Skip to content

Commit

Permalink
Merge pull request #153 from bmritz/feature/bump-openai-library-requirement
Browse files Browse the repository at this point in the history

update openai requirements for version 1
  • Loading branch information
bmritz authored Feb 14, 2024
2 parents 94ed7e1 + b025a78 commit 54b3862
Show file tree
Hide file tree
Showing 9 changed files with 1,139 additions and 1,001 deletions.
1 change: 1 addition & 0 deletions .flake8
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,4 @@ rst-roles = class,const,func,meth,mod,ref
rst-directives = deprecated
per-file-ignores =
tests/**:D104,D103,D100
noxfile.py:E241,E231,E272,E202
12 changes: 9 additions & 3 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,15 +14,21 @@ jobs:
include:
- { python: "3.11", os: "ubuntu-latest", session: "pre-commit" }
- { python: "3.11", os: "ubuntu-latest", session: "safety" }

- { python: "3.12", os: "ubuntu-latest", session: "mypy-3.12" }
- { python: "3.11", os: "ubuntu-latest", session: "mypy-3.11" }
- { python: "3.10", os: "ubuntu-latest", session: "mypy-3.10" }
- { python: "3.9", os: "ubuntu-latest", session: "mypy-3.9" }
- { python: "3.8", os: "ubuntu-latest", session: "mypy-3.8" }
- { python: "3.10", os: "ubuntu-latest", session: "mypy-3.10" }

- { python: "3.12", os: "ubuntu-latest", session: "tests-3.12" }
- { python: "3.11", os: "ubuntu-latest", session: "tests-3.11" }
- { python: "3.10", os: "ubuntu-latest", session: "tests-3.10" }
- { python: "3.9", os: "ubuntu-latest", session: "tests-3.9" }
- { python: "3.8", os: "ubuntu-latest", session: "tests-3.8" }
- { python: "3.10", os: "ubuntu-latest", session: "tests-3.10" }
- { python: "3.11", os: "macos-latest", session: "tests-3.11" }

- { python: "3.12", os: "macos-latest", session: "tests-3.12" }

- { python: "3.11", os: "ubuntu-latest", session: "xdoctest-3.11" }
- { python: "3.11", os: "ubuntu-latest", session: "docs-build" }

Expand Down
1 change: 1 addition & 0 deletions docs/conf.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Sphinx configuration."""

project = "AI Ghostfunctions"
author = "Brian M. Ritz"
copyright = "2023, Brian M. Ritz"
Expand Down
4 changes: 2 additions & 2 deletions noxfile.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Nox sessions."""

import os
import shlex
import shutil
Expand All @@ -9,8 +10,6 @@
import nox


print(os.curdir)

try:
from nox_poetry import Session
from nox_poetry import session
Expand All @@ -27,6 +26,7 @@
package = "ai_ghostfunctions"
python_version_default = "3.11"
python_versions = [
"3.12",
"3.11",
"3.10",
"3.9",
Expand Down
2,041 changes: 1,082 additions & 959 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ Changelog = "https://github.com/bmritz/ai-ghostfunctions/releases"

[tool.poetry.dependencies]
python = "^3.8"
openai = "^0.27.2"
openai = "^1"
typeguard = "^3.0.2"

[tool.poetry.group.dev.dependencies]
Expand Down
24 changes: 12 additions & 12 deletions src/ai_ghostfunctions/ghostfunctions.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""The AICallable class."""

import ast
import inspect
import os
Expand All @@ -11,6 +12,7 @@

import openai
import typeguard
from openai.types.chat.chat_completion import ChatCompletion

from .keywords import ASSISTANT
from .keywords import SYSTEM
Expand All @@ -34,7 +36,7 @@ def _make_chatgpt_message_from_function(
f"""
# The return type annotation for the function {f.__name__} is {get_type_hints(f)['return']}
# The docstring for the function {f.__name__} is the following:
"""
""" # noqa: E231
)
+ "\n".join([f"# {line}" for line in f.__doc__.split("\n")])
+ f"""
Expand Down Expand Up @@ -74,19 +76,17 @@ def _default_prompt_creation(
]


def _default_ai_callable() -> Callable[..., openai.openai_object.OpenAIObject]:
import openai
def _default_ai_callable() -> Callable[..., ChatCompletion]:

openai.api_key = os.environ["OPENAI_API_KEY"]
openai.organization = os.getenv("OPENAI_ORGANIZATION")
client = openai.OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

def f(**kwargs: Any) -> openai.openai_object.OpenAIObject:
create = openai.ChatCompletion.create
def f(**kwargs: Any) -> ChatCompletion:
create = client.chat.completions.create
try:
result: openai.openai_object.OpenAIObject = create(model="gpt-4", **kwargs) # type: ignore[no-untyped-call]
except openai.InvalidRequestError:
result: ChatCompletion = create(model="gpt-4", **kwargs)
except openai.NotFoundError:
# user may not have access to gpt-4 yet, perhaps they have 3.5
result: openai.openai_object.OpenAIObject = create(model="gpt-3.5-turbo", **kwargs) # type: ignore[no-untyped-call,no-redef]
result: ChatCompletion = create(model="gpt-3.5-turbo", **kwargs) # type: ignore[no-redef]
return result

return f
Expand Down Expand Up @@ -129,7 +129,7 @@ def _parse_ai_result(
The data from the ai result (data is of type `expected_return_type`)
"""
string_contents = [choice["message"]["content"] for choice in ai_result["choices"]]
string_contents = [choice.message.content for choice in ai_result.choices]
data = [
typeguard.check_type(
_string_to_python_data_structure(string, expected_return_type),
Expand All @@ -144,7 +144,7 @@ def ghostfunction(
function: Optional[Callable[..., Any]] = None,
/,
*,
ai_callable: Optional[Callable[..., openai.openai_object.OpenAIObject]] = None,
ai_callable: Optional[Callable[..., ChatCompletion]] = None,
prompt_function: Callable[
[Callable[..., Any]], List[Message]
] = _default_prompt_creation,
Expand Down
1 change: 1 addition & 0 deletions src/ai_ghostfunctions/keywords.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Keywords for ai-ghostfunctions."""

from typing import get_args

from .types import CompletionType
Expand Down
54 changes: 30 additions & 24 deletions tests/test_ghostfunctions.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
from unittest.mock import Mock
from unittest.mock import patch

import openai
import pytest
from openai.types.chat.chat_completion import ChatCompletion

import ai_ghostfunctions.ghostfunctions
from ai_ghostfunctions import ghostfunction
Expand All @@ -18,7 +18,9 @@ def generate_n_random_words(n: int, startswith: str) -> List[str]: # type: igno
"""Return a list of `n` random words that start with `startswith`."""
pass

with patch.object(ai_ghostfunctions.ghostfunctions.os, "environ"): # type: ignore[attr-defined]
with patch.dict(
ai_ghostfunctions.ghostfunctions.os.environ, {"OPENAI_API_KEY": "api-key-mock"} # type: ignore[attr-defined]
):
decorated_function = ghostfunction(generate_n_random_words)
assert inspect.signature(decorated_function) == inspect.signature(
generate_n_random_words
Expand All @@ -30,8 +32,8 @@ def test_aicallable_function_decorator() -> None:
mock_return_result = str(expected_result)

mock_callable = Mock(
return_value=openai.openai_object.OpenAIObject.construct_from(
{"choices": [{"message": {"content": mock_return_result}}]}
return_value=ChatCompletion.model_construct( # type: ignore[attr-defined]
**{"choices": [{"message": {"content": mock_return_result}}]}
)
)
with patch.object(
Expand All @@ -56,8 +58,8 @@ def test_aicallable_function_decorator_with_open_close_parens() -> None:
mock_return_result = str(expected_result)

mock_callable = Mock(
return_value=openai.openai_object.OpenAIObject.construct_from(
{"choices": [{"message": {"content": mock_return_result}}]}
return_value=ChatCompletion.model_construct( # type: ignore[attr-defined]
**{"choices": [{"message": {"content": mock_return_result}}]}
)
)
with patch.object(
Expand All @@ -84,8 +86,8 @@ def test_aicallable_function_decorator_with_custom_prompt_function() -> None:
mock_return_result = str(expected_result)

mock_callable = Mock(
return_value=openai.openai_object.OpenAIObject.construct_from(
{"choices": [{"message": {"content": mock_return_result}}]}
return_value=ChatCompletion.model_construct( # type: ignore[attr-defined]
**{"choices": [{"message": {"content": mock_return_result}}]}
)
)
with patch.object(
Expand Down Expand Up @@ -127,8 +129,8 @@ def test_ghostfunction_decorator_returns_expected_type(
mock_return_result = str(expected_result)

mock_callable = Mock(
return_value=openai.openai_object.OpenAIObject.construct_from(
{"choices": [{"message": {"content": mock_return_result}}]}
return_value=ChatCompletion.model_construct( # type: ignore[attr-defined]
**{"choices": [{"message": {"content": mock_return_result}}]}
)
)
with patch.object(
Expand All @@ -150,8 +152,8 @@ def generate_n_random_words(n: int, startswith: str) -> annotation:

def test_ghostfunction_decorator_with_custom_agg_function() -> None:
mock_callable = Mock(
return_value=openai.openai_object.OpenAIObject.construct_from(
{
return_value=ChatCompletion.model_construct( # type: ignore[attr-defined]
**{
"choices": [
{"message": {"content": "good"}},
{"message": {"content": "goose"}},
Expand Down Expand Up @@ -189,8 +191,8 @@ def test_ghostfunction_can_be_called_with_positional_arguments(
mock_return_result = str(expected_result)

mock_callable = Mock(
return_value=openai.openai_object.OpenAIObject.construct_from(
{"choices": [{"message": {"content": mock_return_result}}]}
return_value=ChatCompletion.model_construct( # type: ignore[attr-defined]
**{"choices": [{"message": {"content": mock_return_result}}]}
)
)
with patch.object(
Expand Down Expand Up @@ -242,8 +244,8 @@ def test_ghostfunction_decorator_errors_if_no_return_type_annotation() -> None:
expected_result = "returned value from openai"

mock_callable = Mock(
return_value=openai.openai_object.OpenAIObject.construct_from(
{"choices": [{"message": {"content": expected_result}}]}
return_value=ChatCompletion.model_construct( # type: ignore[attr-defined]
**{"choices": [{"message": {"content": expected_result}}]}
)
)

Expand Down Expand Up @@ -333,7 +335,9 @@ def test__make_chatgpt_message_from_function_works_well_with_multiline_docstring
def test___parse_ai_result(
ai_result: str, expected_return_type: Any, expected_function_result: Any
) -> None:
ai_result_wrapper = {"choices": [{"message": {"content": ai_result}}]}
ai_result_wrapper = ChatCompletion.model_construct( # type: ignore[attr-defined]
**{"choices": [{"message": {"content": ai_result}}]}
)
assert (
ai_ghostfunctions.ghostfunctions._parse_ai_result(
ai_result_wrapper,
Expand All @@ -344,13 +348,15 @@ def test___parse_ai_result(


def test___parse_ai_result_non_default_agg_function() -> None:
ai_result_wrapper = {
"choices": [
{"message": {"content": "c1"}},
{"message": {"content": "c2"}},
{"message": {"content": "c3"}},
]
}
ai_result_wrapper = ChatCompletion.model_construct( # type: ignore[attr-defined]
**{
"choices": [
{"message": {"content": "c1"}},
{"message": {"content": "c2"}},
{"message": {"content": "c3"}},
]
}
)
assert (
ai_ghostfunctions.ghostfunctions._parse_ai_result(
ai_result_wrapper, str, aggregation_function=lambda x: x[1]
Expand Down

0 comments on commit 54b3862

Please sign in to comment.