
Commit 4fcf298

refactor: add remaining prompting attributes
1 parent dc1977f · commit 4fcf298

9 files changed: +59 -43 lines changed


api/terraform/python/openai_api/lambda_openai_function/lambda_handler.py

Lines changed: 4 additions & 1 deletion
@@ -77,13 +77,16 @@ def handler(event, context):
     request_meta_data = request_meta_data_factory(model, object_type, temperature, max_tokens, input_text)

     # does the prompt have anything to do with any of the search terms defined in a plugin?
+    # FIX NOTE: need to decide on how to resolve which of many plugin values sets to use for model, temperature, max_tokens
     for plugin in plugins:
         if search_terms_are_in_messages(
             messages=messages,
             search_terms=plugin.selector.search_terms.strings,
             search_pairs=plugin.selector.search_terms.pairs,
         ):
-            model = "gpt-3.5-turbo-1106"
+            model = plugin.prompting.model
+            temperature = plugin.prompting.temperature
+            max_tokens = plugin.prompting.max_tokens
             messages = customized_prompt(plugin=plugin, messages=messages)
             custom_tool = plugin_tool_factory(plugin=plugin)
             tools.append(custom_tool)
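
This hunk leaves open the question flagged in the FIX NOTE: the loop overwrites model, temperature and max_tokens on every match, so when several plugins match, the last one wins. A minimal, self-contained sketch of that behavior; the dataclass stand-ins and values below are illustrative, not the repo's Plugin classes.

from dataclasses import dataclass

# Hypothetical stand-ins for plugin.prompting -- not the repo's pydantic models.
@dataclass
class PromptingStub:
    model: str
    temperature: float
    max_tokens: int

@dataclass
class PluginStub:
    prompting: PromptingStub

def apply_plugin_settings(model, temperature, max_tokens, matching_plugins):
    """Mirror of the handler loop above: each match overwrites the settings,
    so the last matching plugin's values survive."""
    for plugin in matching_plugins:
        model = plugin.prompting.model
        temperature = plugin.prompting.temperature
        max_tokens = plugin.prompting.max_tokens
    return model, temperature, max_tokens

matches = [
    PluginStub(PromptingStub("gpt-3.5-turbo-1106", 1.0, 256)),
    PluginStub(PromptingStub("gpt-3.5-turbo-1106", 0.0, 512)),
]
print(apply_plugin_settings("gpt-4", 0.5, 2048, matches))
# ('gpt-3.5-turbo-1106', 0.0, 512) -- only the second match's settings remain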

api/terraform/python/openai_api/lambda_openai_function/plugin_loader.py

Lines changed: 37 additions & 21 deletions
@@ -11,7 +11,7 @@
 
 import yaml
 from openai_api.common.conf import settings
-from openai_api.common.const import PYTHON_ROOT
+from openai_api.common.const import PYTHON_ROOT, VALID_CHAT_COMPLETION_MODELS
 from pydantic import BaseModel, Field, ValidationError, field_validator, root_validator
 
 
@@ -55,24 +55,6 @@ def to_json(self) -> json:
         raise NotImplementedError
 
 
-class SystemPrompt(PluginBase):
-    """System prompt of a Plugin object"""
-
-    system_prompt: str = Field(..., description="System prompt")
-
-    @field_validator("system_prompt")
-    @classmethod
-    def validate_system_prompt(cls, system_prompt) -> str:
-        """Validate the system_prompt field"""
-        if not isinstance(system_prompt, str):
-            do_error(class_name=cls.__name__, err=f"Expected a string but received {type(system_prompt)}")
-        return system_prompt
-
-    def to_json(self) -> json:
-        """Return the plugin as a JSON object"""
-        return self.system_prompt
-
-
 class SearchTerms(PluginBase):
     """Search terms of a Plugin object"""
 
@@ -146,7 +128,12 @@ class Prompting(PluginBase):
     """Prompting child class of a Plugin object"""
 
     plugin_json: dict = Field(..., description="Plugin object")
-    system_prompt: SystemPrompt = Field(None, description="System prompt of the plugin object")
+
+    # attributes
+    system_prompt: str = Field("", description="System prompt of the prompt")
+    model: str = Field("gpt-3.5-turbo-1106", description="Model of the system prompt")
+    temperature: float = Field(0.0, description="Temperature of the system prompt")
+    max_tokens: int = Field(0, description="Max tokens of the system prompt")
 
     @root_validator(pre=True)
     def set_fields(cls, values):
@@ -155,7 +142,10 @@ def set_fields(cls, values):
         if not isinstance(plugin_json, dict):
             raise ValueError(f"Expected plugin_json to be a dict but received {type(plugin_json)}")
         if plugin_json:
-            values["system_prompt"] = SystemPrompt(system_prompt=plugin_json["system_prompt"])
+            values["system_prompt"] = plugin_json["system_prompt"]
+            values["model"] = plugin_json["model"]
+            values["temperature"] = plugin_json["temperature"]
+            values["max_tokens"] = plugin_json["max_tokens"]
         return values
 
     @field_validator("plugin_json")
@@ -166,11 +156,37 @@ def validate_plugin_json(cls, plugin_json) -> dict:
         validate_required_keys(class_name=cls.__name__, required_keys=required_keys, plugin_json=plugin_json)
         return plugin_json
 
+    @field_validator("model")
+    @classmethod
+    def validate_model(cls, model) -> dict:
+        """Validate the plugin object"""
+        if model not in VALID_CHAT_COMPLETION_MODELS:
+            do_error(
+                class_name=cls.__name__,
+                err=f"Invalid plugin object: {model}. 'model' should be one of {VALID_CHAT_COMPLETION_MODELS}.",
+            )
+        return model
+
     @property
     def system_prompt(self) -> str:
         """Return the system prompt"""
         return self.plugin_json.get("system_prompt")
 
+    @property
+    def model(self) -> str:
+        """Return the model"""
+        return self.plugin_json.get("model")
+
+    @property
+    def temperature(self) -> float:
+        """Return the temperature"""
+        return self.plugin_json.get("temperature")
+
+    @property
+    def max_tokens(self) -> int:
+        """Return the max tokens"""
+        return self.plugin_json.get("max_tokens")
+
     def to_json(self) -> json:
         """Return the plugin as a JSON object"""
         return self.plugin_json
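
The pattern above (copy the prompting attributes out of the raw plugin_json in a pre root_validator, then check the model name against an allow-list) can be exercised in isolation. The sketch below is a simplified, self-contained stand-in, not the repo's Prompting class; VALID_MODELS approximates VALID_CHAT_COMPLETION_MODELS.

from pydantic import BaseModel, Field, field_validator, root_validator

VALID_MODELS = ["gpt-3.5-turbo-1106", "gpt-4"]  # stand-in for VALID_CHAT_COMPLETION_MODELS

class PromptingSketch(BaseModel):
    """Simplified stand-in for the Prompting class above."""

    plugin_json: dict = Field(..., description="Raw 'prompting' block from the plugin YAML")
    system_prompt: str = Field("", description="System prompt")
    model: str = Field("gpt-3.5-turbo-1106", description="Chat completion model")
    temperature: float = Field(0.0, description="Sampling temperature")
    max_tokens: int = Field(0, description="Completion token limit")

    @root_validator(pre=True)
    def set_fields(cls, values):
        """Copy the prompting attributes out of plugin_json before field validation."""
        plugin_json = values.get("plugin_json")
        if isinstance(plugin_json, dict) and plugin_json:
            for key in ("system_prompt", "model", "temperature", "max_tokens"):
                values[key] = plugin_json[key]
        return values

    @field_validator("model")
    @classmethod
    def validate_model(cls, model):
        """Reject models that are not in the allow-list."""
        if model not in VALID_MODELS:
            raise ValueError(f"'model' should be one of {VALID_MODELS}, received {model!r}")
        return model

prompting = PromptingSketch(
    plugin_json={
        "system_prompt": "You are a helpful marketing agent.",
        "model": "gpt-3.5-turbo-1106",
        "temperature": 1.0,
        "max_tokens": 256,
    }
)
print(prompting.model, prompting.temperature, prompting.max_tokens)
# gpt-3.5-turbo-1106 1.0 256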

api/terraform/python/openai_api/lambda_openai_function/plugin_manager.py

Lines changed: 1 addition & 1 deletion
@@ -41,7 +41,7 @@ def customized_prompt(plugin: Plugin, messages: list) -> list:
             system_prompt = message.get("content")
             custom_prompt = {
                 "role": "system",
-                "content": system_prompt + "\n\n and also " + plugin.prompting.system_prompt.system_prompt,
+                "content": system_prompt + "\n\n and also " + plugin.prompting.system_prompt,
             }
             messages[i] = custom_prompt
             break
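
Since prompting.system_prompt is now a plain str, the concatenation in customized_prompt reduces to ordinary string handling. A tiny illustration with made-up prompt values (not taken from the repo):

# Illustrative values only.
base_system_prompt = "You are a helpful assistant."
plugin_system_prompt = (
    "You are a helpful marketing agent for the "
    "[Willy Wonka Chocolate Factory](https://wwcf.com).\n"
)

# Same shape as the message built above: the plugin prompt is appended to the
# chat's existing system message.
custom_prompt = {
    "role": "system",
    "content": base_system_prompt + "\n\n and also " + plugin_system_prompt,
}
print(custom_prompt["content"])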

api/terraform/python/openai_api/lambda_openai_function/plugins/everlasting-gobstopper.yaml

Lines changed: 4 additions & 0 deletions
@@ -25,6 +25,10 @@ selector:
 prompting:
   system_prompt: >
     You are a helpful marketing agent for the [Willy Wonka Chocolate Factory](https://wwcf.com).
+  model: gpt-3.5-turbo-1106
+  temperature: 1.0
+  max_tokens: 256
+
 function_calling:
   function_description: Get additional information about the Everlasting Gobstopper product created by Willy Wonka Chocolate Factory. Information includes sales promotions, coupon codes, company contact information and biographical background on the company founder.
   # Information provided to the OpenAI "Function Calling" algorithm to help it generate custom responses.
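
The three new keys sit directly under the prompting block, so they are available as soon as the YAML is parsed. A quick check with PyYAML; the file path is an assumption and should point at wherever the plugin YAML lives.

import yaml

# Path is an assumption; adjust to the plugins directory in your checkout.
with open("everlasting-gobstopper.yaml", encoding="utf-8") as f:
    plugin_yaml = yaml.safe_load(f)

prompting = plugin_yaml["prompting"]
print(prompting["model"])        # gpt-3.5-turbo-1106
print(prompting["temperature"])  # 1.0
print(prompting["max_tokens"])   # 256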

api/terraform/python/openai_api/lambda_openai_function/plugins/example-configuration.yaml

Lines changed: 3 additions & 0 deletions
@@ -35,6 +35,9 @@ prompting:
   system_prompt: >
     Your job is to provide helpful technical information about the OpenAI API Function Calling feature. You should include the following information in your response:
     "Congratulations!!! OpenAI API Function Calling chose to call this function. Here is the additional information that you requested:"
+  model: gpt-3.5-turbo-1106
+  temperature: 0.0
+  max_tokens: 256
 
 # ------------------------------------------------------------
 # 3. Required field: function_calling

api/terraform/python/openai_api/lambda_openai_function/plugins/lawrence-mcdaniel.yaml

Lines changed: 3 additions & 0 deletions
@@ -86,6 +86,9 @@ prompting:
     Lawrence's Twitter is @FullStackWLarry. His email is [email protected].
     Lawrence's LinkedIn is https://www.linkedin.com/in/lawrencemcdaniel/.
     Lawrence's instructor profile at University of British Columbia is https://extendedlearning.ubc.ca/about-us/our-instructors/lawrence-mcdaniel
+  model: gpt-3.5-turbo-1106
+  temperature: 0.0
+  max_tokens: 256
 function_calling:
   # Information provided to the OpenAI "Function Calling" algorithm to help it generate custom responses.
   function_description: Get additional information about Lawrence McDaniel, full stack web developer and host of YouTube channel FullStackwithLawrence. returns a personal bio, contact information, marketing information, client list, education background, professional certifications, etc.

api/terraform/python/openai_api/lambda_openai_function/tests/mock_data/plugins/everlasting-gobbstopper-invalid.yaml

Lines changed: 3 additions & 0 deletions
@@ -21,6 +21,9 @@ selector:
 prompting:
   # Complete search terms that will trigger the chatbot to use your customized system prompt.
   system_prompt_invalid: [{ "key1": "not a string" }]
+  model: gpt-3.5-turbo-1106
+  temperature: 1.0
+  max_tokens: 256
 function_calling:
   function_description: Get additional information about the Everlasting Gobstopper product created by Willy Wonka Chocolate Factory. Information includes sales promotions, coupon codes, company contact information and biographical background on the company founder.
   # Information provided to the OpenAI "Function Calling" algorithm to help it generate custom responses.

api/terraform/python/openai_api/lambda_openai_function/tests/mock_data/plugins/everlasting-gobbstopper.yaml

Lines changed: 3 additions & 0 deletions
@@ -25,6 +25,9 @@ selector:
 prompting:
   system_prompt: >
     You are a helpful marketing agent for the [Willy Wonka Chocolate Factory](https://wwcf.com).
+  model: gpt-3.5-turbo-1106
+  temperature: 1.0
+  max_tokens: 256
 function_calling:
   function_description: Get additional information about the Everlasting Gobstopper product created by Willy Wonka Chocolate Factory. Information includes sales promotions, coupon codes, company contact information and biographical background on the company founder.
   # Information provided to the OpenAI "Function Calling" algorithm to help it generate custom responses.

api/terraform/python/openai_api/lambda_openai_function/tests/test_plugin.py

Lines changed: 1 addition & 20 deletions
@@ -30,7 +30,6 @@
     Prompting,
     SearchTerms,
     Selector,
-    SystemPrompt,
     validate_required_keys,
 )
 from openai_api.lambda_openai_function.tests.test_setup import (  # noqa: E402
@@ -60,24 +59,6 @@ def test_validate_required_keys(self):
                 class_name="Plugin", plugin_json=self.everlasting_gobbstopper_invalid, required_keys=required_keys
             )
 
-    def test_system_prompt(self):
-        """Test system_prompt."""
-        prompt = self.everlasting_gobbstopper["prompting"]["system_prompt"]
-        system_prompt = SystemPrompt(system_prompt=prompt)
-
-        self.assertEqual(
-            system_prompt.system_prompt,
-            "You are a helpful marketing agent for the [Willy Wonka Chocolate Factory](https://wwcf.com).\n",
-        )
-        self.assertIsInstance(system_prompt, SystemPrompt)
-        self.assertIsInstance(system_prompt.system_prompt, str)
-        self.assertTrue(isinstance(system_prompt.to_json(), str))
-
-    def test_system_prompt_invalid(self):
-        """Test system_prompt."""
-        with self.assertRaises(ValueError):
-            SystemPrompt(system_prompt=self.everlasting_gobbstopper_invalid["prompting"]["system_prompt_invalid"])
-
     def test_search_terms(self):
         """Test search_terms."""
         plugin_json = self.everlasting_gobbstopper["selector"]["search_terms"]
@@ -140,7 +121,7 @@ def test_refers_to(self):
             },
         )
         self.assertEqual(
-            refers_to.prompting.system_prompt.system_prompt,
+            refers_to.prompting.system_prompt,
             "You are a helpful marketing agent for the [Willy Wonka Chocolate Factory](https://wwcf.com).\n",
         )
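
The two deleted SystemPrompt tests are not replaced in this commit. A hypothetical follow-up test for the new Prompting attributes could look like the sketch below; it is not part of this commit, reuses the everlasting_gobbstopper fixture from the tests above, and assumes Prompting accepts the raw prompting block as plugin_json.

    def test_prompting_attributes(self):
        """Hypothetical test for the model, temperature and max_tokens attributes."""
        prompting = Prompting(plugin_json=self.everlasting_gobbstopper["prompting"])
        self.assertEqual(prompting.model, "gpt-3.5-turbo-1106")
        self.assertEqual(prompting.temperature, 1.0)
        self.assertEqual(prompting.max_tokens, 256)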
