Skip to content

Commit 08e313a

Browse files
authored
refactor(api, sdk-py, sdk-ts, ui, docs): rename send_message to store_message across documentation and codebase (#87)
1 parent f26eaaf commit 08e313a

File tree

43 files changed

+521
-497
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

43 files changed

+521
-497
lines changed

README.md

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -325,9 +325,9 @@ client.ping()
325325

326326
Acontext can manage agent sessions and artifacts.
327327

328-
### Save Messages [📖](https://docs.acontext.io/api-reference/session/send-message-to-session)
328+
### Save Messages [📖](https://docs.acontext.io/api-reference/session/store-message-to-session)
329329

330-
Acontext offers persistent storage for message data. When you call `session.send_message`, Acontext will persist the message and start to monitor this session:
330+
Acontext offers persistent storage for message data. When you call `session.store_message`, Acontext will persist the message and start to monitor this session:
331331

332332
<details>
333333
<summary>Code Snippet</summary>
@@ -345,7 +345,7 @@ messages = [
345345

346346
# Save messages
347347
for msg in messages:
348-
client.sessions.send_message(session_id=session.id, blob=msg, format="openai")
348+
client.sessions.store_message(session_id=session.id, blob=msg, format="openai")
349349
```
350350

351351
> [📖](https://docs.acontext.io/store/messages/multi-modal) We also support multi-modal message storage and anthropic SDK.
@@ -367,7 +367,7 @@ new_msg = r.items
367367
new_msg.append({"role": "user", "content": "How are you doing?"})
368368
r = openai_client.chat.completions.create(model="gpt-4.1", messages=new_msg)
369369
print(r.choices[0].message.content)
370-
client.sessions.send_message(session_id=session.id, blob=r.choices[0].message)
370+
client.sessions.store_message(session_id=session.id, blob=r.choices[0].message)
371371
```
372372

373373
</details>
@@ -478,9 +478,9 @@ messages = [
478478
},
479479
]
480480

481-
# Send messages in a loop
481+
# Store messages in a loop
482482
for msg in messages:
483-
client.sessions.send_message(session_id=session.id, blob=msg, format="openai")
483+
client.sessions.store_message(session_id=session.id, blob=msg, format="openai")
484484

485485
# Wait for task extraction to complete
486486
client.sessions.flush(session.id)

docs/api-reference/introduction.mdx

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -100,8 +100,8 @@ session = client.sessions.create(
100100
configs={"mode": "chat"}
101101
)
102102

103-
# Send a message (OpenAI format)
104-
response = client.sessions.send_message(
103+
# Store a message (OpenAI format)
104+
response = client.sessions.store_message(
105105
session_id=session.id,
106106
blob={'role': 'user', 'content': 'How do I reset my password?'},
107107
format='openai'
@@ -133,8 +133,8 @@ const session = await client.sessions.create({
133133
configs: { mode: 'chat' }
134134
});
135135

136-
// Send a message (OpenAI format)
137-
const response = await client.sessions.sendMessage(
136+
// Store a message (OpenAI format)
137+
const response = await client.sessions.storeMessage(
138138
session.id,
139139
{ role: 'user', content: 'How do I reset my password?' },
140140
{ format: 'openai' }
@@ -204,7 +204,7 @@ Acontext supports multiple message formats for maximum compatibility:
204204
- **Anthropic** - Compatible with Anthropic Messages format
205205
- **Acontext** - Native format with extended capabilities
206206

207-
You can convert between formats when retrieving or sending messages using the `format` parameter.
207+
You can convert between formats when retrieving or storing messages using the `format` parameter.
208208

209209
## Join the Community
210210

docs/api-reference/openapi.json

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -899,16 +899,16 @@
899899
"security" : [ {
900900
"BearerAuth" : [ ]
901901
} ],
902-
"summary" : "Send message to session",
902+
"summary" : "Store message to session",
903903
"tags" : [ "session" ],
904904
"x-code-samples" : [ {
905905
"label" : "Python",
906906
"lang" : "python",
907-
"source" : "from acontext import AcontextClient\nfrom acontext.messages import build_acontext_message\n\nclient = AcontextClient(api_key='sk_project_token')\n\n# Send a message in Acontext format\nmessage = build_acontext_message(role='user', parts=['Hello!'])\nclient.sessions.send_message(\n session_id='session-uuid',\n blob=message,\n format='acontext'\n)\n\n# Send a message in OpenAI format\nopenai_message = {'role': 'user', 'content': 'Hello from OpenAI format!'}\nclient.sessions.send_message(\n session_id='session-uuid',\n blob=openai_message,\n format='openai'\n)\n"
907+
"source" : "from acontext import AcontextClient\nfrom acontext.messages import build_acontext_message\n\nclient = AcontextClient(api_key='sk_project_token')\n\n# Store a message in Acontext format\nmessage = build_acontext_message(role='user', parts=['Hello!'])\nclient.sessions.store_message(\n session_id='session-uuid',\n blob=message,\n format='acontext'\n)\n\n# Store a message in OpenAI format\nopenai_message = {'role': 'user', 'content': 'Hello from OpenAI format!'}\nclient.sessions.store_message(\n session_id='session-uuid',\n blob=openai_message,\n format='openai'\n)\n"
908908
}, {
909909
"label" : "JavaScript",
910910
"lang" : "javascript",
911-
"source" : "import { AcontextClient, MessagePart } from '@acontext/acontext';\n\nconst client = new AcontextClient({ apiKey: 'sk_project_token' });\n\n// Send a message in Acontext format\nawait client.sessions.sendMessage(\n 'session-uuid',\n {\n role: 'user',\n parts: [MessagePart.textPart('Hello!')]\n },\n { format: 'acontext' }\n);\n\n// Send a message in OpenAI format\nawait client.sessions.sendMessage(\n 'session-uuid',\n {\n role: 'user',\n content: 'Hello from OpenAI format!'\n },\n { format: 'openai' }\n);\n"
911+
"source" : "import { AcontextClient, MessagePart } from '@acontext/acontext';\n\nconst client = new AcontextClient({ apiKey: 'sk_project_token' });\n\n// Store a message in Acontext format\nawait client.sessions.storeMessage(\n 'session-uuid',\n {\n role: 'user',\n parts: [MessagePart.textPart('Hello!')]\n },\n { format: 'acontext' }\n);\n\n// Store a message in OpenAI format\nawait client.sessions.storeMessage(\n 'session-uuid',\n {\n role: 'user',\n content: 'Hello from OpenAI format!'\n },\n { format: 'openai' }\n);\n"
912912
} ],
913913
"x-codegen-request-body-name" : "payload"
914914
}
@@ -2049,7 +2049,7 @@
20492049
"required" : [ "rename" ],
20502050
"type" : "object"
20512051
},
2052-
"handler.SendMessageReq" : {
2052+
"handler.StoreMessageReq" : {
20532053
"properties" : {
20542054
"blob" : {
20552055
"type" : "object"
@@ -2754,7 +2754,7 @@
27542754
"_session__session_id__messages_post_request" : {
27552755
"properties" : {
27562756
"payload" : {
2757-
"description" : "SendMessage payload (Content-Type: multipart/form-data)",
2757+
"description" : "StoreMessage payload (Content-Type: multipart/form-data)",
27582758
"type" : "string"
27592759
},
27602760
"file" : {

docs/integrations/agno.mdx

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -92,12 +92,12 @@ acontext_client = AcontextClient(
9292

9393
## How It Works
9494

95-
The Agno integration works by sending conversation messages to Acontext in OpenAI message format. Agno's message format is compatible with Acontext, so no conversion is needed.
95+
The Agno integration works by storing conversation messages to Acontext in OpenAI message format. Agno's message format is compatible with Acontext, so no conversion is needed.
9696

9797
### Message Flow
9898

9999
1. **Create session**: Initialize a new Acontext session for your agent
100-
2. **Send messages**: Append each message (user and assistant) to Acontext as the conversation progresses
100+
2. **Store messages**: Append each message (user and assistant) to Acontext as the conversation progresses
101101
3. **Extract tasks**: After the conversation, flush the session and retrieve extracted tasks
102102
4. **Resume sessions**: Load previous conversation history to continue where you left off
103103

@@ -127,19 +127,19 @@ agent = Agent(
127127
space = acontext_client.spaces.create()
128128
session = acontext_client.sessions.create(space_id=space.id)
129129

130-
# Build conversation and send to Acontext
130+
# Build conversation and store to Acontext
131131
conversation = []
132132
user_msg = {"role": "user", "content": "Hello!"}
133133
conversation.append(user_msg)
134-
acontext_client.sessions.send_message(session_id=session.id, blob=user_msg)
134+
acontext_client.sessions.store_message(session_id=session.id, blob=user_msg)
135135

136136
# Run agent
137137
response = agent.run(conversation)
138138

139-
# Send assistant response to Acontext
139+
# Store assistant response to Acontext
140140
assistant_msg = {"role": "assistant", "content": response.content}
141141
conversation.append(assistant_msg)
142-
acontext_client.sessions.send_message(session_id=session.id, blob=assistant_msg)
142+
acontext_client.sessions.store_message(session_id=session.id, blob=assistant_msg)
143143
```
144144

145145
## Complete Example
@@ -173,7 +173,7 @@ def create_agno_agent() -> Agent:
173173

174174
def append_message(message: dict, conversation: list[dict], session_id: str):
175175
conversation.append(message)
176-
acontext_client.sessions.send_message(session_id=session_id, blob=message)
176+
acontext_client.sessions.store_message(session_id=session_id, blob=message)
177177
return conversation
178178

179179
async def main():
@@ -279,8 +279,8 @@ message = {
279279
"content": "Your message here"
280280
}
281281

282-
# Send directly to Acontext - no conversion needed
283-
acontext_client.sessions.send_message(session_id=session_id, blob=message)
282+
# Store directly to Acontext - no conversion needed
283+
acontext_client.sessions.store_message(session_id=session_id, blob=message)
284284
```
285285

286286
<Info>
@@ -290,7 +290,7 @@ Agno's `RunOutput.messages` can be converted to dictionaries using `[m.to_dict()
290290
## Best Practices
291291

292292
<Tip>
293-
**Batch message sending**: For better performance, you can batch multiple messages before sending them to Acontext, but ensure you send them in chronological order.
293+
**Batch message storing**: For better performance, you can batch multiple messages before storing them to Acontext, but ensure you store them in chronological order.
294294
</Tip>
295295

296296
<Tip>

docs/integrations/ai-sdk.mdx

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -103,14 +103,14 @@ const acontextClient = new AcontextClient({
103103

104104
## How It Works
105105

106-
The Vercel AI SDK integration works by sending conversation messages to Acontext in OpenAI message format. The SDK uses `generateText` for text generation and requires manual tool execution.
106+
The Vercel AI SDK integration works by storing conversation messages to Acontext in OpenAI message format. The SDK uses `generateText` for text generation and requires manual tool execution.
107107

108108
### Message Flow
109109

110110
1. **Create session**: Initialize a new Acontext session for your agent
111111
2. **Generate text**: Use `generateText` with tools to get model responses
112112
3. **Handle tool calls**: Manually execute tools when the model requests them
113-
4. **Send messages**: Append each message (user, assistant, and tool) to Acontext
113+
4. **Store messages**: Append each message (user, assistant, and tool) to Acontext
114114
5. **Extract tasks**: After the conversation, flush the session and retrieve extracted tasks
115115
6. **Resume sessions**: Load previous conversation history to continue where you left off
116116

@@ -154,7 +154,7 @@ const session = await acontextClient.sessions.create({ spaceId: space.id });
154154
let conversation: any[] = [];
155155
const userMsg = { role: 'user', content: 'Hello!' };
156156
conversation.push(userMsg);
157-
await acontextClient.sessions.sendMessage(session.id, userMsg, {
157+
await acontextClient.sessions.storeMessage(session.id, userMsg, {
158158
format: 'openai',
159159
});
160160

@@ -164,13 +164,13 @@ const result = await generateText({
164164
messages: conversation,
165165
});
166166

167-
// Send assistant response to Acontext
167+
// Store assistant response to Acontext
168168
const assistantMsg = {
169169
role: 'assistant',
170170
content: result.text,
171171
};
172172
conversation.push(assistantMsg);
173-
await acontextClient.sessions.sendMessage(session.id, assistantMsg, {
173+
await acontextClient.sessions.storeMessage(session.id, assistantMsg, {
174174
format: 'openai',
175175
});
176176
```
@@ -227,7 +227,7 @@ async function runAgent(conversation: any[]): Promise<[string, any[]]> {
227227
iteration += 1;
228228

229229
// Filter messages for Vercel AI SDK (only user and assistant)
230-
const messagesToSend = conversation
230+
const messagesToStore = conversation
231231
.filter((msg: any) => {
232232
const role = msg.role;
233233
return (role === 'user' || role === 'assistant') && !msg._internal;
@@ -249,7 +249,7 @@ async function runAgent(conversation: any[]): Promise<[string, any[]]> {
249249
const result = await generateText({
250250
model,
251251
system: 'You are a helpful assistant',
252-
messages: messagesToSend,
252+
messages: messagesToStore,
253253
tools,
254254
});
255255

@@ -348,9 +348,9 @@ async function runAgent(conversation: any[]): Promise<[string, any[]]> {
348348
}
349349
```
350350

351-
### Send Messages to Acontext
351+
### Store Messages to Acontext
352352

353-
Send all messages (excluding internal ones) to Acontext:
353+
Store all messages (excluding internal ones) to Acontext:
354354

355355
```typescript
356356
async function appendMessage(
@@ -365,7 +365,7 @@ async function appendMessage(
365365
}
366366

367367
conversation.push(message);
368-
await acontextClient.sessions.sendMessage(sessionId, message, {
368+
await acontextClient.sessions.storeMessage(sessionId, message, {
369369
format: 'openai',
370370
});
371371
return conversation;
@@ -443,7 +443,7 @@ async function appendMessage(
443443
return conversation;
444444
}
445445
conversation.push(message);
446-
await acontextClient.sessions.sendMessage(sessionId, message, {
446+
await acontextClient.sessions.storeMessage(sessionId, message, {
447447
format: 'openai',
448448
});
449449
return conversation;
@@ -463,7 +463,7 @@ async function main(): Promise<void> {
463463
// Run agent with tool calling
464464
const [responseContent, newMessages] = await runAgent(conversation);
465465

466-
// Send all messages to Acontext
466+
// Store all messages to Acontext
467467
for (const msg of newMessages) {
468468
conversation = await appendMessage(msg, conversation, session.id);
469469
}
@@ -542,7 +542,7 @@ Vercel AI SDK has specific requirements for message formats:
542542

543543
```typescript
544544
// Filter messages for Vercel AI SDK
545-
const messagesToSend = conversation
545+
const messagesToStore = conversation
546546
.filter((msg: any) => {
547547
// Only user and assistant roles, exclude internal messages
548548
const role = msg.role;
@@ -582,7 +582,7 @@ const messagesToSend = conversation
582582
</Tip>
583583

584584
<Tip>
585-
**Format specification**: Always specify `format: 'openai'` when sending messages to Acontext to ensure proper format handling.
585+
**Format specification**: Always specify `format: 'openai'` when storing messages to Acontext to ensure proper format handling.
586586
</Tip>
587587

588588
<Tip>
@@ -608,7 +608,7 @@ Vercel AI SDK v5 only accepts 'user' and 'assistant' roles in messages. Tool res
608608
</Accordion>
609609

610610
<Accordion title="Content Type Requirements">
611-
Message content must be a string, not an array. Array content needs to be converted to a string before sending to the SDK.
611+
Message content must be a string, not an array. Array content needs to be converted to a string before passing to the SDK.
612612
</Accordion>
613613
</AccordionGroup>
614614

0 commit comments

Comments (0)