Skip to content

Commit 016f3f0

Browse files
refactor: adds statusCode to serverError (#16)
1 parent 000e23a commit 016f3f0

File tree

6 files changed

+27
-17
lines changed

6 files changed

+27
-17
lines changed

Package.swift

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,4 @@
1-
// swift-tools-version: 5.8
1+
// swift-tools-version: 5.10
22
// The swift-tools-version declares the minimum version of Swift required to build this package.
33

44
import PackageDescription

README.md

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -252,9 +252,9 @@ do {
252252
print(completion.choices.first?.message.content ?? "No response")
253253
} catch let error as LLMChatOpenAIError {
254254
switch error {
255-
case .serverError(let message):
255+
case .serverError(let statusCode, let message):
256256
// Handle server-side errors (e.g., invalid API key, rate limits)
257-
print("Server Error: \(message)")
257+
print("Server Error [\(statusCode)]: \(message)")
258258
case .networkError(let error):
259259
// Handle network-related errors (e.g., no internet connection)
260260
print("Network Error: \(error.localizedDescription)")

Sources/LLMChatOpenAI/Documentation.docc/Documentation.md

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -223,9 +223,9 @@ do {
223223
print(completion.choices.first?.message.content ?? "No response")
224224
} catch let error as LLMChatOpenAIError {
225225
switch error {
226-
case .serverError(let message):
226+
case .serverError(let statusCode, let message):
227227
// Handle server-side errors (e.g., invalid API key, rate limits)
228-
print("Server Error: \(message)")
228+
print("Server Error [\(statusCode)]: \(message)")
229229
case .networkError(let error):
230230
// Handle network-related errors (e.g., no internet connection)
231231
print("Network Error: \(error.localizedDescription)")

Sources/LLMChatOpenAI/LLMChatOpenAI.swift

Lines changed: 9 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -140,16 +140,16 @@ private extension LLMChatOpenAI {
140140
let (data, response) = try await URLSession.shared.data(for: request)
141141

142142
guard let httpResponse = response as? HTTPURLResponse else {
143-
throw LLMChatOpenAIError.serverError(response.description)
143+
throw LLMChatOpenAIError.serverError(statusCode: 0, message: response.description)
144144
}
145145

146146
// Check for API errors first, as they might come with 200 status
147147
if let errorResponse = try? JSONDecoder().decode(ChatCompletionError.self, from: data) {
148-
throw LLMChatOpenAIError.serverError(errorResponse.error.message)
148+
throw LLMChatOpenAIError.serverError(statusCode: httpResponse.statusCode, message: errorResponse.error.message)
149149
}
150150

151151
guard 200...299 ~= httpResponse.statusCode else {
152-
throw LLMChatOpenAIError.serverError(response.description)
152+
throw LLMChatOpenAIError.serverError(statusCode: httpResponse.statusCode, message: response.description)
153153
}
154154

155155
return try JSONDecoder().decode(ChatCompletion.self, from: data)
@@ -174,8 +174,12 @@ private extension LLMChatOpenAI {
174174
let request = try createRequest(for: endpoint, with: body)
175175
let (bytes, response) = try await URLSession.shared.bytes(for: request)
176176

177-
guard let httpResponse = response as? HTTPURLResponse, 200...299 ~= httpResponse.statusCode else {
178-
throw LLMChatOpenAIError.serverError(response.description)
177+
guard let httpResponse = response as? HTTPURLResponse else {
178+
throw LLMChatOpenAIError.serverError(statusCode: 0, message: response.description)
179+
}
180+
181+
guard 200...299 ~= httpResponse.statusCode else {
182+
throw LLMChatOpenAIError.serverError(statusCode: httpResponse.statusCode, message: response.description)
179183
}
180184

181185
for try await line in bytes.lines {

Sources/LLMChatOpenAI/LLMChatOpenAIError.swift

Lines changed: 4 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -21,8 +21,10 @@ public enum LLMChatOpenAIError: Error, Sendable {
2121

2222
/// An error returned by the server.
2323
///
24-
/// - Parameter message: The error message received from the server.
25-
case serverError(String)
24+
/// - Parameters:
25+
/// - statusCode: The HTTP status code returned by the server.
26+
/// - message: The error message received from the server.
27+
case serverError(statusCode: Int, message: String)
2628

2729
/// An error that occurs during stream processing.
2830
case streamError

Tests/LLMChatOpenAITests/ChatCompletionTests.swift

Lines changed: 9 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -174,14 +174,16 @@ extension ChatCompletionTests {
174174
"""
175175

176176
URLProtocolMock.mockData = mockErrorResponse.data(using: .utf8)
177+
URLProtocolMock.mockStatusCode = 401
177178

178179
do {
179180
_ = try await chat.send(model: "gpt-4o", messages: messages)
180181

181182
XCTFail("Expected serverError to be thrown")
182183
} catch let error as LLMChatOpenAIError {
183184
switch error {
184-
case .serverError(let message):
185+
case .serverError(let statusCode, let message):
186+
XCTAssertEqual(statusCode, 401)
185187
XCTAssertEqual(message, "Invalid API key provided")
186188
default:
187189
XCTFail("Expected serverError but got \(error)")
@@ -211,16 +213,17 @@ extension ChatCompletionTests {
211213
}
212214

213215
func testHTTPError() async throws {
214-
URLProtocolMock.mockStatusCode = 429
215216
URLProtocolMock.mockData = "Rate limit exceeded".data(using: .utf8)
217+
URLProtocolMock.mockStatusCode = 429
216218

217219
do {
218220
_ = try await chat.send(model: "gpt-4o", messages: messages)
219221

220222
XCTFail("Expected serverError to be thrown")
221223
} catch let error as LLMChatOpenAIError {
222224
switch error {
223-
case .serverError(let message):
225+
case .serverError(let statusCode, let message):
226+
XCTAssertEqual(statusCode, 429)
224227
XCTAssertTrue(message.contains("429"))
225228
default:
226229
XCTFail("Expected serverError but got \(error)")
@@ -311,16 +314,17 @@ extension ChatCompletionTests {
311314
}
312315

313316
func testStreamHTTPError() async throws {
314-
URLProtocolMock.mockStatusCode = 503
315317
URLProtocolMock.mockStreamData = [""]
318+
URLProtocolMock.mockStatusCode = 503
316319

317320
do {
318321
for try await _ in chat.stream(model: "gpt-4o", messages: messages) {
319322
XCTFail("Expected serverError to be thrown")
320323
}
321324
} catch let error as LLMChatOpenAIError {
322325
switch error {
323-
case .serverError(let message):
326+
case .serverError(let statusCode, let message):
327+
XCTAssertEqual(statusCode, 503)
324328
XCTAssertTrue(message.contains("503"))
325329
default:
326330
XCTFail("Expected serverError but got \(error)")

0 commit comments

Comments (0)