Swift Examples - Anthropic
Service setup
Create an Anthropic service in the AIProxy dashboard
Follow the integration guide, selecting the Anthropic icon on the 'Create a New Service' form.
Snippets
How to make an Anthropic message request
import AIProxy
/* Uncomment for BYOK use cases */
// let anthropicService = AIProxy.anthropicDirectService(
// unprotectedAPIKey: "your-anthropic-key"
// )
/* Uncomment for all other production use cases */
// let anthropicService = AIProxy.anthropicService(
// partialKey: "partial-key-from-your-developer-dashboard",
// serviceURL: "service-url-from-your-developer-dashboard"
// )
// Build a buffered (non-streaming) message request.
// `maxTokens` is an upper bound on the length of the generated reply, not a target.
let requestBody = AnthropicMessageRequestBody(
maxTokens: 8192,
messages: [
// You can choose your level of sugar here. The most concise option is:
AnthropicMessageParam(content: "hello world", role: .user)
//
// Or, for more flexibility (e.g. adding images, tools, etc.):
// AnthropicInputMessage(content: [.text("hello world")], role: .user)
//
// Or, to fully spell out the call (this helps with Xcode's cmd-click to jump to source):
// AnthropicInputMessage(
// content: .blocks([
// .text(AnthropicTextBlockParam(text: "hello world"))
// ]),
// role: .user
// )
],
model: "claude-haiku-4-5-20251001",
system: "You are a friendly assistant"
)
do {
// `secondsToWait` is the client-side timeout for this request.
let response = try await anthropicService.messageRequest(
body: requestBody,
secondsToWait: 120
)
// `response.content` is a list of content blocks; this `for case` loop
// visits only the text blocks and skips any other block kinds.
for case let .textBlock(textBlock) in response.content {
print("Received text from Claude: \(textBlock.text)")
}
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
print("Could not get anthropic buffered chat: \(error)")
}
How to make an Anthropic streaming message request
import AIProxy
/* Uncomment for BYOK use cases */
// let anthropicService = AIProxy.anthropicDirectService(
// unprotectedAPIKey: "your-anthropic-key"
// )
/* Uncomment for all other production use cases */
// let anthropicService = AIProxy.anthropicService(
// partialKey: "partial-key-from-your-developer-dashboard",
// serviceURL: "service-url-from-your-developer-dashboard"
// )
// Build the request body. `maxTokens` caps the length of the generated reply.
let requestBody = AnthropicMessageRequestBody(
maxTokens: 1024,
messages: [
AnthropicMessageParam(
content: "Hello world",
role: .user
)
],
model: "claude-haiku-4-5-20251001"
)
do {
// `secondsToWait` is the client-side timeout for the streaming connection.
let stream = try await anthropicService.streamingMessageRequest(
body: requestBody,
secondsToWait: 120
)
// The `for try await case` pattern visits only .contentBlockDelta events,
// ignoring other event kinds in the stream.
for try await case .contentBlockDelta(let contentBlockDelta) in stream {
// Text arrives incrementally as .textDelta payloads.
if case .textDelta(let textDelta) = contentBlockDelta.delta {
print("Received a text delta from Claude: \(textDelta.text)")
}
}
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
print("Could not use Anthropic's message stream: \(error)")
}
How to make an Anthropic message request with tool calls
import AIProxy
/* Uncomment for BYOK use cases */
// let anthropicService = AIProxy.anthropicDirectService(
// unprotectedAPIKey: "your-anthropic-key"
// )
/* Uncomment for all other production use cases */
// let anthropicService = AIProxy.anthropicService(
// partialKey: "partial-key-from-your-developer-dashboard",
// serviceURL: "service-url-from-your-developer-dashboard"
// )
let requestBody = AnthropicMessageRequestBody(
maxTokens: 8192,
messages: [
AnthropicMessageParam(
content: "What is Apple's stock price?",
role: .user
)
],
model: "claude-haiku-4-5-20251001",
tools: [
.custom(
AnthropicTool(
description: "Call this function when the user wants a stock symbol",
// `inputSchema` is a JSON Schema object describing the arguments
// Claude should supply when it calls this tool.
inputSchema: [
"type": "object",
"properties": [
"ticker": [
"type": "string",
"description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
]
],
"required": ["ticker"]
],
name: "get_stock_symbol"
)
)
]
)
do {
let response = try await anthropicService.messageRequest(
body: requestBody,
secondsToWait: 120
)
// The response may interleave plain text with tool-use requests, so switch
// over each content block.
for content in response.content {
switch content {
case .textBlock(let textBlock):
print("Received text from Claude: \(textBlock.text)")
case .toolUseBlock(let toolUseBlock):
// Claude is requesting that your app invoke the named tool with `input`.
print("Claude wants to call \(toolUseBlock.name) with input: \(toolUseBlock.input)")
default:
continue
}
}
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
print(error)
}
How to make an Anthropic streaming message request with tool calls
import AIProxy

/* Uncomment for BYOK use cases */
// let anthropicService = AIProxy.anthropicDirectService(
//     unprotectedAPIKey: "your-anthropic-key"
// )

/* Uncomment for all other production use cases */
// let anthropicService = AIProxy.anthropicService(
//     partialKey: "partial-key-from-your-developer-dashboard",
//     serviceURL: "service-url-from-your-developer-dashboard"
// )

let requestBody = AnthropicMessageRequestBody(
    maxTokens: 8192,
    messages: [
        AnthropicMessageParam(
            content: "What is nvidia's stock price?",
            role: .user
        )
    ],
    model: "claude-haiku-4-5-20251001",
    tools: [
        // Use the explicit `.custom(AnthropicTool(...))` spelling, matching the
        // buffered tool-call snippet above. The bare `.init(...)` form only
        // resolves if the array element type is AnthropicTool itself.
        .custom(
            AnthropicTool(
                description: "Call this function when the user wants a stock symbol",
                // `inputSchema` is a JSON Schema object describing the arguments
                // Claude should supply when it calls this tool.
                inputSchema: [
                    "type": "object",
                    "properties": [
                        "ticker": [
                            "type": "string",
                            "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
                        ]
                    ],
                    "required": ["ticker"]
                ],
                name: "get_stock_symbol"
            )
        )
    ]
)
do {
    let stream = try await anthropicService.streamingMessageRequest(
        body: requestBody,
        secondsToWait: 120
    )
    // Accumulates partial tool-call events until a complete (name, input)
    // pair is available, at which point `append` returns it.
    var toolCallAccumulator = AnthropicToolCallAccumulator()
    for try await event in stream {
        if let (toolName, toolInput) = try toolCallAccumulator.append(event) {
            print("Claude wants to call tool \(toolName) with input \(toolInput)")
        }
        // Text still arrives incrementally alongside tool-call events.
        if case .contentBlockDelta(let contentBlockDelta) = event {
            if case .textDelta(let textDelta) = contentBlockDelta.delta {
                print("Received a text delta from Anthropic: \(textDelta.text)")
            }
        }
    }
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
    print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
    print(error)
}
How to make an Anthropic streaming message request with fine-grained tool streaming
Fine-grained streaming can improve tool arrival time. For details, see Anthropic's documentation.
Start with the streaming tool calls snippet above, but set maxTokens to a large value (e.g. 65536) and include the following header:
// NOTE: `requestBody` and `anthropicService` are defined as in the streaming
// tool-call snippet above; only the call below changes.
let stream = try await anthropicService.streamingMessageRequest(
body: requestBody,
secondsToWait: 120,
additionalHeaders: [
// Opts this request into Anthropic's fine-grained tool streaming beta.
"anthropic-beta": "fine-grained-tool-streaming-2025-05-14"
]
)
How to make an Anthropic message request with an image
This snippet uses NSImage(named:), which is available on macOS. On iOS, use UIImage(named:) in place of NSImage(named:).
import AIProxy

/* Uncomment for BYOK use cases */
// let anthropicService = AIProxy.anthropicDirectService(
//     unprotectedAPIKey: "your-anthropic-key"
// )

/* Uncomment for all other production use cases */
// let anthropicService = AIProxy.anthropicService(
//     partialKey: "partial-key-from-your-developer-dashboard",
//     serviceURL: "service-url-from-your-developer-dashboard"
// )

// This snippet targets macOS (NSImage). On iOS, use UIImage(named:) instead.
guard let image = NSImage(named: "my-image") else {
    print("Could not find an image named 'my-image' in your app assets")
    return
}
// Re-encode as JPEG so the base64 payload stays small.
guard let jpegData = AIProxy.encodeImageAsJpeg(image: image, compressionQuality: 0.8) else {
    print("Could not convert image to jpeg")
    return
}
// The image is sent inline as a base64-encoded image block.
let imageBlockParam = AnthropicImageBlockParam(
    source: .base64(data: jpegData.base64EncodedString(), mediaType: .jpeg),
    cacheControl: nil
)
let requestBody = AnthropicMessageRequestBody(
    maxTokens: 8192,
    messages: [
        AnthropicMessageParam(
            content: [
                .text("Provide a very short description of this image"),
                .image(imageBlockParam),
            ],
            role: .user
        )
    ],
    // Note: the trailing comma that followed this argument was removed — a
    // trailing comma in a call argument list does not compile before Swift 6.1.
    model: "claude-haiku-4-5-20251001"
)
do {
    let response = try await anthropicService.messageRequest(
        body: requestBody,
        secondsToWait: 120
    )
    // Print only the text blocks from the response.
    for case let .textBlock(textBlock) in response.content {
        print("Received text from Claude: \(textBlock.text)")
    }
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
    print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
    print(error)
}
How to make an Anthropic message request with a PDF
This snippet includes a PDF named mydocument.pdf in the Anthropic request. Adjust the filename to match the PDF included in your Xcode project. The snippet expects the PDF to be in the app bundle.
import AIProxy
/* Uncomment for BYOK use cases */
// let anthropicService = AIProxy.anthropicDirectService(
// unprotectedAPIKey: "your-anthropic-key"
// )
/* Uncomment for all other production use cases */
// let anthropicService = AIProxy.anthropicService(
// partialKey: "partial-key-from-your-developer-dashboard",
// serviceURL: "service-url-from-your-developer-dashboard"
// )
// Load the PDF from the app bundle. Adjust the resource name to match your project.
guard let pdfFileURL = Bundle.main.url(forResource: "mydocument", withExtension: "pdf"),
let pdfData = try? Data(contentsOf: pdfFileURL)
else {
print("Drop mydocument.pdf into your Xcode project first.")
return
}
// The PDF is sent inline as a base64-encoded document block.
let documentBlockParam = AnthropicDocumentBlockParam(
source: .base64PDF(AnthropicBase64PDFSource(data: pdfData.base64EncodedString()))
)
let requestBody = AnthropicMessageRequestBody(
maxTokens: 8192,
messages: [
AnthropicMessageParam(
content: [
.text("Provide a very short description of this pdf"),
.document(documentBlockParam),
],
role: .user
)
],
model: "claude-haiku-4-5-20251001"
)
do {
let response = try await anthropicService.messageRequest(
body: requestBody,
secondsToWait: 120
)
// Print only the text blocks from the response.
for case let .textBlock(textBlock) in response.content {
print("Received text from Claude: \(textBlock.text)")
}
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
print("Could not use Anthropic's buffered pdf support: \(error)")
}
How to make an Anthropic streaming message request with a PDF
This snippet includes a PDF named mydocument.pdf in the Anthropic request. Adjust the filename to match the PDF included in your Xcode project. The snippet expects the PDF to be in the app bundle.
import AIProxy
/* Uncomment for BYOK use cases */
// let anthropicService = AIProxy.anthropicDirectService(
// unprotectedAPIKey: "your-anthropic-key"
// )
/* Uncomment for all other production use cases */
// let anthropicService = AIProxy.anthropicService(
// partialKey: "partial-key-from-your-developer-dashboard",
// serviceURL: "service-url-from-your-developer-dashboard"
// )
// Load the PDF from the app bundle. Adjust the resource name to match your project.
guard let pdfFileURL = Bundle.main.url(forResource: "mydocument", withExtension: "pdf"),
let pdfData = try? Data(contentsOf: pdfFileURL)
else {
print("Drop mydocument.pdf into your Xcode project first.")
return
}
// The PDF is sent inline as a base64-encoded document block.
let documentBlockParam = AnthropicDocumentBlockParam(
source: .base64PDF(AnthropicBase64PDFSource(data: pdfData.base64EncodedString()))
)
let requestBody = AnthropicMessageRequestBody(
maxTokens: 8192,
messages: [
AnthropicMessageParam(
content: [
.text("Provide a very short description of this pdf"),
.document(documentBlockParam),
],
role: .user
)
],
model: "claude-haiku-4-5-20251001"
)
do {
let stream = try await anthropicService.streamingMessageRequest(
body: requestBody,
secondsToWait: 120
)
// Visit only .contentBlockDelta events; text arrives as .textDelta payloads.
for try await case .contentBlockDelta(let contentBlockDelta) in stream {
if case .textDelta(let textDelta) = contentBlockDelta.delta {
print("Received a text delta from Anthropic: \(textDelta.text)")
}
}
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
print("Could not use Anthropic's streaming pdf support: \(error)")
}
How to create an Anthropic request with a cached system prompt
For pricing, see Anthropic's documentation.
// Use the block form of `system` so individual system blocks can carry
// cache-control metadata.
let requestBody = AnthropicMessageRequestBody(
maxTokens: 1024,
messages: [ /* snip */ ],
model: "claude-haiku-4-5-20251001",
system: .blocks([
AnthropicSystemTextBlockParam(
text: "This is a very long prompt",
// Marks this system block for prompt caching with a one-hour TTL.
cacheControl: AnthropicCacheControlEphemeral(ttl: .oneHour)
)
])
)
How to make requests to Anthropic on Azure
1. Use the same snippets as above, but add the aiproxy-key-format header to your messageRequest or streamingMessageRequest call:
// NOTE: `requestBody` and `anthropicService` are defined as in the snippets above.
try await anthropicService.messageRequest(
body: requestBody,
secondsToWait: 120,
additionalHeaders: [
// Tells AIProxy how to format the auth header for the Azure deployment;
// presumably {{key}} is replaced with the resolved API key — see AIProxy docs.
"aiproxy-key-format": "x-api-key: {{key}}"
]
)
2. In the AIProxy dashboard, configure your service to use the base URL of your Azure deployment up to the /v1/messages path component.