Swift: Added example code for text to image generation with Amazon Nova Canvas for Bedrock #7452
base: main
@@ -423,6 +423,14 @@ bedrock-runtime_Converse_MetaLlama:
            - description: Send a text message to Meta Llama, using Bedrock's Converse API.
              snippet_tags:
                - javascript.v3.bedrock-runtime.Converse_MetaLlama
    Swift:
      versions:
        - sdk_version: 1
          github: swift/example_code/bedrock-runtime
          excerpts:
            - description: Send a text message to Meta Llama, using Bedrock's Converse API.
              snippet_tags:
                - swift.example_code.bedrock-runtime.Converse_MetaLlama
  services:
    bedrock-runtime: {Converse}
@@ -708,6 +716,14 @@ bedrock-runtime_ConverseStream_MetaLlama:
            - description: Send a text message to Meta Llama, using Bedrock's Converse API and process the response stream in real-time.
              snippet_tags:
                - javascript.v3.bedrock-runtime.ConverseStream_MetaLlama
    Swift:
      versions:
        - sdk_version: 1
          github: swift/example_code/bedrock-runtime
          excerpts:
            - description: Send a text message to Meta Llama, using Bedrock's Converse API and process the response stream in real-time.
              snippet_tags:
                - swift.example_code.bedrock-runtime.ConverseStream_MetaLlama
  services:
    bedrock-runtime: {ConverseStream}

Review comment on the added Swift entry: Same as above, please remove.
@@ -1383,6 +1399,14 @@ bedrock-runtime_InvokeModel_AmazonNovaImageGeneration:
            - description: Create an image with the Amazon Nova Canvas.
              snippet_tags:
                - python.example_code.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration
    Swift:
      versions:
        - sdk_version: 1
          github: swift/example_code/bedrock-runtime
          excerpts:
            - description: Create an image with the Amazon Nova Canvas.
              snippet_tags:
                - swift.example_code.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration
  services:
    bedrock-runtime: {InvokeModel}
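
For context, each snippet_tags value in these metadata entries refers to a snippet-start/snippet-end comment pair in the example source, and the published docs include the lines between the two markers. The pattern, as it appears in the new Swift files later in this diff (the code between the markers is whatever the example contains):

// snippet-start:[swift.example_code.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration]
// ... Swift example code extracted into the published docs ...
// snippet-end:[swift.example_code.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration]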
@@ -0,0 +1,31 @@
// swift-tools-version: 6.1
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//
// The swift-tools-version declares the minimum version of Swift required to build this package.

import PackageDescription

let package = Package(
    name: "AmazonNovaCanvas",
    // Let Xcode know the minimum Apple platforms supported.
    platforms: [
        .macOS(.v13),
        .iOS(.v15)
    ],
    dependencies: [
        // Dependencies declare other packages that this package depends on.
        .package(url: "https://github.com/awslabs/aws-sdk-swift", from: "1.2.61")
    ],
    targets: [
        // Targets are the basic building blocks of a package, defining a module or a test suite.
        // Targets can depend on other targets in this package and products from dependencies.
        .executableTarget(
            name: "InvokeModel",
            dependencies: [
                .product(name: "AWSBedrockRuntime", package: "aws-sdk-swift"),
            ],
            path: "Sources"
        )
    ]
)
@@ -0,0 +1,74 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//
// snippet-start:[swift.example_code.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration]
// Use the native inference API to create an image with Amazon Nova Canvas.

import AWSBedrockRuntime
import Foundation

struct NovaImageOutput: Decodable {
    let images: [Data]
}

func generateImage(_ textPrompt: String, to path: String) async throws {
    // Create a Bedrock Runtime client in the AWS Region you want to use.
    let config =
        try await BedrockRuntimeClient.BedrockRuntimeClientConfiguration(
            region: "us-east-1"
        )

    let client = BedrockRuntimeClient(config: config)

    // Set the model ID.
    let modelId = "amazon.nova-canvas-v1:0"

    // Format the request payload using the model's native structure.
    let input = InvokeModelInput(
        accept: "application/json",
        body: """
        {
            "textToImageParams": {
                "text": "\(textPrompt)"
            },
            "taskType": "TEXT_IMAGE",
            "imageGenerationConfig": {
                "seed": 42,
                "quality": "standard",
                "width": 512,
                "height": 512,
                "numberOfImages": 1
            }
        }
        """.data(using: .utf8),
        modelId: modelId
    )

    // Invoke the model with the request.
    let response = try await client.invokeModel(input: input)

    // Decode the response body.
    let titanImage = try JSONDecoder().decode(NovaImageOutput.self, from: response.body!)

    // Extract the image data.
    let data = titanImage.images.first
    guard let data = data else {
        print("No image data found")
        return
    }

    // Save the generated image to a local folder.
    let fileURL = URL(fileURLWithPath: path)
    try data.write(to: fileURL)
    print("Image is saved at \(path)")
}

// snippet-end:[swift.example_code.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration]

do {
    try await generateImage(
        "A tabby cat in a teacup", to: "/Users/monadierickx/Desktop/img/nova_canvas.png"
    )
} catch {
    print("An error occurred: \(error)")
}
Review comment: Carried over from #7434. Please remove.
@@ -0,0 +1,38 @@
// swift-tools-version: 6.1
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//
// The swift-tools-version declares the minimum version of Swift required to build this package.

import PackageDescription

let package = Package(
    name: "MetaLlamaConverse",
    // Let Xcode know the minimum Apple platforms supported.
    platforms: [
        .macOS(.v13),
        .iOS(.v15)
    ],
    dependencies: [
        // Dependencies declare other packages that this package depends on.
        .package(url: "https://github.com/awslabs/aws-sdk-swift", from: "1.2.61")
    ],
    targets: [
        // Targets are the basic building blocks of a package, defining a module or a test suite.
        // Targets can depend on other targets in this package and products from dependencies.
        .executableTarget(
            name: "Converse",
            dependencies: [
                .product(name: "AWSBedrockRuntime", package: "aws-sdk-swift"),
            ],
            path: "Sources/Converse"
        ),
        .executableTarget(
            name: "ConverseStream",
            dependencies: [
                .product(name: "AWSBedrockRuntime", package: "aws-sdk-swift"),
            ],
            path: "Sources/ConverseStream"
        )
    ]
)
Review comment: Carried over from #7434. Please remove.
@@ -0,0 +1,65 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//
// snippet-start:[swift.example_code.bedrock-runtime.Converse_MetaLlama]
// An example demonstrating how to use the Conversation API to send
// a text message to Meta Llama.

import AWSBedrockRuntime

func converse(_ textPrompt: String) async throws -> String {

    // Create a Bedrock Runtime client in the AWS Region you want to use.
    let config =
        try await BedrockRuntimeClient.BedrockRuntimeClientConfiguration(
            region: "us-east-1"
        )
    let client = BedrockRuntimeClient(config: config)

    // Set the model ID.
    let modelId = "meta.llama3-8b-instruct-v1:0"

    // Start a conversation with the user message.
    let message = BedrockRuntimeClientTypes.Message(
        content: [.text(textPrompt)],
        role: .user
    )

    // Optionally use inference parameters.
    let inferenceConfig =
        BedrockRuntimeClientTypes.InferenceConfiguration(
            maxTokens: 512,
            stopSequences: ["END"],
            temperature: 0.5,
            topp: 0.9
        )

    // Create the ConverseInput to send to the model.
    let input = ConverseInput(
        inferenceConfig: inferenceConfig, messages: [message], modelId: modelId)

    // Send the ConverseInput to the model.
    let response = try await client.converse(input: input)

    // Extract and return the response text.
    if case let .message(msg) = response.output {
        if case let .text(textResponse) = msg.content![0] {
            return textResponse
        } else {
            return "No text response found in message content"
        }
    } else {
        return "No message found in converse output"
    }
}

// snippet-end:[swift.example_code.bedrock-runtime.Converse_MetaLlama]

do {
    let reply = try await converse(
        "Describe the purpose of a 'hello world' program in one line."
    )
    print(reply)
} catch {
    print("An error occurred: \(error)")
}
Review comment: Carried over from #7434. Please remove.
@@ -0,0 +1,75 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//
// snippet-start:[swift.example_code.bedrock-runtime.ConverseStream_MetaLlama]
// An example demonstrating how to use the Conversation API to send a text message
// to Meta Llama and print the response stream.

import AWSBedrockRuntime

func printConverseStream(_ textPrompt: String) async throws {

    // Create a Bedrock Runtime client in the AWS Region you want to use.
    let config =
        try await BedrockRuntimeClient.BedrockRuntimeClientConfiguration(
            region: "us-east-1"
        )
    let client = BedrockRuntimeClient(config: config)

    // Set the model ID.
    let modelId = "meta.llama3-8b-instruct-v1:0"

    // Start a conversation with the user message.
    let message = BedrockRuntimeClientTypes.Message(
        content: [.text(textPrompt)],
        role: .user
    )

    // Optionally use inference parameters.
    let inferenceConfig =
        BedrockRuntimeClientTypes.InferenceConfiguration(
            maxTokens: 512,
            stopSequences: ["END"],
            temperature: 0.5,
            topp: 0.9
        )

    // Create the ConverseStreamInput to send to the model.
    let input = ConverseStreamInput(
        inferenceConfig: inferenceConfig, messages: [message], modelId: modelId)

    // Send the ConverseStreamInput to the model.
    let response = try await client.converseStream(input: input)

    // Extract the streaming response.
    guard let stream = response.stream else {
        print("No stream available")
        return
    }

    // Extract and print the streamed response text in real-time.
    for try await event in stream {
        switch event {
        case .messagestart(_):
            print("\nMeta Llama:")

        case .contentblockdelta(let deltaEvent):
            if case .text(let text) = deltaEvent.delta {
                print(text, terminator: "")
            }

        default:
            break
        }
    }
}

// snippet-end:[swift.example_code.bedrock-runtime.ConverseStream_MetaLlama]

do {
    try await printConverseStream(
        "Describe the purpose of a 'hello world' program in two paragraphs."
    )
} catch {
    print("An error occurred: \(error)")
}
Review comment: This change appears to be carried over from PR #7434. Could you please remove it to keep PRs isolated? Thanks!

Reply: I will do this!