[accidentally created pull-request] #215


Closed
wants to merge 22 commits into from
82 changes: 48 additions & 34 deletions Package.swift
@@ -1,5 +1,4 @@
// swift-tools-version: 5.7.1
// The swift-tools-version declares the minimum version of Swift required to build this package.
// swift-tools-version:5.10

// Copyright 2023 Google LLC
//
@@ -18,36 +17,51 @@
import PackageDescription

let package = Package(
name: "generative-ai-swift",
platforms: [
.iOS(.v11),
.macOS(.v10_13),
.macCatalyst(.v13),
],
products: [
.library(
name: "GoogleGenerativeAI",
targets: ["GoogleGenerativeAI"]
),
],
targets: [
.target(
name: "GoogleGenerativeAI",
path: "Sources"
),
.testTarget(
name: "GoogleGenerativeAITests",
dependencies: ["GoogleGenerativeAI"],
path: "Tests",
resources: [
.process("GoogleAITests/CountTokenResponses"),
.process("GoogleAITests/GenerateContentResponses"),
]
),
.testTarget(
name: "CodeSnippetTests",
dependencies: ["GoogleGenerativeAI"],
path: "samples"
),
]
name: "Gemini",
platforms: [
.iOS(.v16),
.macOS(.v13),
.macCatalyst(.v16),
],
products: [
.library(
name: "Gemini",
targets: [
"Gemini",
"GoogleGenerativeAI",
]
),
],
dependencies: [
.package(url: "https://github.com/PreternaturalAI/AI.git", branch: "main"),
],
targets: [
.target(
name: "Gemini",
dependencies: [
"AI"
],
path: "Sources/GoogleAI"
),
.target(
name: "GoogleGenerativeAI",
dependencies: [
"Gemini"
],
path: "Sources/GoogleGenerativeAI"
),
.testTarget(
name: "GeminiTests",
dependencies: [
"Gemini",
"GoogleGenerativeAI",
],
path: "Tests",
resources: [
.process("GoogleAITests/CountTokenResponses"),
.process("GoogleAITests/GenerateContentResponses"),
]
),
],
swiftLanguageVersions: [.v5]
)
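
For reference, a minimal sketch of how a downstream package might consume the renamed products after this change. The repository URL, branch, and consumer target names below are placeholders assumed for illustration; only the "Gemini" product name comes from the manifest above.

```swift
// swift-tools-version:5.10
import PackageDescription

let package = Package(
    name: "MyApp", // hypothetical consumer package
    platforms: [.iOS(.v16), .macOS(.v13)],
    dependencies: [
        // Placeholder URL and branch; point this at the fork/branch you actually build against.
        .package(url: "https://github.com/PreternaturalAI/generative-ai-swift.git", branch: "main"),
    ],
    targets: [
        .executableTarget(
            name: "MyApp",
            dependencies: [
                // The single "Gemini" library product now vends both the "Gemini"
                // and "GoogleGenerativeAI" targets, per the manifest above.
                .product(name: "Gemini", package: "generative-ai-swift"),
            ]
        ),
    ]
)
```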
19 changes: 18 additions & 1 deletion Sources/GoogleAI/GenerateContentError.swift
@@ -16,7 +16,7 @@ import Foundation

/// Errors that occur when generating content from a model.
@available(iOS 15.0, macOS 11.0, macCatalyst 15.0, *)
public enum GenerateContentError: Error {
public enum GenerateContentError: CustomStringConvertible, Error {
    /// An error occurred when constructing the prompt. Examine the related error for details.
    case promptImageContentError(underlying: ImageConversionError)

@@ -41,4 +41,21 @@ public enum GenerateContentError: Error {
    /// - Important: The API is only available in
    /// [specific regions](https://ai.google.dev/available_regions#available_regions).
    case unsupportedUserLocation

    public var description: String {
        switch self {
        case .internalError(underlying: let error):
            return "An internal error occurred: \(error.localizedDescription)"
        case .promptImageContentError(underlying: let underlying):
            return "An error occurred when constructing the prompt - Image Content Error: \(underlying.localizedDescription)"
        case .promptBlocked(response: let response):
            return "A prompt was blocked: \(String(describing: response.promptFeedback?.blockReason.debugDescription ?? response.text?.debugDescription))"
        case .responseStoppedEarly(reason: let reason, response: let response):
            return "A response didn't fully complete: \(reason.rawValue), \(response.text ?? "")"
        case .invalidAPIKey(message: let message):
            return "The provided API key is invalid: \(message)"
        case .unsupportedUserLocation:
            return "The user's location (region) is not supported by the API."
        }
    }
}
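
With the new `CustomStringConvertible` conformance, callers can log a readable message instead of the bare enum case. A minimal sketch of a call site (the model value and prompt are placeholders; `generateContent` and `text` follow the existing GoogleGenerativeAI API):

```swift
import GoogleGenerativeAI

func run(model: GenerativeModel) async {
    do {
        let response = try await model.generateContent("Hello, Gemini!")
        print(response.text ?? "<no text returned>")
    } catch let error as GenerateContentError {
        // `description` now produces the human-readable messages defined above.
        print("Generation failed: \(error.description)")
    } catch {
        print("Unexpected error: \(error)")
    }
}
```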
92 changes: 92 additions & 0 deletions Sources/GoogleAI/Preternatural/AbstractLLM+Gemini.swift
@@ -0,0 +1,92 @@
//
// Copyright (c) Vatsal Manot
//

import LargeLanguageModels
import Swallow

extension AbstractLLM.ChatMessage {
    public init(
        _from modelContent: ModelContent
    ) throws {
        var content: PromptLiteral = .empty
        let role = try AbstractLLM.ChatRole(_from: try modelContent._role.unwrap())

        for part in modelContent.parts {
            content.append(PromptLiteral(_from: part))
        }

        self.init(role: role, content: content)
    }
}

extension PromptLiteral {
    init(_from part: ModelContent.Part) {
        switch part {
        case .text(let value):
            self.init(stringLiteral: value)
        case .data(let mimetype, _):
            self.init(stringLiteral: "[Data: \(mimetype)]")
        case .fileData(let mimetype, let uri):
            self.init(stringLiteral: "[File: \(mimetype) at \(uri)]")
        case .functionCall(let functionCall):
            self.init(stringLiteral: "Function Call: \(functionCall.name)")
        case .functionResponse(let functionResponse):
            self.init(stringLiteral: "Function Response: \(functionResponse.name)")
        case .executableCode(let executableCode):
            self.init(stringLiteral: """
                ```\(executableCode.language.lowercased())
                \(executableCode.code)
                ```
                """)
        case .codeExecutionResult(let result):
            let status = result.outcome == .ok ? "Success" : "Error"
            self.init(stringLiteral: """
                Execution Result (\(status)):
                ```
                \(result.output)
                ```
                """)
        }
    }
}

extension AbstractLLM.ChatRole {
    init(_from role: ModelContent._Role) throws {
        switch role {
        case .systemInstruction:
            self = .system
        case .model:
            self = .assistant // FIXME: Should we create a strategy selection for Gemini.Client to make the choice of converting from "model" to "assistant" explicit? _Is_ the semantic of a 'model' the same as that of an 'assistant' ¯\_(ツ)_/¯
        case .user:
            self = .user
        }
    }
}

extension AbstractLLM.ChatCompletion.StopReason {
    public init(_from finishReason: FinishReason) throws {
        switch finishReason {
        case .unknown:
            TODO.unimplemented
        case .unspecified:
            TODO.unimplemented
        case .stop:
            self.init(type: .endTurn) // FIXME: This could be a stop sequence or a natural end of turn; `StopReason` needs to be improved to accommodate this ambiguity.
        case .maxTokens:
            self.init(type: .maxTokens)
        case .safety:
            TODO.unimplemented
        case .recitation:
            TODO.unimplemented
        case .other:
            TODO.unimplemented
        }
    }
}
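
A rough sketch of how these bridges might be driven from a generated candidate. It assumes `CandidateResponse` exposes `content: ModelContent` and `finishReason: FinishReason?` as in the upstream SDK, and that the `Gemini` target vends those types; the helper below is hypothetical and not part of this PR.

```swift
import Gemini
import LargeLanguageModels

// Hypothetical helper: map a candidate into AbstractLLM terms using the bridges above.
func bridge(
    _ candidate: CandidateResponse
) throws -> (message: AbstractLLM.ChatMessage, stopReason: AbstractLLM.ChatCompletion.StopReason?) {
    let message = try AbstractLLM.ChatMessage(_from: candidate.content)
    let stopReason = try candidate.finishReason.map(AbstractLLM.ChatCompletion.StopReason.init(_from:))
    return (message, stopReason)
}
```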