
Commit 0436bea

[FirebaseAI] Implicit caching support
1 parent a9503fd commit 0436bea

File tree: 2 files changed (+12, -0 lines)

FirebaseAI/Sources/GenerateContentResponse.swift

Lines changed: 11 additions & 0 deletions
@@ -23,6 +23,9 @@ public struct GenerateContentResponse: Sendable {
     /// The number of tokens in the request prompt.
     public let promptTokenCount: Int

+    /// Number of tokens in the cached part of the prompt (the cached content)
+    public let cachedContentTokenCount: Int
+
     /// The total number of tokens across the generated response candidates.
     public let candidatesTokenCount: Int

@@ -42,6 +45,9 @@ public struct GenerateContentResponse: Sendable {
     /// The breakdown, by modality, of how many tokens are consumed by the prompt
     public let promptTokensDetails: [ModalityTokenCount]

+    /// The breakdown, by modality, of how many tokens are consumed by the cachedContent
+    public let cacheTokensDetails: [ModalityTokenCount]
+
     /// The breakdown, by modality, of how many tokens are consumed by the candidates
     public let candidatesTokensDetails: [ModalityTokenCount]
   }
@@ -339,22 +345,27 @@ extension GenerateContentResponse: Decodable {
 extension GenerateContentResponse.UsageMetadata: Decodable {
   enum CodingKeys: CodingKey {
     case promptTokenCount
+    case cacheContentTokenCount
     case candidatesTokenCount
     case thoughtsTokenCount
     case totalTokenCount
     case promptTokensDetails
+    case cacheTokensDetails
     case candidatesTokensDetails
   }

   public init(from decoder: any Decoder) throws {
     let container = try decoder.container(keyedBy: CodingKeys.self)
     promptTokenCount = try container.decodeIfPresent(Int.self, forKey: .promptTokenCount) ?? 0
+    cachedContentTokenCount = try container.decodeIfPresent(Int.self, forKey: .cacheContentTokenCount) ?? 0
     candidatesTokenCount =
       try container.decodeIfPresent(Int.self, forKey: .candidatesTokenCount) ?? 0
     thoughtsTokenCount = try container.decodeIfPresent(Int.self, forKey: .thoughtsTokenCount) ?? 0
     totalTokenCount = try container.decodeIfPresent(Int.self, forKey: .totalTokenCount) ?? 0
     promptTokensDetails =
       try container.decodeIfPresent([ModalityTokenCount].self, forKey: .promptTokensDetails) ?? []
+    cacheTokensDetails =
+      try container.decodeIfPresent([ModalityTokenCount].self, forKey: .cacheTokensDetails) ?? []
     candidatesTokensDetails = try container.decodeIfPresent(
       [ModalityTokenCount].self,
       forKey: .candidatesTokensDetails
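
The new UsageMetadata fields are read-only additions, so existing callers are unaffected. Below is a minimal usage sketch, not part of this commit, showing how an app might surface the cache counters after a request. It assumes an already-configured GenerativeModel passed in as model, a deployment target that meets FirebaseAI's platform minimums, and the modality/tokenCount properties of ModalityTokenCount from the existing SDK.

import FirebaseAI

// Hypothetical helper (not part of the SDK): logs how much of the prompt was served
// from the implicit cache, using the fields added in this commit.
func logCacheUsage(model: GenerativeModel) async throws {
  let response = try await model.generateContent("Summarize the release notes.")
  guard let usage = response.usageMetadata else { return }
  print("prompt tokens: \(usage.promptTokenCount)")
  print("cached prompt tokens: \(usage.cachedContentTokenCount)")
  // Per-modality breakdown of the cached content.
  for detail in usage.cacheTokensDetails {
    print("cached \(detail.modality): \(detail.tokenCount)")
  }
}

Because the decoder falls back to 0 and [] via decodeIfPresent, these fields simply read as zero or empty when the backend reports no cache usage.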

FirebaseAI/Tests/Unit/APITests.swift

Lines changed: 1 addition & 0 deletions
@@ -176,6 +176,7 @@ final class APITests: XCTestCase {
     // Usage Metadata
     guard let usageMetadata = response.usageMetadata else { fatalError() }
     let _: Int = usageMetadata.promptTokenCount
+    let _: Int = usageMetadata.cachedContentTokenCount
     let _: Int = usageMetadata.candidatesTokenCount
     let _: Int = usageMetadata.totalTokenCount
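
For completeness, a hypothetical round-trip check, not part of this commit, of the decoder changes: the JSON key names simply mirror the CodingKeys case names shown in the diff above, and absent keys fall back to the declared defaults.

import Foundation
import FirebaseAI

// Hypothetical test helper: decodes a usage-metadata payload directly to exercise the
// decodeIfPresent defaults added in this commit.
func decodeUsageMetadataSample() throws {
  let json = Data("""
  {
    "promptTokenCount": 120,
    "cacheContentTokenCount": 100,
    "candidatesTokenCount": 40,
    "totalTokenCount": 160
  }
  """.utf8)
  let usage = try JSONDecoder().decode(GenerateContentResponse.UsageMetadata.self, from: json)
  assert(usage.cachedContentTokenCount == 100)
  assert(usage.cacheTokensDetails.isEmpty) // absent key falls back to []
}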
