Changes from all commits (31 commits):
216a06d  [AI] Server Prompt Templates (paulb777, Oct 3, 2025)
405d5fe  copyrights (paulb777, Oct 3, 2025)
c3c03be  Checkpoint after trying to add imagen implementation (paulb777, Oct 7, 2025)
953b3f0  end to end testGenerateContentWithText passes (paulb777, Oct 9, 2025)
daf7b4e  FirebaseAI/Tests/Unit/TemplateGenerativeModelTests.swift (paulb777, Oct 9, 2025)
29c79b1  CI fixes (paulb777, Oct 10, 2025)
6aa13e0  Streaming APIs (paulb777, Oct 10, 2025)
e75d7e9  chat history refactor (paulb777, Oct 10, 2025)
6cc6c9d  Re-sort tests (paulb777, Oct 10, 2025)
ac996fd  Fix TemplateImagenModel to encode variables as inputs (google-labs-jules[bot], Oct 10, 2025)
7ba2c28  Fix: Correct URL construction for image generation requests (google-labs-jules[bot], Oct 10, 2025)
dd66187  checkpoint (paulb777, Oct 12, 2025)
b3c28d1  Imagen generate test works (paulb777, Oct 12, 2025)
705d0c2  fix template imagen response return type (paulb777, Oct 12, 2025)
17fd0b8  existing tests passing (paulb777, Oct 12, 2025)
c4068e9  build fix (paulb777, Oct 12, 2025)
7af117c  style (paulb777, Oct 12, 2025)
32bc581  Add streaming integration tests (paulb777, Oct 13, 2025)
1d162dc  testChatStream (paulb777, Oct 13, 2025)
d7beef0  unit tests run (paulb777, Oct 13, 2025)
fd8f76a  streaming unit tests (paulb777, Oct 13, 2025)
a49dee1  existing mock files for template unit tests (paulb777, Oct 13, 2025)
3f49b09  streamline integration tests (paulb777, Oct 13, 2025)
3f3607a  fixes (paulb777, Oct 13, 2025)
64df672  Templates do not have an implicit .prompt suffix and only global loca… (paulb777, Oct 14, 2025)
9f91c21  fixes (paulb777, Oct 14, 2025)
fa4a3f4  self review, including file/naming updates (paulb777, Oct 14, 2025)
7bd5d2c  Update Integration test xcodeproj after rebase (paulb777, Oct 16, 2025)
eeb9e66  module rename updates after rebase for unit tests (paulb777, Oct 16, 2025)
1840566  [Firebase AI] Refactor SPT integration tests to Swift Testing (#15426) (andrewheard, Oct 17, 2025)
67d40eb  Continue migration from template templates to project templates (paulb777, Oct 17, 2025)
79 changes: 9 additions & 70 deletions FirebaseAI/Sources/Chat.swift
@@ -19,35 +19,21 @@ import Foundation
  @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
  public final class Chat: Sendable {
  private let model: GenerativeModel
+ private let _history: History

  /// Initializes a new chat representing a 1:1 conversation between model and user.
  init(model: GenerativeModel, history: [ModelContent]) {
  self.model = model
- self.history = history
+ _history = History(history: history)
  }

- private let historyLock = NSLock()
- private nonisolated(unsafe) var _history: [ModelContent] = []
  /// The previous content from the chat that has been successfully sent and received from the
  /// model. This will be provided to the model for each message sent as context for the discussion.
  public var history: [ModelContent] {
  get {
- historyLock.withLock { _history }
+ return _history.history
  }
  set {
- historyLock.withLock { _history = newValue }
- }
- }
-
- private func appendHistory(contentsOf: [ModelContent]) {
- historyLock.withLock {
- _history.append(contentsOf: contentsOf)
- }
- }
-
- private func appendHistory(_ newElement: ModelContent) {
- historyLock.withLock {
- _history.append(newElement)
+ _history.history = newValue
  }
  }

@@ -87,8 +73,8 @@ public final class Chat: Sendable {
  let toAdd = ModelContent(role: "model", parts: reply.parts)

  // Append the request and successful result to history, then return the value.
- appendHistory(contentsOf: newContent)
- appendHistory(toAdd)
+ _history.append(contentsOf: newContent)
+ _history.append(toAdd)
  return result
  }

@@ -136,63 +122,16 @@ public final class Chat: Sendable {
  }

  // Save the request.
- appendHistory(contentsOf: newContent)
+ _history.append(contentsOf: newContent)

  // Aggregate the content to add it to the history before we finish.
- let aggregated = self.aggregatedChunks(aggregatedContent)
- self.appendHistory(aggregated)
+ let aggregated = self._history.aggregatedChunks(aggregatedContent)
+ self._history.append(aggregated)
  continuation.finish()
  }
  }
  }

- private func aggregatedChunks(_ chunks: [ModelContent]) -> ModelContent {
- var parts: [InternalPart] = []
- var combinedText = ""
- var combinedThoughts = ""
-
- func flush() {
- if !combinedThoughts.isEmpty {
- parts.append(InternalPart(.text(combinedThoughts), isThought: true, thoughtSignature: nil))
- combinedThoughts = ""
- }
- if !combinedText.isEmpty {
- parts.append(InternalPart(.text(combinedText), isThought: nil, thoughtSignature: nil))
- combinedText = ""
- }
- }
-
- // Loop through all the parts, aggregating the text.
- for part in chunks.flatMap({ $0.internalParts }) {
- // Only text parts may be combined.
- if case let .text(text) = part.data, part.thoughtSignature == nil {
- // Thought summaries must not be combined with regular text.
- if part.isThought ?? false {
- // If we were combining regular text, flush it before handling "thoughts".
- if !combinedText.isEmpty {
- flush()
- }
- combinedThoughts += text
- } else {
- // If we were combining "thoughts", flush it before handling regular text.
- if !combinedThoughts.isEmpty {
- flush()
- }
- combinedText += text
- }
- } else {
- // This is a non-combinable part (not text), flush any pending text.
- flush()
- parts.append(part)
- }
- }
-
- // Flush any remaining text.
- flush()
-
- return ModelContent(role: "model", parts: parts)
- }
-
  /// Populates the `role` field with `user` if it doesn't exist. Required in chat sessions.
  private func populateContentRole(_ content: ModelContent) -> ModelContent {
  if content.role != nil {
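Note on this file: the thread-safe history bookkeeping that used to live in Chat (the NSLock, the `_history` array, and `aggregatedChunks`) now lives in the new `History` class added later in this diff, so `Chat` and `TemplateChatSession` share one implementation. Below is a minimal usage sketch of the refactored `Chat`, not part of the diff; it assumes the existing `FirebaseAI.firebaseAI()`, `generativeModel(modelName:)`, and `startChat(history:)` entry points, and the model name is a placeholder.

```swift
// Sketch only, not part of the diff. Assumes the existing FirebaseAI public API;
// the model name is a placeholder.
import FirebaseAI

func chatHistorySketch() async throws {
  let model = FirebaseAI.firebaseAI().generativeModel(modelName: "gemini-2.0-flash")
  let chat = model.startChat(history: [
    ModelContent(role: "user", parts: "Hello"),
    ModelContent(role: "model", parts: "Hi, how can I help?"),
  ])

  // Send a message on a child task so history can be read concurrently.
  let replyTask = Task { try await chat.sendMessage("Summarize our conversation so far.") }

  // Reading history here is serialized by the NSLock inside History, even
  // though the reply has not been appended yet.
  print("turns before reply: \(chat.history.count)")

  let response = try await replyTask.value
  print(response.text ?? "")
  // On success, the user message and the model reply were both appended.
  print("turns after reply: \(chat.history.count)")
}
```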
22 changes: 22 additions & 0 deletions FirebaseAI/Sources/FirebaseAI.swift
@@ -135,6 +135,28 @@ public final class FirebaseAI: Sendable {
)
}

/// Initializes a new `TemplateGenerativeModel`.
///
/// - Returns: A new `TemplateGenerativeModel` instance.
public func templateGenerativeModel() -> TemplateGenerativeModel {
return TemplateGenerativeModel(
generativeAIService: GenerativeAIService(firebaseInfo: firebaseInfo,
urlSession: GenAIURLSession.default),
apiConfig: apiConfig
)
}

/// Initializes a new `TemplateImagenModel`.
///
/// - Returns: A new `TemplateImagenModel` instance.
public func templateImagenModel() -> TemplateImagenModel {
return TemplateImagenModel(
generativeAIService: GenerativeAIService(firebaseInfo: firebaseInfo,
urlSession: GenAIURLSession.default),
apiConfig: apiConfig
)
}

/// **[Public Preview]** Initializes a ``LiveGenerativeModel`` with the given parameters.
///
/// - Note: Refer to [the Firebase docs on the Live
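A minimal sketch of the two new factory methods, not part of the diff; it assumes the existing `FirebaseAI.firebaseAI()` accessor. Neither method takes a model name, since the server-side prompt template is expected to determine the model; the returned models are exercised through template-aware calls such as the `TemplateChatSession` shown further down.

```swift
// Sketch only: wiring up the new template-backed models.
import FirebaseAI

let ai = FirebaseAI.firebaseAI()

// Text generation driven by a server-side prompt template.
let templateModel: TemplateGenerativeModel = ai.templateGenerativeModel()

// Image generation driven by a server-side prompt template.
let templateImagenModel: TemplateImagenModel = ai.templateImagenModel()
```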
9 changes: 0 additions & 9 deletions FirebaseAI/Sources/GenerateContentRequest.swift
@@ -60,15 +60,6 @@ extension GenerateContentRequest: Encodable {
}
}

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
extension GenerateContentRequest {
enum APIMethod: String {
case generateContent
case streamGenerateContent
case countTokens
}
}

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
extension GenerateContentRequest: GenerativeAIRequest {
typealias Response = GenerateContentResponse
2 changes: 1 addition & 1 deletion FirebaseAI/Sources/GenerativeAIService.swift
@@ -26,7 +26,7 @@ struct GenerativeAIService {
  /// The Firebase SDK version in the format `fire/<version>`.
  static let firebaseVersionTag = "fire/\(FirebaseVersion())"

- private let firebaseInfo: FirebaseInfo
+ let firebaseInfo: FirebaseInfo

  private let urlSession: URLSession

94 changes: 94 additions & 0 deletions FirebaseAI/Sources/History.swift
@@ -0,0 +1,94 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import Foundation

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
final class History: Sendable {
private let historyLock = NSLock()
private nonisolated(unsafe) var _history: [ModelContent] = []
/// The previous content from the chat that has been successfully sent and received from the
/// model. This will be provided to the model for each message sent as context for the discussion.
public var history: [ModelContent] {
get {
historyLock.withLock { _history }
}
set {
historyLock.withLock { _history = newValue }
}
}

init(history: [ModelContent]) {
self.history = history
}

func append(contentsOf: [ModelContent]) {
historyLock.withLock {
_history.append(contentsOf: contentsOf)
}
}

func append(_ newElement: ModelContent) {
historyLock.withLock {
_history.append(newElement)
}
}

func aggregatedChunks(_ chunks: [ModelContent]) -> ModelContent {
var parts: [InternalPart] = []
var combinedText = ""
var combinedThoughts = ""

func flush() {
if !combinedThoughts.isEmpty {
parts.append(InternalPart(.text(combinedThoughts), isThought: true, thoughtSignature: nil))
combinedThoughts = ""
}
if !combinedText.isEmpty {
parts.append(InternalPart(.text(combinedText), isThought: nil, thoughtSignature: nil))
combinedText = ""
}
}

// Loop through all the parts, aggregating the text.
for part in chunks.flatMap({ $0.internalParts }) {
// Only text parts may be combined.
if case let .text(text) = part.data, part.thoughtSignature == nil {
// Thought summaries must not be combined with regular text.
if part.isThought ?? false {
// If we were combining regular text, flush it before handling "thoughts".
if !combinedText.isEmpty {
flush()
}
combinedThoughts += text
} else {
// If we were combining "thoughts", flush it before handling regular text.
if !combinedThoughts.isEmpty {
flush()
}
combinedText += text
}
} else {
// This is a non-combinable part (not text), flush any pending text.
flush()
parts.append(part)
}
}

// Flush any remaining text.
flush()

return ModelContent(role: "model", parts: parts)
}
}
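`History` is internal to the module, so the sketch below is test-style code written as if inside `FirebaseAI` and is not part of the diff; it only illustrates the aggregation behavior that `Chat` and `TemplateChatSession` rely on when a stream finishes.

```swift
// Sketch only; module-internal code (History is not public).
let history = History(history: [ModelContent(role: "user", parts: "Hello")])

// Simulated streaming chunks from the model.
let chunks = [
  ModelContent(role: "model", parts: "Hel"),
  ModelContent(role: "model", parts: "lo, "),
  ModelContent(role: "model", parts: "world."),
]

// Adjacent text parts are merged into one part; thought summaries and
// non-text parts would be kept separate by the flush logic above.
let aggregated = history.aggregatedChunks(chunks)
history.append(aggregated)

print(history.history.count) // 2: the user turn plus one aggregated model turn
```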
109 changes: 109 additions & 0 deletions FirebaseAI/Sources/TemplateChatSession.swift
@@ -0,0 +1,109 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import Foundation

/// A chat session that allows for conversation with a model.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
public final class TemplateChatSession: Sendable {
private let model: TemplateGenerativeModel
private let template: String
private let _history: History

init(model: TemplateGenerativeModel, template: String, history: [ModelContent]) {
self.model = model
self.template = template
_history = History(history: history)
}

public var history: [ModelContent] {
get {
return _history.history
}
set {
_history.history = newValue
}
}

/// Sends a message to the model and returns the response.
public func sendMessage(_ message: any PartsRepresentable,
variables: [String: Any],
options: RequestOptions = RequestOptions()) async throws
-> GenerateContentResponse {
let templateVariables = try variables.mapValues { try TemplateVariable(value: $0) }
let newContent = populateContentRole(ModelContent(parts: message.partsValue))
let response = try await model.generateContentWithHistory(
history: _history.history + [newContent],
template: template,
variables: templateVariables,
options: options
)
_history.append(newContent)
if let modelResponse = response.candidates.first {
_history.append(modelResponse.content)
}
return response
}

public func sendMessageStream(_ message: any PartsRepresentable,
variables: [String: Any],
options: RequestOptions = RequestOptions()) throws
-> AsyncThrowingStream<GenerateContentResponse, Error> {
let templateVariables = try variables.mapValues { try TemplateVariable(value: $0) }
let newContent = populateContentRole(ModelContent(parts: message.partsValue))
let stream = try model.generateContentStreamWithHistory(
history: _history.history + [newContent],
template: template,
variables: templateVariables,
options: options
)
return AsyncThrowingStream { continuation in
Task {
var aggregatedContent: [ModelContent] = []

do {
for try await chunk in stream {
// Capture any content that's streaming. This should be populated if there's no error.
if let chunkContent = chunk.candidates.first?.content {
aggregatedContent.append(chunkContent)
}

// Pass along the chunk.
continuation.yield(chunk)
}
} catch {
// Rethrow the error that the underlying stream threw. Don't add anything to history.
continuation.finish(throwing: error)
return
}

// Save the request.
_history.append(newContent)

// Aggregate the content to add it to the history before we finish.
let aggregated = _history.aggregatedChunks(aggregatedContent)
_history.append(aggregated)
continuation.finish()
}
}
}

private func populateContentRole(_ content: ModelContent) -> ModelContent {
if content.role != nil {
return content
} else {
return ModelContent(role: "user", parts: content.parts)
}
}
}
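`TemplateChatSession`'s initializer is internal, so app code would presumably obtain a session from a factory on `TemplateGenerativeModel` that is not shown in this diff; the sketch below, not part of the diff, calls the initializer directly, as module-internal or test code would. The template ID and the variable names are placeholders for a deployed server-side prompt template.

```swift
// Sketch only; "travel-chat" and the variables are placeholders.
func templateChatSketch() async throws {
  let model = FirebaseAI.firebaseAI().templateGenerativeModel()
  let chat = TemplateChatSession(model: model, template: "travel-chat", history: [])

  // Non-streaming: on success the user turn and the model reply are both
  // appended to the session's history.
  let response = try await chat.sendMessage(
    "What should I pack?",
    variables: ["destination": "Tokyo", "days": 3]
  )
  print(response.text ?? "")

  // Streaming: chunks are yielded as they arrive; the aggregated reply is
  // appended to history only after the stream finishes without error.
  let stream = try chat.sendMessageStream(
    "And what about food?",
    variables: ["destination": "Tokyo", "days": 3]
  )
  for try await chunk in stream {
    print(chunk.text ?? "", terminator: "")
  }
  print("\nhistory turns: \(chat.history.count)")
}
```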