Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,3 +1,12 @@
plugins {
id "com.android.application"
// START: FlutterFire Configuration
id 'com.google.gms.google-services'
// END: FlutterFire Configuration
id "kotlin-android"
id "dev.flutter.flutter-gradle-plugin"
}

def localProperties = new Properties()
def localPropertiesFile = rootProject.file('local.properties')
if (localPropertiesFile.exists()) {
Expand All @@ -6,11 +15,6 @@ if (localPropertiesFile.exists()) {
}
}

def flutterRoot = localProperties.getProperty('flutter.sdk')
if (flutterRoot == null) {
throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
}

def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
if (flutterVersionCode == null) {
flutterVersionCode = '1'
Expand All @@ -21,20 +25,14 @@ if (flutterVersionName == null) {
flutterVersionName = '1.0'
}

apply plugin: 'com.android.application'
// START: FlutterFire Configuration
apply plugin: 'com.google.gms.google-services'
// END: FlutterFire Configuration
apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"

android {
namespace "com.example.example"

compileSdk 35

defaultConfig {
applicationId "com.example.example"
minSdk 21
minSdk 23
targetSdk 33
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
Expand All @@ -51,6 +49,9 @@ android {
signingConfig signingConfigs.debug
}
}
kotlinOptions {
jvmTarget = '1.8' // Or '11'
}
}

flutter {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@
<application
android:label="example"
android:name="${applicationName}"
android:icon="@mipmap/ic_launcher">
android:icon="@mipmap/ic_launcher"
android:usesCleartextTraffic="true">
<activity
android:name=".MainActivity"
android:exported="true"
Expand Down Expand Up @@ -44,4 +45,6 @@
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
</manifest>
Original file line number Diff line number Diff line change
@@ -1,17 +1,3 @@
buildscript {
repositories {
google()
mavenCentral()
}

dependencies {
classpath 'com.android.tools.build:gradle:8.1.2'
// START: FlutterFire Configuration
classpath 'com.google.gms:google-services:4.4.0'
// END: FlutterFire Configuration
}
}

allprojects {
repositories {
google()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@ distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.3-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,9 @@ pluginManagement {
def flutterSdkPath = properties.getProperty("flutter.sdk")
assert flutterSdkPath != null, "flutter.sdk not set in local.properties"
return flutterSdkPath
}
settings.ext.flutterSdkPath = flutterSdkPath()
}()

includeBuild("${settings.ext.flutterSdkPath}/packages/flutter_tools/gradle")
includeBuild("$flutterSdkPath/packages/flutter_tools/gradle")

repositories {
google()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@ import 'package:firebase_auth/firebase_auth.dart';
import 'package:firebase_vertexai/firebase_vertexai.dart';
import 'package:flutter/material.dart';

// Import after file is generated through flutterfire_cli.
// import 'package:vertex_ai_example/firebase_options.dart';

import 'pages/chat_page.dart';
import 'pages/audio_page.dart';
import 'pages/function_calling_page.dart';
Expand All @@ -28,11 +31,11 @@ import 'pages/document.dart';
import 'pages/video_page.dart';
import 'pages/bidi_page.dart';

// REQUIRED if you want to run on Web
const FirebaseOptions? options = null;

void main() async {
WidgetsFlutterBinding.ensureInitialized();
// Enable this line instead once you have the firebase_options.dart generated and
// imported through flutterfire_cli.
// await Firebase.initializeApp(options: DefaultFirebaseOptions.currentPlatform);
await Firebase.initializeApp();
await FirebaseAuth.instance.signInAnonymously();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ class _BidiPageState extends State<BidiPage> {
super.initState();

final config = LiveGenerationConfig(
speechConfig: SpeechConfig(voice: Voice.fenrir),
speechConfig: SpeechConfig(voiceName: 'Fenrir'),
responseModalities: [
ResponseModalities.audio,
],
Expand Down Expand Up @@ -328,25 +328,21 @@ class _BidiPageState extends State<BidiPage> {
}
}

Future<void> _handleLiveServerMessage(LiveServerMessage response) async {
if (response is LiveServerContent && response.modelTurn != null) {
await _handleLiveServerContent(response);
}

if (response is LiveServerContent &&
response.turnComplete != null &&
response.turnComplete!) {
await _handleTurnComplete();
}

if (response is LiveServerContent &&
response.interrupted != null &&
response.interrupted!) {
log('Interrupted: $response');
}
Future<void> _handleLiveServerMessage(LiveServerResponse response) async {
final message = response.message;

if (response is LiveServerToolCall && response.functionCalls != null) {
await _handleLiveServerToolCall(response);
if (message is LiveServerContent) {
if (message.modelTurn != null) {
await _handleLiveServerContent(message);
}
if (message.turnComplete != null && message.turnComplete!) {
await _handleTurnComplete();
}
if (message.interrupted != null && message.interrupted!) {
log('Interrupted: $response');
}
} else if (message is LiveServerToolCall && message.functionCalls != null) {
await _handleLiveServerToolCall(message);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,10 @@ class InMemoryAudioRecorder {
encoder: _encoder,
sampleRate: 16000,
numChannels: 1,
androidConfig: const AndroidRecordConfig(
muteAudio: true,
audioSource: AndroidAudioSource.mic,
),
);
final devs = await _recorder.listInputDevices();
debugPrint(devs.toString());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
enableGPUValidationMode = "1"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,11 +72,11 @@ export 'src/live_api.dart'
show
LiveGenerationConfig,
SpeechConfig,
Voice,
ResponseModalities,
LiveServerMessage,
LiveServerContent,
LiveServerToolCall,
LiveServerToolCallCancellation;
LiveServerToolCallCancellation,
LiveServerResponse;
export 'src/live_session.dart' show LiveSession;
export 'src/schema.dart' show Schema, SchemaType;
88 changes: 59 additions & 29 deletions packages/firebase_vertexai/firebase_vertexai/lib/src/live_api.dart
Original file line number Diff line number Diff line change
Expand Up @@ -15,53 +15,64 @@ import 'api.dart';
import 'content.dart';
import 'error.dart';

/// The available voice options for speech synthesis.
enum Voice {
/// Configuration for a prebuilt voice.
///
/// This class allows specifying a voice by its name.
class PrebuiltVoiceConfig {
// ignore: public_member_api_docs
aoede('Aoede'),
const PrebuiltVoiceConfig({this.voiceName});

/// The voice name to use for speech synthesis.
///
/// See https://cloud.google.com/text-to-speech/docs/chirp3-hd for names and
/// sound demos.
final String? voiceName;
// ignore: public_member_api_docs
charon('Charon'),
Map<String, Object?> toJson() =>
{if (voiceName case final voiceName?) 'voice_name': voiceName};
}

/// Configuration for the voice to be used in speech synthesis.
///
/// This class currently supports using a prebuilt voice configuration.
class VoiceConfig {
// ignore: public_member_api_docs
fenrir('Fenrir'),
VoiceConfig({this.prebuiltVoiceConfig});

// ignore: public_member_api_docs
kore('Kore'),

final PrebuiltVoiceConfig? prebuiltVoiceConfig;
// ignore: public_member_api_docs
puck('Puck');

const Voice(this._jsonString);
final String _jsonString;

// ignore: public_member_api_docs
String toJson() => _jsonString;
Map<String, Object?> toJson() => {
if (prebuiltVoiceConfig case final prebuiltVoiceConfig?)
'prebuilt_voice_config': prebuiltVoiceConfig.toJson()
};
}

/// Configures speech synthesis settings.
///
/// Allows specifying the desired voice for speech synthesis.
class SpeechConfig {
/// Creates a [SpeechConfig] instance.
///
/// [voice] (optional): The desired voice for speech synthesis.
SpeechConfig({this.voice});

/// The voice to use for speech synthesis.
final Voice? voice;
/// [voiceName] See https://cloud.google.com/text-to-speech/docs/chirp3-hd
/// for names and sound demos.
SpeechConfig({String? voiceName})
: voiceConfig = voiceName != null
? VoiceConfig(
prebuiltVoiceConfig: PrebuiltVoiceConfig(voiceName: voiceName))
: null;

/// The voice config to use for speech synthesis.
final VoiceConfig? voiceConfig;
// ignore: public_member_api_docs
Map<String, Object?> toJson() => {
if (voice case final voice?)
'voice_config': {
'prebuilt_voice_config': {'voice_name': voice.toJson()}
}
if (voiceConfig case final voiceConfig?)
'voice_config': voiceConfig.toJson()
};
}

/// The available response modalities.
enum ResponseModalities {
/// Unspecified response modality.
unspecified('MODALITY_UNSPECIFIED'),

/// Text response modality.
text('TEXT'),

Expand Down Expand Up @@ -132,6 +143,7 @@ class LiveServerContent implements LiveServerMessage {
/// [interrupted] (optional): Indicates if the generation was interrupted.
LiveServerContent({this.modelTurn, this.turnComplete, this.interrupted});

// TODO(cynthia): Add accessor for media content
/// The content generated by the model.
final Content? modelTurn;

Expand Down Expand Up @@ -176,6 +188,19 @@ class LiveServerToolCallCancellation implements LiveServerMessage {
final List<String>? functionIds;
}

/// A single response chunk received during a live content generation.
///
/// It can contain generated content, function calls to be executed, or
/// instructions to cancel previous function calls, along with the status of the
/// ongoing generation.
class LiveServerResponse {
  /// Creates a [LiveServerResponse] wrapping the given server [message].
  LiveServerResponse({required this.message});

  /// The server message generated by the live model.
  final LiveServerMessage message;
}

/// Represents realtime input from the client in a live stream.
class LiveClientRealtimeInput {
/// Creates a [LiveClientRealtimeInput] instance.
Expand Down Expand Up @@ -237,7 +262,7 @@ class LiveClientToolResponse {
};
}

/// Parses a JSON object received from the live server into a [LiveServerMessage].
/// Parses a JSON object received from the live server into a [LiveServerResponse].
///
/// This function handles different types of server messages, including:
/// - Error messages, which result in a [VertexAIException] being thrown.
Expand Down Expand Up @@ -275,8 +300,13 @@ class LiveClientToolResponse {
/// - [jsonObject]: The JSON object received from the live server.
///
/// Returns:
/// - A [LiveServerMessage] object representing the parsed message.
LiveServerMessage parseServerMessage(Object jsonObject) {
/// - A [LiveServerResponse] object representing the parsed message.
LiveServerResponse parseServerResponse(Object jsonObject) {
  // Delegate the raw-JSON decoding to the private helper, then wrap the
  // typed message so the public API surface exposes a [LiveServerResponse].
  final message = _parseServerMessage(jsonObject);
  return LiveServerResponse(message: message);
}

LiveServerMessage _parseServerMessage(Object jsonObject) {
if (jsonObject case {'error': final Object error}) {
throw parseError(error);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ class LiveSession {
var jsonString = utf8.decode(message);
var response = json.decode(jsonString);

_messageController.add(parseServerMessage(response));
_messageController.add(parseServerResponse(response));
} catch (e) {
_messageController.addError(e);
}
Expand All @@ -45,7 +45,7 @@ class LiveSession {
);
}
final WebSocketChannel _ws;
final _messageController = StreamController<LiveServerMessage>.broadcast();
final _messageController = StreamController<LiveServerResponse>.broadcast();
late StreamSubscription _wsSubscription;

/// Sends content to the server.
Expand Down Expand Up @@ -107,10 +107,10 @@ class LiveSession {

/// Receives messages from the server.
///
/// Returns a [Stream] of [LiveServerMessage] objects representing the
/// Returns a [Stream] of [LiveServerResponse] objects representing the
/// messages received from the server. The stream will stop once the server
/// sends turn complete message.
Stream<LiveServerMessage> receive() async* {
Stream<LiveServerResponse> receive() async* {
_checkWsStatus();

await for (final result in _messageController.stream) {
Expand Down
Loading
Loading