Skip to content
Merged
Prev Previous commit
Next Next commit
Clean up before review
  • Loading branch information
cynthiajoan committed May 30, 2025
commit 340db1f92d4faef76eda9e827f46fd7f33e555ca
78 changes: 27 additions & 51 deletions packages/firebase_ai/firebase_ai/example/lib/pages/bidi_page.dart
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,15 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import 'dart:typed_data';
import 'dart:async';
import 'dart:developer';
import 'dart:developer' as developer;

import 'package:flutter/material.dart';
import 'package:firebase_ai/firebase_ai.dart';

import '../utils/audio_input.dart';
import '../utils/audio_output.dart';
import '../widgets/message_widget.dart';
import '../utils/audio_player.dart';
import '../utils/audio_recorder.dart';

class BidiPage extends StatefulWidget {
const BidiPage({super.key, required this.title, required this.model});
Expand Down Expand Up @@ -48,11 +48,9 @@ class _BidiPageState extends State<BidiPage> {
bool _recording = false;
late LiveGenerativeModel _liveModel;
late LiveSession _session;
final _audioManager = AudioStreamManager();
final _audioRecorder = InMemoryAudioRecorder();
var _chunkBuilder = BytesBuilder();
var _audioIndex = 0;
StreamController<bool> _stopController = StreamController<bool>();
final AudioOutput audioOutput = AudioOutput();
final AudioInput audioInput = AudioInput();

@override
void initState() {
Expand All @@ -65,13 +63,20 @@ class _BidiPageState extends State<BidiPage> {
],
);

// ignore: deprecated_member_use
_liveModel = FirebaseAI.vertexAI().liveGenerativeModel(
model: 'gemini-2.0-flash-exp',
liveGenerationConfig: config,
tools: [
Tool.functionDeclarations([lightControlTool]),
],
);
initAudioOutput();
}

Future<void> initAudioOutput() async {
Comment thread
cynthiajoan marked this conversation as resolved.
Outdated
await audioOutput.init();
await audioInput.init();
}

void _scrollDown() {
Expand All @@ -89,13 +94,7 @@ class _BidiPageState extends State<BidiPage> {
@override
void dispose() {
if (_sessionOpening) {
_audioManager.stopAudioPlayer();
_audioManager.disposeAudioPlayer();

_audioRecorder.stopRecording();

_stopController.close();

_sessionOpening = false;
_session.close();
}
Expand Down Expand Up @@ -243,8 +242,6 @@ class _BidiPageState extends State<BidiPage> {
await _stopController.close();

await _session.close();
await _audioManager.stopAudioPlayer();
await _audioManager.disposeAudioPlayer();
_sessionOpening = false;
}

Expand All @@ -258,21 +255,25 @@ class _BidiPageState extends State<BidiPage> {
_recording = true;
});
try {
await _audioRecorder.checkPermission();
final audioRecordStream = _audioRecorder.startRecordingStream();
var inputStream = await audioInput.startRecordingStream();
await audioOutput.playStream();
// Map the Uint8List stream to InlineDataPart stream
final mediaChunkStream = audioRecordStream.map((data) {
return InlineDataPart('audio/pcm', data);
});
await _session.sendMediaStream(mediaChunkStream);
if (inputStream != null) {
Stream<InlineDataPart> inlineDataStream = inputStream.map((data) {
Comment thread
cynthiajoan marked this conversation as resolved.
Outdated
return InlineDataPart('audio/pcm', data);
});

await _session.sendMediaStream(inlineDataStream);
}
} catch (e) {
print(e);
Comment thread
cynthiajoan marked this conversation as resolved.
Outdated
_showError(e.toString());
}
}

Future<void> _stopRecording() async {
try {
await _audioRecorder.stopRecording();
await audioInput.stopRecording();
} catch (e) {
_showError(e.toString());
}
Expand Down Expand Up @@ -335,11 +336,8 @@ class _BidiPageState extends State<BidiPage> {
if (message.modelTurn != null) {
await _handleLiveServerContent(message);
}
if (message.turnComplete != null && message.turnComplete!) {
await _handleTurnComplete();
}
if (message.interrupted != null && message.interrupted!) {
log('Interrupted: $response');
developer.log('Interrupted: $response');
}
} else if (message is LiveServerToolCall && message.functionCalls != null) {
await _handleLiveServerToolCall(message);
Expand All @@ -355,7 +353,7 @@ class _BidiPageState extends State<BidiPage> {
} else if (part is InlineDataPart) {
await _handleInlineDataPart(part);
} else {
log('receive part with type ${part.runtimeType}');
developer.log('receive part with type ${part.runtimeType}');
}
}
}
Expand All @@ -376,29 +374,7 @@ class _BidiPageState extends State<BidiPage> {

Future<void> _handleInlineDataPart(InlineDataPart part) async {
if (part.mimeType.startsWith('audio')) {
_chunkBuilder.add(part.bytes);
_audioIndex++;
if (_audioIndex == 15) {
Uint8List chunk = await audioChunkWithHeader(
_chunkBuilder.toBytes(),
24000,
);
_audioManager.addAudio(chunk);
_chunkBuilder.clear();
_audioIndex = 0;
}
}
}

Future<void> _handleTurnComplete() async {
if (_chunkBuilder.isNotEmpty) {
Uint8List chunk = await audioChunkWithHeader(
_chunkBuilder.toBytes(),
24000,
);
_audioManager.addAudio(chunk);
_audioIndex = 0;
_chunkBuilder.clear();
audioOutput.addAudioStream(part.bytes);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -170,23 +170,21 @@ class _FunctionCallingPageState extends State<FunctionCallingPage> {
final functionCalls = response.functionCalls.toList();
// When the model response with a function call, invoke the function.
if (functionCalls.isNotEmpty) {
final functionCall = functionCalls.first;
if (functionCall.name == 'fetchWeather') {
Map<String, dynamic> location =
functionCall.args['location']! as Map<String, dynamic>;
var date = functionCall.args['date']! as String;
var city = location['city'] as String;
var state = location['state'] as String;
final functionResult = await fetchWeather(Location(city, state), date);
// Send the response to the model so that it can use the result to
// generate text for the user.
response = await functionCallChat.sendMessage(
Content.functionResponse(functionCall.name, functionResult),
);
} else {
throw UnimplementedError(
'Function not declared to the model: ${functionCall.name}',
);
for (final functionCall in functionCalls) {
Comment thread
cynthiajoan marked this conversation as resolved.
Outdated
if (functionCall.name == 'fetchWeather') {
Map<String, dynamic> location =
functionCall.args['location']! as Map<String, dynamic>;
var date = functionCall.args['date']! as String;
var city = location['city'] as String;
var state = location['state'] as String;
final functionResult =
await fetchWeather(Location(city, state), date);
// Send the response to the model so that it can use the result to
// generate text for the user.
response = await functionCallChat.sendMessage(
Content.functionResponse(functionCall.name, functionResult),
);
}
}
}
// When the model responds with non-null text content, print it.
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import 'package:flutter/material.dart';
import 'package:record/record.dart';
import 'dart:typed_data';

/// Manages microphone capture as a raw PCM16 byte stream for forwarding to a
/// live model session.
///
/// Call [init] (which verifies microphone permission) before starting a
/// recording stream. Listeners are notified whenever the recording or pause
/// state changes.
class AudioInput extends ChangeNotifier {
  final _recorder = AudioRecorder();

  // Raw 16-bit PCM so captured chunks can be sent directly as `audio/pcm`.
  final AudioEncoder _encoder = AudioEncoder.pcm16bits;

  /// Whether a recording stream is currently active.
  bool isRecording = false;

  /// Whether an active recording is currently paused.
  bool isPaused = false;

  /// The most recently started audio stream, if any.
  Stream<Uint8List>? audioStream;

  /// Verifies microphone permission.
  ///
  /// Throws [MicrophonePermissionDeniedException] if permission is denied.
  Future<void> init() async {
    await checkPermission();
  }

  @override
  void dispose() {
    _recorder.dispose();
    super.dispose();
  }

  /// Throws [MicrophonePermissionDeniedException] when the app does not have
  /// microphone permission.
  Future<void> checkPermission() async {
    final hasPermission = await _recorder.hasPermission();
    if (!hasPermission) {
      throw MicrophonePermissionDeniedException(
        'App does not have mic permissions',
      );
    }
  }

  /// Starts capturing audio and returns the resulting byte stream.
  ///
  /// The stream emits 24 kHz mono PCM16 chunks with echo cancellation and
  /// noise suppression enabled.
  Future<Stream<Uint8List>?> startRecordingStream() async {
    var recordConfig = RecordConfig(
      encoder: _encoder,
      sampleRate: 24000,
      numChannels: 1,
      echoCancel: true,
      noiseSuppress: true,
      androidConfig: const AndroidRecordConfig(
        audioSource: AndroidAudioSource.voiceCommunication,
      ),
      iosConfig: const IosRecordConfig(categoryOptions: []),
    );
    // NOTE(review): result is discarded — presumably kept for its device
    // enumeration side effect; confirm whether this call is still needed.
    await _recorder.listInputDevices();
    audioStream = await _recorder.startStream(recordConfig);
    isRecording = true;
    notifyListeners();
    return audioStream;
  }

  /// Stops the active recording stream.
  Future<void> stopRecording() async {
    await _recorder.stop();
    isRecording = false;
    notifyListeners();
  }

  /// Toggles the recorder between paused and resumed states.
  Future<void> togglePause() async {
    if (isPaused) {
      await _recorder.resume();
      isPaused = false;
    } else {
      await _recorder.pause();
      isPaused = true;
    }
    notifyListeners();
  }
}

/// An exception thrown when microphone permission is denied or not granted.
class MicrophonePermissionDeniedException implements Exception {
  /// Creates a new [MicrophonePermissionDeniedException] with an optional
  /// [message] describing the denial.
  MicrophonePermissionDeniedException([this.message]);

  /// The optional message associated with the permission denial.
  final String? message;

  @override
  String toString() => 'MicrophonePermissionDeniedException: $message';
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import 'dart:typed_data';

import 'package:flutter_soloud/flutter_soloud.dart';

/// Plays streamed PCM audio (24 kHz mono) through a SoLoud buffer stream.
///
/// Call [init] once, then [playStream] to begin playback and
/// [addAudioStream] to append incoming audio chunks as they arrive.
class AudioOutput {
  /// The buffer stream that incoming audio chunks are appended to.
  ///
  /// Null until [init]/[setupNewStream] has run successfully.
  AudioSource? stream;

  /// Handle of the currently playing voice, if playback has started.
  SoundHandle? handle;

  /// Initializes the player and prepares a fresh buffer stream.
  Future<void> init() async {
    // 24 kHz mono matches the PCM format produced by the live session.
    await SoLoud.instance.init(sampleRate: 24000, channels: Channels.mono);
    await setupNewStream();
  }

  /// Stops any in-progress playback and allocates a new buffer stream.
  Future<void> setupNewStream() async {
    if (SoLoud.instance.isInitialized) {
      // Stop and clear any previous playback handle if it's still valid.
      await stopStream();

      stream = SoLoud.instance.setBufferStream(
        maxBufferSizeBytes:
            1024 * 1024 * 10, // 10MB of max buffer (not allocated)
        bufferingType: BufferingType.released,
        bufferingTimeNeeds: 0,
        onBuffering: (isBuffering, handle, time) {},
      );
      // Reset handle to null until the stream is played again.
      handle = null;
    }
  }

  /// Starts playback of the buffer stream and returns it.
  ///
  /// Returns null when no stream has been set up yet (e.g. [init] was not
  /// called or the player failed to initialize), instead of crashing on a
  /// null assertion.
  Future<AudioSource?> playStream() async {
    final source = stream;
    if (source == null) return null;
    handle = await SoLoud.instance.play(source);
    return source;
  }

  /// Stops playback (if any) and prepares a new stream for the next turn.
  Future<void> stopStream() async {
    final source = stream;
    final voice = handle;
    if (source != null &&
        voice != null &&
        SoLoud.instance.getIsValidVoiceHandle(voice)) {
      SoLoud.instance.setDataIsEnded(source);
      await SoLoud.instance.stop(voice);

      // Clear old stream, set up new session for next time.
      await setupNewStream();
    }
  }

  /// Appends a chunk of PCM bytes to the playback buffer.
  ///
  /// Silently drops the chunk when no stream is set up, rather than
  /// crashing on a null assertion.
  void addAudioStream(Uint8List audioChunk) {
    final source = stream;
    if (source != null) {
      SoLoud.instance.addAudioDataStream(source, audioChunk);
    }
  }
}
Loading