Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions melos.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -290,6 +290,10 @@ scripts:
--ignore "**/generated/**" \
--ignore "**/flutter/generated_plugin_registrant.h" \
--ignore "**/flutter/generated_plugin_registrant.cc" \
--ignore "**/android/app/build.gradle.kts" \
--ignore "**/android/build.gradle.kts" \
--ignore "**/android/settings.gradle.kts" \
--ignore "**/RunnerTests/RunnerTests.swift" \
.
description: Add a license header to all necessary files.

Expand Down Expand Up @@ -326,6 +330,10 @@ scripts:
--ignore "**/generated/**" \
--ignore "**/flutter/generated_plugin_registrant.h" \
--ignore "**/flutter/generated_plugin_registrant.cc" \
--ignore "**/android/app/build.gradle.kts" \
--ignore "**/android/build.gradle.kts" \
--ignore "**/android/settings.gradle.kts" \
--ignore "**/RunnerTests/RunnerTests.swift" \
.
description: Add a license header to all necessary files.

Expand Down
82 changes: 29 additions & 53 deletions packages/firebase_ai/firebase_ai/example/lib/pages/bidi_page.dart
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,15 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import 'dart:typed_data';
import 'dart:async';
import 'dart:developer';
import 'dart:developer' as developer;

import 'package:flutter/material.dart';
import 'package:firebase_ai/firebase_ai.dart';

import '../utils/audio_input.dart';
import '../utils/audio_output.dart';
import '../widgets/message_widget.dart';
import '../utils/audio_player.dart';
import '../utils/audio_recorder.dart';

class BidiPage extends StatefulWidget {
const BidiPage({super.key, required this.title, required this.model});
Expand Down Expand Up @@ -48,11 +48,9 @@ class _BidiPageState extends State<BidiPage> {
bool _recording = false;
late LiveGenerativeModel _liveModel;
late LiveSession _session;
final _audioManager = AudioStreamManager();
final _audioRecorder = InMemoryAudioRecorder();
var _chunkBuilder = BytesBuilder();
var _audioIndex = 0;
StreamController<bool> _stopController = StreamController<bool>();
final AudioOutput _audioOutput = AudioOutput();
final AudioInput _audioInput = AudioInput();

@override
void initState() {
Expand All @@ -65,13 +63,20 @@ class _BidiPageState extends State<BidiPage> {
],
);

// ignore: deprecated_member_use
_liveModel = FirebaseAI.vertexAI().liveGenerativeModel(
model: 'gemini-2.0-flash-exp',
liveGenerationConfig: config,
tools: [
Tool.functionDeclarations([lightControlTool]),
],
);
_initAudio();
}

Future<void> _initAudio() async {
await _audioOutput.init();
await _audioInput.init();
}

void _scrollDown() {
Expand All @@ -89,13 +94,7 @@ class _BidiPageState extends State<BidiPage> {
@override
void dispose() {
if (_sessionOpening) {
_audioManager.stopAudioPlayer();
_audioManager.disposeAudioPlayer();

_audioRecorder.stopRecording();

_stopController.close();

_sessionOpening = false;
_session.close();
}
Expand Down Expand Up @@ -234,7 +233,7 @@ class _BidiPageState extends State<BidiPage> {
_sessionOpening = true;
_stopController = StreamController<bool>();
unawaited(
processMessagesContinuously(
_processMessagesContinuously(
stopSignal: _stopController,
),
);
Expand All @@ -243,8 +242,6 @@ class _BidiPageState extends State<BidiPage> {
await _stopController.close();

await _session.close();
await _audioManager.stopAudioPlayer();
await _audioManager.disposeAudioPlayer();
_sessionOpening = false;
}

Expand All @@ -258,21 +255,25 @@ class _BidiPageState extends State<BidiPage> {
_recording = true;
});
try {
await _audioRecorder.checkPermission();
final audioRecordStream = _audioRecorder.startRecordingStream();
var inputStream = await _audioInput.startRecordingStream();
await _audioOutput.playStream();
// Map the Uint8List stream to InlineDataPart stream
final mediaChunkStream = audioRecordStream.map((data) {
return InlineDataPart('audio/pcm', data);
});
await _session.sendMediaStream(mediaChunkStream);
if (inputStream != null) {
final inlineDataStream = inputStream.map((data) {
return InlineDataPart('audio/pcm', data);
});

await _session.sendMediaStream(inlineDataStream);
}
} catch (e) {
developer.log(e.toString());
_showError(e.toString());
}
}

Future<void> _stopRecording() async {
try {
await _audioRecorder.stopRecording();
await _audioInput.stopRecording();
} catch (e) {
_showError(e.toString());
}
Expand All @@ -298,7 +299,7 @@ class _BidiPageState extends State<BidiPage> {
});
}

Future<void> processMessagesContinuously({
Future<void> _processMessagesContinuously({
required StreamController<bool> stopSignal,
}) async {
bool shouldContinue = true;
Expand Down Expand Up @@ -335,11 +336,8 @@ class _BidiPageState extends State<BidiPage> {
if (message.modelTurn != null) {
await _handleLiveServerContent(message);
}
if (message.turnComplete != null && message.turnComplete!) {
await _handleTurnComplete();
}
if (message.interrupted != null && message.interrupted!) {
log('Interrupted: $response');
developer.log('Interrupted: $response');
}
} else if (message is LiveServerToolCall && message.functionCalls != null) {
await _handleLiveServerToolCall(message);
Expand All @@ -355,7 +353,7 @@ class _BidiPageState extends State<BidiPage> {
} else if (part is InlineDataPart) {
await _handleInlineDataPart(part);
} else {
log('receive part with type ${part.runtimeType}');
developer.log('receive part with type ${part.runtimeType}');
}
}
}
Expand All @@ -376,29 +374,7 @@ class _BidiPageState extends State<BidiPage> {

Future<void> _handleInlineDataPart(InlineDataPart part) async {
if (part.mimeType.startsWith('audio')) {
_chunkBuilder.add(part.bytes);
_audioIndex++;
if (_audioIndex == 15) {
Uint8List chunk = await audioChunkWithHeader(
_chunkBuilder.toBytes(),
24000,
);
_audioManager.addAudio(chunk);
_chunkBuilder.clear();
_audioIndex = 0;
}
}
}

Future<void> _handleTurnComplete() async {
if (_chunkBuilder.isNotEmpty) {
Uint8List chunk = await audioChunkWithHeader(
_chunkBuilder.toBytes(),
24000,
);
_audioManager.addAudio(chunk);
_audioIndex = 0;
_chunkBuilder.clear();
_audioOutput.addAudioStream(part.bytes);
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import 'package:flutter/material.dart';
import 'package:record/record.dart';
import 'dart:typed_data';

/// Manages microphone capture for streaming raw PCM audio.
///
/// Call [init] once before [startRecordingStream]; it verifies that the app
/// holds microphone permission and throws
/// [MicrophonePermissionDeniedException] otherwise.
class AudioInput extends ChangeNotifier {
  final _recorder = AudioRecorder();

  // 16-bit PCM matches the `audio/pcm` payloads the caller sends upstream.
  final AudioEncoder _encoder = AudioEncoder.pcm16bits;

  /// Whether a recording stream is currently active.
  bool isRecording = false;

  /// Whether an active recording is currently paused.
  bool isPaused = false;

  /// The most recent stream returned by [startRecordingStream], if any.
  Stream<Uint8List>? audioStream;

  /// Verifies microphone permission.
  ///
  /// Throws [MicrophonePermissionDeniedException] when permission is denied.
  Future<void> init() async {
    await _checkPermission();
  }

  @override
  void dispose() {
    _recorder.dispose();
    super.dispose();
  }

  /// Throws [MicrophonePermissionDeniedException] when the platform reports
  /// that microphone access has not been granted.
  Future<void> _checkPermission() async {
    final hasPermission = await _recorder.hasPermission();
    if (!hasPermission) {
      throw MicrophonePermissionDeniedException(
        'App does not have mic permissions',
      );
    }
  }

  /// Starts capturing mono 24 kHz PCM audio and returns the byte stream.
  ///
  /// Also stores the stream in [audioStream] and notifies listeners.
  Future<Stream<Uint8List>?> startRecordingStream() async {
    final recordConfig = RecordConfig(
      encoder: _encoder,
      sampleRate: 24000,
      numChannels: 1,
      echoCancel: true,
      noiseSuppress: true,
      androidConfig: const AndroidRecordConfig(
        audioSource: AndroidAudioSource.voiceCommunication,
      ),
      iosConfig: const IosRecordConfig(categoryOptions: []),
    );
    // NOTE(review): the device list is queried but the result is discarded;
    // confirm whether this call is needed (e.g. to warm up the plugin on some
    // platforms) before removing it.
    await _recorder.listInputDevices();
    audioStream = await _recorder.startStream(recordConfig);
    isRecording = true;
    notifyListeners();
    return audioStream;
  }

  /// Stops the active recording stream and notifies listeners.
  Future<void> stopRecording() async {
    await _recorder.stop();
    isRecording = false;
    notifyListeners();
  }

  /// Pauses a live recording, or resumes it when already paused.
  Future<void> togglePause() async {
    if (isPaused) {
      await _recorder.resume();
      isPaused = false;
    } else {
      await _recorder.pause();
      isPaused = true;
    }
    notifyListeners();
  }
}

/// Thrown when the app lacks (or was denied) microphone access.
class MicrophonePermissionDeniedException implements Exception {
  /// Creates an exception, optionally carrying a human-readable [message].
  MicrophonePermissionDeniedException([this.message]);

  /// Extra detail about the denial, or `null` when none was provided.
  final String? message;

  @override
  String toString() => 'MicrophonePermissionDeniedException: $message';
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import 'dart:typed_data';

import 'package:flutter_soloud/flutter_soloud.dart';

/// Plays back streamed PCM audio through SoLoud's buffer-stream API.
class AudioOutput {
  /// The active buffer stream that incoming audio chunks are appended to.
  AudioSource? stream;

  /// The handle of the currently playing voice, or `null` when not playing.
  SoundHandle? handle;

  /// Initializes the player (mono, 24 kHz) and prepares a fresh stream.
  Future<void> init() async {
    await SoLoud.instance.init(sampleRate: 24000, channels: Channels.mono);
    await setupNewStream();
  }

  /// Replaces the current buffer stream with a brand-new one.
  ///
  /// No-op until the player has been initialized via [init].
  Future<void> setupNewStream() async {
    if (!SoLoud.instance.isInitialized) return;

    // Make sure any sound from a previous session is fully stopped first.
    await stopStream();

    stream = SoLoud.instance.setBufferStream(
      maxBufferSizeBytes:
          1024 * 1024 * 10, // 10MB of max buffer (not allocated)
      bufferingType: BufferingType.released,
      bufferingTimeNeeds: 0,
      onBuffering: (isBuffering, handle, time) {},
    );

    // Nothing is playing yet; a handle only exists once playStream runs.
    handle = null;
  }

  /// Starts playback of the buffered stream and returns it.
  Future<AudioSource?> playStream() async {
    handle = await SoLoud.instance.play(stream!);
    return stream;
  }

  /// Stops playback (if any) and prepares a fresh stream for the next session.
  Future<void> stopStream() async {
    final activeStream = stream;
    final activeHandle = handle;
    if (activeStream == null || activeHandle == null) return;
    if (!SoLoud.instance.getIsValidVoiceHandle(activeHandle)) return;

    SoLoud.instance.setDataIsEnded(activeStream);
    await SoLoud.instance.stop(activeHandle);

    // Clear old stream, set up new session for next time.
    await setupNewStream();
  }

  /// Appends a chunk of PCM bytes to the playback buffer.
  void addAudioStream(Uint8List audioChunk) {
    SoLoud.instance.addAudioDataStream(stream!, audioChunk);
  }
}
Loading
Loading