// Installation instructions: https://flutter-sound.canardoux.xyz/flutter_sound_install.html
// Required Android permissions (add to android/app/src/main/AndroidManifest.xml):
//   <uses-permission android:name="android.permission.RECORD_AUDIO" />
//   <uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
/*
 * Copyright 2018, 2019, 2020, 2021 Dooboolab.
 *
 * This file is part of Flutter-Sound.
 *
 * Flutter-Sound is free software: you can redistribute it and/or modify
 * it under the terms of the Mozilla Public License version 2 (MPL2.0),
 * as published by the Mozilla organization.
 *
 * Flutter-Sound is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * MPL General Public License for more details.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

import 'dart:async';
import 'dart:io';
import 'dart:math';
import 'dart:typed_data' show Uint8List;

import 'package:audio_session/audio_session.dart';
import 'package:flutter/foundation.dart' show kIsWeb;
import 'package:flutter/material.dart';
import 'package:flutter/services.dart' show rootBundle;
import 'package:flutter_sound/flutter_sound.dart';
import 'package:intl/date_symbol_data_local.dart';
import 'package:intl/intl.dart' show DateFormat;
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';

/// Sample rate used when recording with a compressed codec.
const int tSAMPLERATE = 8000;

/// Sample rate used for Streams
const int tSTREAMSAMPLERATE = 44000; // 44100 does not work for recorder on iOS

/// Block size (in bytes) used when feeding a player from a data stream.
const int tBLOCKSIZE = 4096;

/// The kind of media source used by this demo.
enum Media {
  buffer,
}

/// Possible states of the recorder/player pair.
enum AudioState {
  isPlaying,
  isPaused,
  isStopped,
  isRecording,
  isRecordingPaused,
}

/// A simple demo page: record audio to a temporary file, then play it back
/// from an in-memory buffer, using Flutter-Sound.
class Demo extends StatefulWidget {
  @override
  _MyAppState createState() => _MyAppState();
}

class _MyAppState extends State<Demo> {
  bool _isRecording = false;

  // One recorded-file path slot per codec, indexed by Codec.index.
  final List<String?> _path = List<String?>.filled(19, null);

  StreamSubscription? _recorderSubscription;
  StreamSubscription? _playerSubscription;
  StreamSubscription? _recordingDataSubscription;

  FlutterSoundPlayer playerModule = FlutterSoundPlayer();
  FlutterSoundRecorder recorderModule = FlutterSoundRecorder();

  // Displayed 'mm:ss:SS' timers for the recorder and player.
  String _recorderTxt = '00:00:00';
  String _playerTxt = '00:00:00';
  double? _dbLevel;

  double sliderCurrentPosition = 0.0;
  double maxDuration = 1.0;

  Media? _media = Media.buffer;
  Codec _codec = Codec.aacADTS;

  bool? _encoderSupported = true; // Optimist
  bool _decoderSupported = true; // Optimist

  StreamController<Food>? recordingDataController;
  IOSink? sink;

  /// (Re)opens the player and configures subscription intervals and codecs.
  Future<void> _initializeExample() async {
    await playerModule.closePlayer();
    await playerModule.openPlayer();
    await playerModule.setSubscriptionDuration(Duration(milliseconds: 10));
    await recorderModule.setSubscriptionDuration(Duration(milliseconds: 10));
    await initializeDateFormatting();
    await setCodec(_codec);
  }

  /// Requests the microphone permission (on mobile) and opens the recorder.
  ///
  /// On the web, falls back to Opus/WebM when the selected codec is not
  /// supported by the browser.
  Future<void> openTheRecorder() async {
    if (!kIsWeb) {
      var status = await Permission.microphone.request();
      if (status != PermissionStatus.granted) {
        throw RecordingPermissionException('Microphone permission not granted');
      }
    }
    await recorderModule.openRecorder();
    if (!await recorderModule.isEncoderSupported(_codec) && kIsWeb) {
      _codec = Codec.opusWebM;
    }
  }

  /// One-shot initialization: recorder, player, assets, and audio session.
  Future<void> init() async {
    await openTheRecorder();
    await _initializeExample();

    if ((!kIsWeb) && Platform.isAndroid) {
      await copyAssets();
    }

    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration(
      avAudioSessionCategory: AVAudioSessionCategory.playAndRecord,
      avAudioSessionCategoryOptions:
          AVAudioSessionCategoryOptions.allowBluetooth |
              AVAudioSessionCategoryOptions.defaultToSpeaker,
      avAudioSessionMode: AVAudioSessionMode.spokenAudio,
      avAudioSessionRouteSharingPolicy:
          AVAudioSessionRouteSharingPolicy.defaultPolicy,
      avAudioSessionSetActiveOptions: AVAudioSessionSetActiveOptions.none,
      androidAudioAttributes: const AndroidAudioAttributes(
        contentType: AndroidAudioContentType.speech,
        flags: AndroidAudioFlags.none,
        usage: AndroidAudioUsage.voiceCommunication,
      ),
      androidAudioFocusGainType: AndroidAudioFocusGainType.gain,
      androidWillPauseWhenDucked: true,
    ));
  }

  /// Copies a bundled asset into the app's resource directory (Android only).
  Future<void> copyAssets() async {
    var dataBuffer =
        (await rootBundle.load('assets/canardo.png')).buffer.asUint8List();
    var path = '${await playerModule.getResourcePath()}/assets';
    if (!await Directory(path).exists()) {
      await Directory(path).create(recursive: true);
    }
    await File('$path/canardo.png').writeAsBytes(dataBuffer);
  }

  @override
  void initState() {
    super.initState();
    init();
  }

  void cancelRecorderSubscriptions() {
    if (_recorderSubscription != null) {
      _recorderSubscription!.cancel();
      _recorderSubscription = null;
    }
  }

  void cancelPlayerSubscriptions() {
    if (_playerSubscription != null) {
      _playerSubscription!.cancel();
      _playerSubscription = null;
    }
  }

  void cancelRecordingDataSubscription() {
    if (_recordingDataSubscription != null) {
      _recordingDataSubscription!.cancel();
      _recordingDataSubscription = null;
    }
    recordingDataController = null;
    if (sink != null) {
      sink!.close();
      sink = null;
    }
  }

  @override
  void dispose() {
    // Release everything we own BEFORE calling super.dispose():
    // the Flutter framework requires super.dispose() to be the last call.
    cancelPlayerSubscriptions();
    cancelRecorderSubscriptions();
    cancelRecordingDataSubscription();
    releaseFlauto();
    super.dispose();
  }

  /// Closes both audio modules; errors are logged, not rethrown.
  Future<void> releaseFlauto() async {
    try {
      await playerModule.closePlayer();
      await recorderModule.closeRecorder();
    } on Exception {
      playerModule.logger.e('Released unsuccessful');
    }
  }

  /// Starts recording into a temporary file and begins updating the timer
  /// and dB level display.
  void startRecorder() async {
    try {
      // Request Microphone permission if needed
      if (!kIsWeb) {
        var status = await Permission.microphone.request();
        if (status != PermissionStatus.granted) {
          throw RecordingPermissionException(
              'Microphone permission not granted');
        }
      }

      var path = '';
      if (!kIsWeb) {
        var tempDir = await getTemporaryDirectory();
        path = '${tempDir.path}/flutter_sound${ext[_codec.index]}';
      } else {
        path = '_flutter_sound${ext[_codec.index]}';
      }

      if (_media == Media.buffer) {
        await recorderModule.startRecorder(
          toFile: path,
          codec: _codec,
          bitRate: 8000,
          numChannels: 1,
          sampleRate: (_codec == Codec.pcm16) ? tSTREAMSAMPLERATE : tSAMPLERATE,
        );
      }
      recorderModule.logger.d('startRecorder');

      _recorderSubscription = recorderModule.onProgress!.listen((e) {
        var date = DateTime.fromMillisecondsSinceEpoch(
            e.duration.inMilliseconds,
            isUtc: true);
        var txt = DateFormat('mm:ss:SS', 'en_GB').format(date);
        setState(() {
          _recorderTxt = txt.substring(0, 8);
          _dbLevel = e.decibels;
        });
      });

      setState(() {
        _isRecording = true;
        _path[_codec.index] = path;
      });
    } on Exception catch (err) {
      recorderModule.logger.e('startRecorder error: $err');
      // Clean up OUTSIDE of setState: setState callbacks should only
      // mutate state, not trigger async work or cancel subscriptions.
      stopRecorder();
      cancelRecordingDataSubscription();
      cancelRecorderSubscriptions();
      setState(() {
        _isRecording = false;
      });
    }
  }

  /// Stops the recorder and cancels the progress subscriptions.
  void stopRecorder() async {
    try {
      await recorderModule.stopRecorder();
      recorderModule.logger.d('stopRecorder');
      cancelRecorderSubscriptions();
      cancelRecordingDataSubscription();
    } on Exception catch (err) {
      recorderModule.logger.d('stopRecorder error: $err');
    }
    setState(() {
      _isRecording = false;
    });
  }

  Future<bool> fileExists(String path) async {
    return await File(path).exists();
  }

  // In this simple example, we just load a file in memory. This is stupid but
  // just for demonstration of startPlayerFromBuffer()
  Future<Uint8List?> makeBuffer(String path) async {
    try {
      if (!await fileExists(path)) return null;
      // NOTE: the original code also called file.openRead() here and
      // discarded the stream — a useless call that left a read stream
      // open; readAsBytes() alone is all that is needed.
      var contents = await File(path).readAsBytes();
      playerModule.logger.i('The file is ${contents.length} bytes long.');
      return contents;
    } on Exception catch (e) {
      playerModule.logger.e(e);
      return null;
    }
  }

  /// Subscribes to player progress events to drive the slider and timer.
  void _addListeners() {
    cancelPlayerSubscriptions();
    _playerSubscription = playerModule.onProgress!.listen((e) {
      maxDuration = e.duration.inMilliseconds.toDouble();
      if (maxDuration <= 0) maxDuration = 0.0;

      sliderCurrentPosition =
          min(e.position.inMilliseconds.toDouble(), maxDuration);
      if (sliderCurrentPosition < 0.0) {
        sliderCurrentPosition = 0.0;
      }

      var date = DateTime.fromMillisecondsSinceEpoch(e.position.inMilliseconds,
          isUtc: true);
      var txt = DateFormat('mm:ss:SS', 'en_GB').format(date);
      setState(() {
        _playerTxt = txt.substring(0, 8);
      });
    });
  }

  Future<Uint8List> _readFileByte(String filePath) async {
    var myUri = Uri.parse(filePath);
    var audioFile = File.fromUri(myUri);
    Uint8List bytes;
    var b = await audioFile.readAsBytes();
    bytes = Uint8List.fromList(b);
    playerModule.logger.d('reading of bytes is completed');
    return bytes;
  }

  Future<Uint8List> getAssetData(String path) async {
    var asset = await rootBundle.load(path);
    return asset.buffer.asUint8List();
  }

  final int blockSize = 4096;

  /// Feeds the player from a file, block by block, until the file is
  /// exhausted or the player is stopped.
  Future<void> feedHim(String path) async {
    var buffer = await _readFileByte(path);
    var lnData = 0;
    var totalLength = buffer.length;
    while (totalLength > 0 && !playerModule.isStopped) {
      var bsize = totalLength > blockSize ? blockSize : totalLength;
      // The await matters: feedFromStream applies back-pressure.
      await playerModule.feedFromStream(buffer.sublist(lnData, lnData + bsize));
      lnData += bsize;
      totalLength -= bsize;
    }
  }

  /// Plays back the last recording for the current codec from an in-memory
  /// buffer. Does nothing if no recording exists yet.
  Future<void> startPlayer() async {
    try {
      Uint8List? dataBuffer;
      var codec = _codec;

      if (_media == Media.buffer) {
        // Guard the path before the null assertion: a missing path would
        // otherwise throw a TypeError that 'on Exception' does not catch.
        var recordedPath = _path[codec.index];
        if (recordedPath != null && await fileExists(recordedPath)) {
          dataBuffer = await makeBuffer(recordedPath);
          if (dataBuffer == null) {
            throw Exception('Unable to create the buffer');
          }
        }
      }

      if (_media == Media.buffer && dataBuffer != null) {
        await playerModule.startPlayer(
            fromDataBuffer: dataBuffer,
            sampleRate: tSAMPLERATE,
            codec: codec,
            whenFinished: () {
              playerModule.logger.d('Play finished');
              setState(() {});
            });
      }

      _addListeners();
      setState(() {});
      playerModule.logger.d('<--- startPlayer');
    } on Exception catch (err) {
      playerModule.logger.e('error: $err');
    }
  }

  Future<void> stopPlayer() async {
    try {
      await playerModule.stopPlayer();
      playerModule.logger.d('stopPlayer');
      if (_playerSubscription != null) {
        await _playerSubscription!.cancel();
        _playerSubscription = null;
      }
      sliderCurrentPosition = 0.0;
    } on Exception catch (err) {
      playerModule.logger.d('error: $err');
    }
    setState(() {});
  }

  void pauseResumePlayer() async {
    try {
      if (playerModule.isPlaying) {
        await playerModule.pausePlayer();
      } else {
        await playerModule.resumePlayer();
      }
    } on Exception catch (err) {
      playerModule.logger.e('error: $err');
    }
    setState(() {});
  }

  void pauseResumeRecorder() async {
    try {
      if (recorderModule.isPaused) {
        await recorderModule.resumeRecorder();
      } else {
        await recorderModule.pauseRecorder();
        assert(recorderModule.isPaused);
      }
    } on Exception catch (err) {
      recorderModule.logger.e('error: $err');
    }
    setState(() {});
  }

  /// Seeks the player to [milliSecs] (only while playing).
  Future<void> seekToPlayer(int milliSecs) async {
    try {
      if (playerModule.isPlaying) {
        await playerModule.seekToPlayer(Duration(milliseconds: milliSecs));
      }
    } on Exception catch (err) {
      playerModule.logger.e('error: $err');
    }
    setState(() {});
  }

  /// Builds the media and codec selection dropdowns.
  Widget makeDropdowns(BuildContext context) {
    final mediaDropdown = Row(
      mainAxisAlignment: MainAxisAlignment.start,
      crossAxisAlignment: CrossAxisAlignment.center,
      children: <Widget>[
        Padding(
          padding: const EdgeInsets.only(right: 16.0),
          child: Text('Media:'),
        ),
        DropdownButton<Media>(
          value: _media,
          onChanged: (newMedia) {
            _media = newMedia;
            setState(() {});
          },
          items: <DropdownMenuItem<Media>>[
            DropdownMenuItem<Media>(
              value: Media.buffer,
              child: Text('Buffer'),
            ),
          ],
        ),
      ],
    );

    final codecDropdown = Row(
      mainAxisAlignment: MainAxisAlignment.start,
      crossAxisAlignment: CrossAxisAlignment.center,
      children: <Widget>[
        Padding(
          padding: const EdgeInsets.only(right: 16.0),
          child: Text('Codec:'),
        ),
        DropdownButton<Codec>(
          value: _codec,
          onChanged: (newCodec) {
            setCodec(newCodec!);
            _codec = newCodec;
            setState(() {});
          },
          items: <DropdownMenuItem<Codec>>[
            DropdownMenuItem<Codec>(
              value: Codec.aacADTS,
              child: Text('AAC/ADTS'),
            ),
            DropdownMenuItem<Codec>(
              value: Codec.pcm16WAV,
              child: Text('PCM16/WAV'),
            ),
          ],
        ),
      ],
    );

    return Padding(
      padding: const EdgeInsets.all(8.0),
      child: Column(
        mainAxisAlignment: MainAxisAlignment.start,
        crossAxisAlignment: CrossAxisAlignment.center,
        children: <Widget>[
          Padding(
            padding: const EdgeInsets.only(bottom: 16.0),
            child: mediaDropdown,
          ),
          codecDropdown,
        ],
      ),
    );
  }

  // The on...Pressed() helpers return null (disabling the button) when the
  // action is not currently available.

  void Function()? onPauseResumePlayerPressed() {
    if (playerModule.isPaused || playerModule.isPlaying) {
      return pauseResumePlayer;
    }
    return null;
  }

  void Function()? onPauseResumeRecorderPressed() {
    if (recorderModule.isPaused || recorderModule.isRecording) {
      return pauseResumeRecorder;
    }
    return null;
  }

  void Function()? onStopPlayerPressed() {
    return (playerModule.isPlaying || playerModule.isPaused)
        ? stopPlayer
        : null;
  }

  void Function()? onStartPlayerPressed() {
    if (_media == Media.buffer && kIsWeb) {
      return null;
    }
    // A file must be already recorded to play it
    if (_media == Media.buffer) {
      if (_path[_codec.index] == null) return null;
    }
    // Disable the button if the selected codec is not supported
    if (!(_decoderSupported || _codec == Codec.pcm16)) {
      return null;
    }
    return (playerModule.isStopped) ? startPlayer : null;
  }

  void startStopRecorder() {
    if (recorderModule.isRecording || recorderModule.isPaused) {
      stopRecorder();
    } else {
      startRecorder();
    }
  }

  void Function()? onStartRecorderPressed() {
    // Disable the button if the selected codec is not supported
    if (!_encoderSupported!) return null;
    return startStopRecorder;
  }

  AssetImage recorderAssetImage() {
    if (onStartRecorderPressed() == null) {
      return AssetImage('res/icons/ic_mic_disabled.png');
    }
    return (recorderModule.isStopped)
        ? AssetImage('res/icons/ic_mic.png')
        : AssetImage('res/icons/ic_stop.png');
  }

  /// Selects [codec] and refreshes the encoder/decoder support flags.
  Future<void> setCodec(Codec codec) async {
    _encoderSupported = await recorderModule.isEncoderSupported(codec);
    _decoderSupported = await playerModule.isDecoderSupported(codec);
    setState(() {
      _codec = codec;
    });
  }

  @override
  Widget build(BuildContext context) {
    final dropdowns = makeDropdowns(context);

    Widget recorderSection = Column(
        crossAxisAlignment: CrossAxisAlignment.center,
        mainAxisAlignment: MainAxisAlignment.center,
        children: <Widget>[
          Container(
            margin: EdgeInsets.only(top: 12.0, bottom: 16.0),
            child: Text(
              _recorderTxt,
              style: TextStyle(
                fontSize: 35.0,
                color: Colors.black,
              ),
            ),
          ),
          _isRecording
              ? LinearProgressIndicator(
                  value: 100.0 / 160.0 * (_dbLevel ?? 1) / 100,
                  valueColor: AlwaysStoppedAnimation<Color>(Colors.green),
                  backgroundColor: Colors.red)
              : Container(),
          Row(
            mainAxisAlignment: MainAxisAlignment.center,
            crossAxisAlignment: CrossAxisAlignment.center,
            children: <Widget>[
              Container(
                width: 56.0,
                height: 50.0,
                child: ClipOval(
                  child: TextButton(
                    onPressed: onStartRecorderPressed(),
                    child: Image(
                      image: recorderAssetImage(),
                    ),
                  ),
                ),
              ),
              Container(
                width: 56.0,
                height: 50.0,
                child: ClipOval(
                  child: TextButton(
                    onPressed: onPauseResumeRecorderPressed(),
                    child: Image(
                      width: 36.0,
                      height: 36.0,
                      image: AssetImage(onPauseResumeRecorderPressed() != null
                          ? 'res/icons/ic_pause.png'
                          : 'res/icons/ic_pause_disabled.png'),
                    ),
                  ),
                ),
              ),
            ],
          ),
        ]);

    Widget playerSection = Column(
      crossAxisAlignment: CrossAxisAlignment.center,
      mainAxisAlignment: MainAxisAlignment.center,
      children: <Widget>[
        Container(
          margin: EdgeInsets.only(top: 12.0, bottom: 16.0),
          child: Text(
            _playerTxt,
            style: TextStyle(
              fontSize: 35.0,
              color: Colors.black,
            ),
          ),
        ),
        Row(
          mainAxisAlignment: MainAxisAlignment.center,
          crossAxisAlignment: CrossAxisAlignment.center,
          children: <Widget>[
            Container(
              width: 56.0,
              height: 50.0,
              child: ClipOval(
                child: TextButton(
                  onPressed: onStartPlayerPressed(),
                  child: Image(
                    image: AssetImage(onStartPlayerPressed() != null
                        ? 'res/icons/ic_play.png'
                        : 'res/icons/ic_play_disabled.png'),
                  ),
                ),
              ),
            ),
            Container(
              width: 56.0,
              height: 50.0,
              child: ClipOval(
                child: TextButton(
                  onPressed: onPauseResumePlayerPressed(),
                  child: Image(
                    width: 36.0,
                    height: 36.0,
                    image: AssetImage(onPauseResumePlayerPressed() != null
                        ? 'res/icons/ic_pause.png'
                        : 'res/icons/ic_pause_disabled.png'),
                  ),
                ),
              ),
            ),
            Container(
              width: 56.0,
              height: 50.0,
              child: ClipOval(
                child: TextButton(
                  onPressed: onStopPlayerPressed(),
                  child: Image(
                    width: 28.0,
                    height: 28.0,
                    image: AssetImage(onStopPlayerPressed() != null
                        ? 'res/icons/ic_stop.png'
                        : 'res/icons/ic_stop_disabled.png'),
                  ),
                ),
              ),
            ),
          ],
        ),
        Container(
            height: 30.0,
            child: Slider(
                value: min(sliderCurrentPosition, maxDuration),
                min: 0.0,
                max: maxDuration,
                onChanged: (value) async {
                  await seekToPlayer(value.toInt());
                },
                divisions: maxDuration == 0.0 ? 1 : maxDuration.toInt())),
      ],
    );

    return Scaffold(
      appBar: AppBar(
        title: const Text('Flutter Sound Demo'),
      ),
      body: ListView(
        children: <Widget>[
          recorderSection,
          playerSection,
          dropdowns,
        ],
      ),
    );
  }
}