123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281 |
- import 'dart:async';
- import 'dart:io';
- import 'package:file/local.dart';
- import 'package:flutter/services.dart';
- import 'package:path/path.dart' as p;
/// Audio Recorder Plugin.
///
/// Wraps the `flutter_audio_recorder2` platform channel to record audio to a
/// file and to query recording state and microphone metering from the host
/// platform (Android/iOS).
class FlutterAudioRecorder {
  static const MethodChannel _channel =
      MethodChannel('flutter_audio_recorder2');

  /// Extension appended when the supplied path has no supported extension
  /// and no explicit [AudioFormat] was given.
  static const String DEFAULT_EXTENSION = '.m4a';

  /// File system abstraction; overridable (e.g. with a memory file system)
  /// for testing.
  static LocalFileSystem fs = LocalFileSystem();

  String? _path;
  String? _extension;
  Recording? _recording;
  String? _sampleRate;

  Future? _initRecorder;

  /// Completes once the platform-side recorder has been initialized.
  /// Await this before calling [start].
  Future? get initialized => _initRecorder;

  /// Latest known recording state, refreshed by [stop] and [current].
  Recording? get recording => _recording;

  /// Creates a recorder that writes to [path].
  ///
  /// [audioFormat], when given, overrides any extension found in [path].
  /// [sampleRate] is forwarded to the platform as a string (default "16000").
  FlutterAudioRecorder(String path,
      {AudioFormat? audioFormat, String sampleRate = "16000"}) {
    _initRecorder = _init(path, audioFormat, sampleRate);
  }

  /// Resolves the final path and extension, validates the target location,
  /// and asks the platform side to initialize the recorder.
  ///
  /// Throws an [Exception] if a file already exists at the resolved path or
  /// if its parent directory does not exist.
  Future _init(
      String? path, AudioFormat? audioFormat, String sampleRate) async {
    String extension;
    if (path != null) {
      // Extension (".xyz") currently present in the path, possibly empty.
      final extensionInPath = p.extension(path);
      if (audioFormat != null) {
        // An explicit AudioFormat wins over the extension in the path.
        if (_stringToAudioFormat(extensionInPath) != audioFormat) {
          extension = _audioFormatToString(audioFormat);
          path = p.withoutExtension(path) + extension;
        } else {
          extension = extensionInPath;
        }
      } else {
        // No explicit format: keep a supported extension from the path,
        // otherwise fall back to the default and append it.
        if (_isValidAudioFormat(extensionInPath)) {
          extension = extensionInPath;
        } else {
          extension = DEFAULT_EXTENSION;
          path += extension;
        }
      }
      File file = fs.file(path);
      if (await file.exists()) {
        throw Exception("A file already exists at the path :$path");
      } else if (!await file.parent.exists()) {
        throw Exception(
            "The specified parent directory does not exist ${file.parent}");
      }
    } else {
      extension = DEFAULT_EXTENSION; // default value
    }
    _path = path;
    _extension = extension;
    _sampleRate = sampleRate;

    var result = await _channel.invokeMethod('init',
        {"path": _path, "extension": _extension, "sampleRate": _sampleRate});
    // BUG FIX: the previous implementation declared `late Map response` and
    // only assigned it when `result != false`, so a `false` result from the
    // platform crashed with a LateInitializationError on the read below.
    // An empty map makes `response['status']` null, which maps to
    // RecordingStatus.Unset — behavior for non-false results is unchanged.
    final Map<String, Object> response =
        result != false ? Map.from(result) : <String, Object>{};
    _recording = Recording()
      ..status = _stringToRecordingStatus(response['status'] as String?)
      ..metering = AudioMetering(
          averagePower: -120, peakPower: -120, isMeteringEnabled: true);
    return;
  }

  /// Requests an initialized recording instance to be started.
  ///
  /// Once executed, audio recording starts and a file is generated in the
  /// user's file system.
  Future start() async {
    return _channel.invokeMethod('start');
  }

  /// Requests the currently running recording to be paused.
  ///
  /// Note: use [current] to get the latest state after pausing.
  Future pause() async {
    return _channel.invokeMethod('pause');
  }

  /// Requests a paused recording to continue.
  Future resume() async {
    return _channel.invokeMethod('resume');
  }

  /// Requests the recording to stop.
  ///
  /// Once stopped, the recording file is finalized and can no longer be
  /// started, resumed, or paused.
  Future<Recording?> stop() async {
    var result = await _channel.invokeMethod('stop');
    if (result != null) {
      _responseToRecording(Map<String, Object>.from(result));
    }
    return _recording;
  }

  /// Asks for the current status of the recording.
  ///
  /// Returns the latest recording state: metering level, duration, status…
  /// The state is not refreshed once the recording has stopped.
  Future<Recording?> current({int channel = 0}) async {
    var result = await _channel.invokeMethod('current', {"channel": channel});
    if (result != null && _recording?.status != RecordingStatus.Stopped) {
      _responseToRecording(Map<String, Object>.from(result));
    }
    return _recording;
  }

  /// Returns the result of the record-permission check.
  ///
  /// If not yet determined (first app launch), this asks the user whether to
  /// grant the permission.
  static Future<bool?> get hasPermissions async {
    bool? hasPermission = await _channel.invokeMethod('hasPermissions');
    return hasPermission;
  }

  /// util - copies a platform response map into [_recording].
  void _responseToRecording(Map<String, Object>? response) {
    if (response == null) return;
    _recording!.duration = Duration(milliseconds: response['duration'] as int);
    _recording!.path = response['path'] as String?;
    _recording!.audioFormat =
        _stringToAudioFormat(response['audioFormat'] as String?);
    // NOTE(review): `extension` is populated from the 'audioFormat' key,
    // not a dedicated 'extension' key — presumably the platform sends the
    // extension string under that name; confirm against the native side.
    _recording!.extension = response['audioFormat'] as String?;
    _recording!.metering = AudioMetering(
        peakPower: response['peakPower'] as double?,
        averagePower: response['averagePower'] as double?,
        isMeteringEnabled: response['isMeteringEnabled'] as bool?);
    _recording!.status =
        _stringToRecordingStatus(response['status'] as String?);
  }

  /// util - returns whether [extension] is a supported audio extension.
  static bool _isValidAudioFormat(String extension) {
    switch (extension) {
      case ".wav":
      case ".mp4":
      case ".aac":
      case ".m4a":
        return true;
      default:
        return false;
    }
  }

  /// util - converts an extension string to an [AudioFormat], or null if
  /// the extension is unsupported.
  static AudioFormat? _stringToAudioFormat(String? extension) {
    switch (extension) {
      case ".wav":
        return AudioFormat.WAV;
      case ".mp4":
      case ".aac":
      case ".m4a":
        return AudioFormat.AAC;
      default:
        return null;
    }
  }

  /// util - converts an [AudioFormat] to its canonical file extension.
  static String _audioFormatToString(AudioFormat format) {
    switch (format) {
      case AudioFormat.WAV:
        return ".wav";
      case AudioFormat.AAC:
        return ".m4a";
    }
  }

  /// util - converts a platform status string to a [RecordingStatus];
  /// unknown strings map to [RecordingStatus.Unset].
  static RecordingStatus _stringToRecordingStatus(String? status) {
    switch (status) {
      case "unset":
        return RecordingStatus.Unset;
      case "initialized":
        return RecordingStatus.Initialized;
      case "recording":
        return RecordingStatus.Recording;
      case "paused":
        return RecordingStatus.Paused;
      case "stopped":
        return RecordingStatus.Stopped;
      default:
        return RecordingStatus.Unset;
    }
  }
}
/// Recording Object - represents a recording file and its current state.
class Recording {
  /// Absolute path of the audio file on disk.
  String? path;

  /// File extension of the audio file (e.g. ".m4a", ".wav").
  String? extension;

  /// Elapsed duration of the recording.
  Duration? duration;

  /// Audio format of the file.
  AudioFormat? audioFormat;

  /// Microphone metering levels for the most recent sample window.
  AudioMetering? metering;

  /// Current status of the recording.
  RecordingStatus? status;
}
/// Audio Metering Level - describes the metering level of the microphone
/// while recording.
class AudioMetering {
  /// Peak power level over a given short duration.
  double? peakPower;

  /// Average power level over a given short duration.
  double? averagePower;

  /// Whether metering is enabled in the system.
  bool? isMeteringEnabled;

  AudioMetering({this.peakPower, this.averagePower, this.isMeteringEnabled});
}
/// Status of a recording session.
enum RecordingStatus {
  /// Recording not initialized.
  Unset,

  /// Ready to start recording.
  Initialized,

  /// Currently recording.
  Recording,

  /// Currently paused.
  Paused,

  /// This specific recording has stopped and cannot be started again.
  Stopped,
}
/// Audio format.
///
/// WAV is lossless audio, recommended.
enum AudioFormat {
  /// Advanced Audio Coding (written with an ".m4a" extension).
  AAC,

  /// Waveform audio (".wav"), lossless.
  WAV,
}
|