
FLUTTER VOICE MESSAGE RECORD AND PLAY

Dhananjayafdo
3 min read May 17, 2024

Used Packages:

record: ^5.0.5

audioplayers: ^6.0.0

Check the documentation for each package:

record: https://pub.dev/packages/record
audioplayers: https://pub.dev/packages/audioplayers
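
For reference, a minimal pubspec.yaml dependencies section for this setup could look like the sketch below. The code in this article also calls getTemporaryDirectory(), which comes from the path_provider package, so it is added here as well; its version is only an assumption, use whatever pub.dev currently suggests. Also remember the platform setup described in the record package docs (the RECORD_AUDIO permission on Android and NSMicrophoneUsageDescription on iOS).

dependencies:
  flutter:
    sdk: flutter
  record: ^5.0.5
  audioplayers: ^6.0.0
  path_provider: ^2.1.3 # assumed version, needed for getTemporaryDirectory()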

Create a new file and paste in the code below. The widget keeps no state of its own; everything is passed in from the parent screen and updated through the ValueSetter callbacks:

import 'dart:developer';
import 'dart:io';

import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/material.dart';
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';

class VoiceNoteRecorder extends StatefulWidget {

// All state is owned by the parent and passed in together with the callbacks used to update it.
final String audioPath;
final ValueSetter<String> setAudioPath;
final String audioError;
final ValueSetter<String> setError;
final bool isRecording;
final ValueSetter<bool> setRecording;
final bool isRecordingFinish;
final ValueSetter<bool> setRecordingFinish;
final bool isPlaying;
final ValueSetter<bool> setPlaying;
final AudioRecorder audioRecording;
final AudioPlayer audioPlayer;

VoiceNoteRecorder({
super.key,
required this.audioPath,
required this.audioError,
required this.isPlaying,
required this.isRecording,
required this.isRecordingFinish,
required this.audioPlayer,
required this.audioRecording,
required this.setRecording,
required this.setRecordingFinish,
required this.setPlaying,
required this.setAudioPath,
required this.setError,
});

@override
State<VoiceNoteRecorder> createState() => _VoiceNoteRecorderState();
}

class _VoiceNoteRecorderState extends State<VoiceNoteRecorder> {
// Ask for microphone permission and record into a temporary file.
Future<void> startRecording() async {
try {
log('START RECORD');
if (await widget.audioRecording.hasPermission()) {
widget.setError('');
final Directory tempDir = await getTemporaryDirectory();

await widget.audioRecording.start(const RecordConfig(echoCancel: true, noiseSuppress: true), path: "${tempDir.path}/recordings");

widget.setRecording(true);
} else {
// Permission was denied; asking again re-triggers the system prompt where the platform allows it.
await widget.audioRecording.hasPermission();
}
} catch (e) {
log('AUDIO RECORDER START ERROR $e');
}
}

// Stop the recorder and hand the resulting file path back to the parent.
Future<void> finishRecording() async {
try {
log('FINISH RECORD');
String? stop = await widget.audioRecording.stop();
if (stop == null) return;
widget.setAudioPath(stop);
log(stop.toString());

widget.setRecordingFinish(true);
} catch (e) {
log('AUDIO RECORDER STOP ERROR $e');
}
}

// Discard the current recording and reset all state.
Future<void> cancelRecording() async {
try {
log('CANCEL RECORD');
await widget.audioRecording.cancel();
widget.audioPlayer.pause();
widget.setAudioPath('');

widget.setRecordingFinish(false);
widget.setRecording(false);
widget.setPlaying(false);
} catch (e) {
log('AUDIO RECORDER CANCEL ERROR $e');
}
}

// Play back the recorded note. DeviceFileSource is used because audioPath is a
// local file path (UrlSource is meant for remote URLs).
Future<void> playAudio() async {
try {
if (!widget.isRecordingFinish || widget.audioPath.isEmpty) return;
log('PLAY RECORD');
log('PLAY RECORD PATH => ${widget.audioPath}');
Source fileSource = DeviceFileSource(widget.audioPath);
await widget.audioPlayer.play(fileSource);

widget.setPlaying(true);
widget.audioPlayer.onPlayerComplete.listen((event) {
widget.setPlaying(false);
});
} catch (e) {
log('AUDIO PLAYER ERROR => $e');
}
}

Future<void> pauseAudio() async {
try {
log('PAUSE RECORD');
await widget.audioPlayer.pause();
widget.setPlaying(false);
} catch (e) {
log('PAUSE AUDIO ERROR => $e');
}
}

// widthUsingMQ, spacerHor, and ThemeSettings used below are small helpers from the
// author's own project (MediaQuery-based width, horizontal spacing, theme colours); swap in your own equivalents.
@override
Widget build(BuildContext context) {
return Padding(
padding: const EdgeInsets.only(bottom: 10),
child: Column(
children: [
Row(
children: [
SizedBox(width: widthUsingMQ(context, 0.25), child: Text('Voice Note', style: Theme.of(context).textTheme.bodySmall)),
Text(':', style: Theme.of(context).textTheme.bodySmall),
Expanded(
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Padding(
padding: EdgeInsets.symmetric(horizontal: widthUsingMQ(context, 0.02)),
child: (() {
if (!widget.isRecording) {
return startRecordingWidget();
}

if (widget.isRecording && !widget.isRecordingFinish) {
return recordingWidget();
}

if (widget.isRecordingFinish) {
return recordingFinishWidget();
}
}()),
),
widget.audioError.isNotEmpty
? Padding(
padding: const EdgeInsets.only(left: 15, top: 5),
child: Text(widget.audioError, style: Theme.of(context).textTheme.bodySmall!.copyWith(color: Colors.redAccent, fontSize: 12)),
)
: const SizedBox(),
],
),
),
],
),
],
),
);
}

// Small round action button used for the confirm / cancel / delete icons.
Widget circleIcon({required Color color, required IconData icon, required void Function()? onTap}) => GestureDetector(
onTap: onTap,
child: Container(
height: 35,
width: 35,
decoration: BoxDecoration(color: color, shape: BoxShape.circle),
child: Icon(icon, color: ThemeSettings.white),
),
);

// Idle state: a single mic button that starts a new recording.
Widget startRecordingWidget() {
return GestureDetector(
onTap: () async => await startRecording(),
child: Container(
height: 50,
decoration: BoxDecoration(color: ThemeSettings.white, borderRadius: BorderRadius.circular(40)),
alignment: Alignment.center,
child: const Icon(Icons.mic),
),
);
}

// Recording state: a "Recording..." bar with confirm and cancel buttons.
Widget recordingWidget() {
return Row(
mainAxisSize: MainAxisSize.max,
children: [
Expanded(
child: Container(
height: 50,
decoration: BoxDecoration(color: ThemeSettings.white, borderRadius: BorderRadius.circular(40)),
alignment: Alignment.center,
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: [
const Icon(Icons.record_voice_over_rounded),
SizedBox(width: widthUsingMQ(context, 0.22), child: const FittedBox(child: Text('Recording.....'))),
],
),
),
),
spacerHor(5),
circleIcon(color: Colors.green, icon: Icons.check, onTap: () async => await finishRecording()),
spacerHor(5),
circleIcon(color: Colors.redAccent, icon: Icons.close, onTap: () async => await cancelRecording()),
spacerHor(5),
],
);
}

// Finished state: play back the captured note or delete it.
Widget recordingFinishWidget() {
return GestureDetector(
onTap: () {
setState(() {
// isRecording = true;
});
},
child: Row(
children: [
Expanded(
child: Container(
height: 50,
decoration: BoxDecoration(color: ThemeSettings.white, borderRadius: BorderRadius.circular(40)),
alignment: Alignment.center,
child: voicePlayer(),
),
),
spacerHor(5),
circleIcon(
color: Colors.redAccent,
icon: Icons.delete,
onTap: () => cancelRecording(),
),
],
),
);
}

// Play / pause toggle for the finished recording.
Widget voicePlayer() {
return GestureDetector(
onTap: () async => widget.isPlaying && widget.isRecordingFinish ? await pauseAudio() : await playAudio(),
child: Icon(widget.isPlaying && widget.isRecordingFinish ? Icons.pause : Icons.play_arrow),
);
}
}

Create another new file and paste in the code below. This screen owns all of the recorder and player state and passes it down to VoiceNoteRecorder:

import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/material.dart';
import 'package:record/record.dart';

// Also import the file that contains the VoiceNoteRecorder widget from the previous step.

class VoiceNoteShowerScreen extends StatefulWidget {

const VoiceNoteShowerScreen({super.key});

@override
State<VoiceNoteShowerScreen> createState() => _VoiceNoteShowerScreenState();
}

class _VoiceNoteShowerScreenState extends State<VoiceNoteShowerScreen> {
late AudioRecorder audioRecorder;
late AudioPlayer audioPlayer;

bool isRecording = false;
bool isRecordingFinish = false;
bool isPlaying = false;

String audioPath = '';

String audioError = '';

@override
void initState() {
super.initState();
// One recorder and one player instance for the lifetime of this screen.
audioRecorder = AudioRecorder();
audioPlayer = AudioPlayer();
}

@override
void dispose() {
// Release the native player and recorder resources before the state goes away.
audioPlayer.dispose();
audioRecorder.dispose();
super.dispose();
}

@override
Widget build(BuildContext context) {
return Scaffold(
body: Column(
crossAxisAlignment: CrossAxisAlignment.center,
mainAxisAlignment: MainAxisAlignment.center,
children: [
Padding(
padding: const EdgeInsets.symmetric(horizontal: 20),
child: VoiceNoteRecorder(
audioPath: audioPath,
setAudioPath: (value) => setState(() => audioPath = value),
audioError: audioError,
audioPlayer: audioPlayer,
audioRecording: audioRecorder,
isPlaying: isPlaying,
isRecording: isRecording,
isRecordingFinish: isRecordingFinish,
setRecording: (value) => setState(() => isRecording = value),
setRecordingFinish: (value) => setState(() => isRecordingFinish = value),
setPlaying: (value) => setState(() => isPlaying = value),
setError: (value) => setState(() => audioError = value),
),
),
],
),
);
}
}

Enjoy!

https://drive.google.com/file/d/1ASrMsWMubIGckgKWt5ufiYuluwDmWK2z/view?usp=drive_link
