flutter, audio-player, custom-widgets

Recorded audio file isn't displayed in a custom audio recorder/player widget


I'm new to Flutter and I'm trying to build a custom audio recorder/player widget with the audio_waveforms package. Here is my full code:

import 'package:flutter/material.dart';
import 'package:audio_waveforms/audio_waveforms.dart';
import 'package:path_provider/path_provider.dart';
import 'dart:io';
import 'dart:async';

void main() => runApp(const MyApp());

class MyApp extends StatelessWidget {
  const MyApp({super.key});

  @override
  Widget build(BuildContext context) {
    return const MaterialApp(
      title: 'Audio Waveforms',
      debugShowCheckedModeBanner: false,
      home: AudioRecorder(),
    );
  }
}

class AudioRecorder extends StatefulWidget {
  const AudioRecorder({
    super.key,
    this.width,
    this.height,
  });

  final double? width;
  final double? height;

  @override
  State<AudioRecorder> createState() => _AudioRecorderState();
}

class _AudioRecorderState extends State<AudioRecorder> {
  late RecorderController _recorderController;
  late Timer _timer;
  int _recordDuration = 0;
  bool _isRecording = false;
  String? _filePath;
  late Directory appDirectory;

  @override
  void initState() {
    super.initState();
    _initialiseControllers();
  }

  void _initialiseControllers() {
    _recorderController = RecorderController()
      ..androidEncoder = AndroidEncoder.aac
      ..androidOutputFormat = AndroidOutputFormat.mpeg4
      ..iosEncoder = IosEncoder.kAudioFormatMPEG4AAC
      ..sampleRate = 44100;
  }

  @override
  void dispose() {
    _recorderController.dispose();
    if (_timer.isActive) {
      _timer.cancel();
    }
    super.dispose();
  }

  Future<void> _startRecording() async {
    Directory appDocDir = await getApplicationDocumentsDirectory();
    String appDocPath = appDocDir.path;
    _filePath =
        '$appDocPath/record_${DateTime.now().millisecondsSinceEpoch}.m4a';
    await _recorderController.record(path: _filePath);
    _timer = Timer.periodic(const Duration(seconds: 1), (timer) {
      setState(() {
        _recordDuration++;
      });
    });
    setState(() {
      _isRecording = true;
    });
  }

  Future<void> _pauseRecording() async {
    await _recorderController.pause();
    _timer.cancel();
    setState(() {
      _isRecording = false;
    });
    if (_filePath != null) {
      setState(() {});
    }
  }

  Future<void> _deleteRecording() async {
    if (_isRecording) {
      await _recorderController.stop();
      _timer.cancel();
    }
    if (_filePath != null) {
      final file = File(_filePath!);
      if (await file.exists()) {
        await file.delete();
      }
      setState(() {
        _filePath = null;
        _isRecording = false;
        _recordDuration = 0;
      });
    }
  }

  @override
  Widget build(BuildContext context) {
    return Column(
      mainAxisSize: MainAxisSize.max,
      mainAxisAlignment: MainAxisAlignment.spaceBetween,
      children: [
        Expanded(
          child: Container(
            decoration: const BoxDecoration(),
            child: Row(
              mainAxisSize: MainAxisSize.min,
              mainAxisAlignment: MainAxisAlignment.center,
              children: [
                if (_isRecording)
                  Padding(
                    padding: const EdgeInsetsDirectional.fromSTEB(4, 0, 4, 0),
                    child: Text(
                      'Recording Time: ${_formatDuration(_recordDuration)}',
                      style: const TextStyle(
                        fontFamily: 'Readex Pro',
                        letterSpacing: 0,
                        fontSize: 14,
                        color: Colors.white,
                      ),
                    ),
                  ),
                if (_isRecording)
                  AudioWaveforms(
                    enableGesture: true,
                    size: Size(MediaQuery.of(context).size.width / 2, 50),
                    recorderController: _recorderController,
                    waveStyle: const WaveStyle(
                      waveColor: Colors.white,
                      extendWaveform: true,
                      showMiddleLine: false,
                    ),
                    decoration: BoxDecoration(
                      borderRadius: BorderRadius.circular(12.0),
                      color: const Color(0xFF1E1B26),
                    ),
                    padding: const EdgeInsets.only(left: 18),
                    margin: const EdgeInsets.symmetric(horizontal: 15),
                  ),
                if (_isRecording == false && _filePath == null)
                  const Padding(
                    padding: EdgeInsetsDirectional.fromSTEB(0, 4, 0, 0),
                    child: Text(
                      'Tap mic to start recording',
                      style: TextStyle(
                        fontFamily: 'Readex Pro',
                        letterSpacing: 0,
                        fontSize: 14,
                        color: Colors.white,
                      ),
                    ),
                  ),
                if (_isRecording == false && _filePath != null)
                  AudioPlayer(audioFile: _filePath),
              ],
            ),
          ),
        ),
        Padding(
          padding: const EdgeInsetsDirectional.fromSTEB(0, 0, 0, 4),
          child: Row(
            mainAxisSize: MainAxisSize.max,
            mainAxisAlignment: MainAxisAlignment.spaceBetween,
            children: [
              Align(
                alignment: const AlignmentDirectional(1, -1),
                child: IconButton(
                  icon: const Icon(
                    Icons.delete_rounded,
                    color: Colors.red,
                    size: 24,
                  ),
                  onPressed: _isRecording || _filePath != null
                      ? _deleteRecording
                      : null,
                ),
              ),
              Row(
                mainAxisSize: MainAxisSize.max,
                children: [
                  if (_isRecording)
                    Padding(
                      padding: const EdgeInsetsDirectional.fromSTEB(0, 0, 4, 0),
                      child: IconButton(
                        icon: const Icon(
                          Icons.pause_rounded,
                          color: Colors.white,
                          size: 24,
                        ),
                        onPressed: () async {
                          await _pauseRecording();
                        },
                      ),
                    ),
                  if (_isRecording == false)
                    IconButton(
                      icon: const Icon(
                        Icons.mic_rounded,
                        color: Colors.white,
                        size: 24,
                      ),
                      onPressed: () async {
                        await _startRecording();
                      },
                    ),
                ],
              ),
            ],
          ),
        ),
      ],
    );
  }

  String _formatDuration(int seconds) {
    int min = seconds ~/ 60;
    int sec = seconds % 60;
    return '${min.toString().padLeft(2, '0')}:${sec.toString().padLeft(2, '0')}';
  }
}

class AudioPlayer extends StatefulWidget {
  const AudioPlayer({
    super.key,
    this.width,
    this.height,
    required this.audioFile,
  });

  final double? width;
  final double? height;
  final String? audioFile;

  @override
  _AudioPlayerState createState() => _AudioPlayerState();
}

class _AudioPlayerState extends State<AudioPlayer> {
  late PlayerController _playerController;
  late StreamSubscription<PlayerState> _playerStateSubscription;

  @override
  void initState() {
    super.initState();
    _playerController = PlayerController();
    _preparePlayer();
    _playerStateSubscription =
        _playerController.onPlayerStateChanged.listen((state) {
      setState(() {});
    });
  }

  void _preparePlayer() async {
    // this is how it's done in the package example
    /*file = File('${widget.appDirectory.path}/audio${widget.index}.mp3');
    await file?.writeAsBytes(
        (await rootBundle.load('assets/audios/audio${widget.index}.mp3'))
            .buffer
            .asUint8List());
    Directory appDocDir = await getApplicationDocumentsDirectory();
    String appDocPath = appDocDir.path;
    _filePath =
        '$appDocPath/record_${DateTime.now().millisecondsSinceEpoch}.m4a';*/

    // this is how I did
    await _playerController.preparePlayer(
        path: widget.audioFile!, shouldExtractWaveform: true);
  }

  @override
  void dispose() {
    _playerStateSubscription.cancel();
    _playerController.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Container(
      width: 300,
      height: 200,
      padding: const EdgeInsets.all(8),
      margin: const EdgeInsets.symmetric(vertical: 8, horizontal: 12),
      decoration: BoxDecoration(
        borderRadius: BorderRadius.circular(10),
        color: Colors.blueGrey,
      ),
      child: Row(
        mainAxisSize: MainAxisSize.min,
        children: [
          IconButton(
            onPressed: () async {
              if (_playerController.playerState.isPlaying) {
                await _playerController.pausePlayer();
              } else {
                await _playerController.startPlayer();
              }
            },
            icon: Icon(
              _playerController.playerState.isPlaying
                  ? Icons.pause
                  : Icons.play_arrow,
              color: Colors.white,
            ),
          ),
          AudioFileWaveforms(
            size: Size(MediaQuery.of(context).size.width / 2, 70),
            playerController: _playerController,
            waveformType: WaveformType.fitWidth,
            playerWaveStyle: const PlayerWaveStyle(
              fixedWaveColor: Colors.white54,
              liveWaveColor: Colors.white,
              spacing: 6,
            ),
          ),
        ],
      ),
    );
  }
}

The recording part works fine. What I want to achieve is that when the pause button is pressed, the recorded file is displayed for playback before it's uploaded to the database. The audio player widget does show up once recording is paused, but the recorded audio never loads into it, so the play/pause button in the player does nothing.

I assume something is wrong with how the audio file is stored, or maybe with the path that is handed to the player, but please feel free to point out anything else that's wrong as well.
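To narrow a problem like this down, a minimal sanity check such as the sketch below can be called right after pausing to confirm whether anything was actually written to _filePath. The helper name _debugCheckRecordedFile is made up for illustration; it only uses dart:io and debugPrint, both already available from the imports above.

// Debug-only sanity check: confirm the recorded file exists and is non-empty
// at the moment the player widget is about to be shown.
Future<void> _debugCheckRecordedFile() async {
  final path = _filePath;
  if (path == null) {
    debugPrint('No recording path stored yet');
    return;
  }
  final file = File(path);
  final exists = await file.exists();
  final length = exists ? await file.length() : 0;
  debugPrint('Recorded file: $path, exists: $exists, bytes: $length');
}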


Solution

  • Use _recorderController.stop(), which returns a Future that completes with the recorded file's path, instead of pause() when you want to hand the file to the player:


Future<void> _pauseRecording() async {
  _filePath = await _recorderController.stop();
  _timer.cancel();
  setState(() {
    _isRecording = false;
  });
  if (_filePath != null) {
    setState(() {});
  }
}
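
Note that stop() finalizes the recording session, so with this change the pause button effectively stops recording. If you still need a real pause and only want to finalize once the user is done, one variant (a sketch, not part of the original answer; the _stopRecording name is made up here) keeps the original _pauseRecording as it was and moves the stop() call into its own handler that feeds the returned path to the player:

Future<void> _stopRecording() async {
  // stop() completes with the path of the recorded file.
  final path = await _recorderController.stop();
  _timer.cancel();
  setState(() {
    _filePath = path;     // triggers the rebuild that shows AudioPlayer
    _isRecording = false;
  });
}

If you go this route, you would also want to stop pre-filling _filePath in _startRecording (keep the path in a local variable and pass it to record()), so the AudioPlayer widget only appears once the recording has actually been finalized.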