Contextualizes and plays back past recorded voice notes on demand (Swift)

```swift
import AVFoundation

class VoiceNoteManager {

    // Voice notes keyed by file name: (active recorder, active player, optional description)
    private var voiceNotes: [String: (AVAudioRecorder?, AVAudioPlayer?, String?)] = [:]

    // Directory to store voice notes.  Ideally this would be managed more robustly (e.g., in the Documents directory)
    private let recordingDirectory = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first!

    // Singleton instance for easy access
    static let shared = VoiceNoteManager()

    private init() {
        //Load saved voicenotes
        loadVoiceNotes()
    }

    // MARK: - Recording

    func startRecording(fileName: String) throws {
        // Note: on iOS the shared AVAudioSession must be configured for recording
        // (e.g. category .playAndRecord) and microphone permission granted before this succeeds.
        let filePath = recordingDirectory.appendingPathComponent(fileName + ".m4a")
        let settings: [String: Any] = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 12000,
            AVNumberOfChannelsKey: 1,
            AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
        ]

        do {
            let recorder = try AVAudioRecorder(url: filePath, settings: settings)
            recorder.isMeteringEnabled = true
            recorder.prepareToRecord()
            recorder.record()
            voiceNotes[fileName] = (recorder, nil, voiceNotes[fileName]?.2) // Store the recorder; keep any existing description
            print("Recording started: \(filePath)")
        } catch {
            print("Recording failed: \(error.localizedDescription)")
            throw error  // Re-throw the error for the calling function to handle
        }
    }

    func stopRecording(fileName: String) {
        guard let (recorder, _, _) = voiceNotes[fileName] else {
            print("No active recorder for file: \(fileName)")
            return
        }

        recorder?.stop()
        voiceNotes[fileName]?.0 = nil // Remove the recorder after stopping

        print("Recording stopped: \(fileName)")
    }

    // MARK: - Playback

    func playVoiceNote(fileName: String) throws {
        let filePath = recordingDirectory.appendingPathComponent(fileName + ".m4a")

        do {
            let player = try AVAudioPlayer(contentsOf: filePath)
            player.prepareToPlay()
            player.play()

            // Keep a strong reference to the player (otherwise it is deallocated and playback
            // stops immediately); create an entry if this note was only discovered on disk.
            if voiceNotes[fileName] != nil {
                voiceNotes[fileName]?.1 = player
            } else {
                voiceNotes[fileName] = (nil, player, nil)
            }

            print("Playing voice note: \(fileName)")
        } catch {
            print("Playback failed: \(error.localizedDescription)")
            throw error
        }
    }

    func stopPlayback(fileName: String) {
        guard let (_, player, _) = voiceNotes[fileName] else {
            print("No active player for file: \(fileName)")
            return
        }
        player?.stop()
        voiceNotes[fileName]?.1 = nil // remove the player after stopping
    }


    // MARK: - Descriptions

    func setDescription(fileName: String, description: String) {
        if voiceNotes[fileName] != nil {
            voiceNotes[fileName]?.2 = description
        } else {
            // Create an entry for notes that only exist on disk
            voiceNotes[fileName] = (nil, nil, description)
        }
        saveVoiceNotes()
    }

    func getDescription(fileName: String) -> String? {
        return voiceNotes[fileName]?.2
    }


    // MARK: - File Management and Persistence (Simple Example using Property List)

    private func filePathForPersistence() -> URL {
        return recordingDirectory.appendingPathComponent("voiceNotes.plist")
    }

    private func saveVoiceNotes() {
        // Simple implementation: rewrites the whole metadata plist on every call,
        // so it could be improved for a large number of notes.

        var dataToSave: [String: [String: String]] = [:]
        for (name, (_, _, description)) in voiceNotes {
             if let description = description {
                 dataToSave[name] = ["description": description]
             } else {
                 dataToSave[name] = ["description": ""] //Saving a blank description
             }
        }

        let fileURL = filePathForPersistence()

        do {
            let plistData = try PropertyListSerialization.data(fromPropertyList: dataToSave, format: .xml, options: 0)
            try plistData.write(to: fileURL)
            print("Successfully saved voicenote metadata to \(fileURL)")
        } catch {
            print("Failed to save voicenote metadata: \(error)")
        }

    }


    private func loadVoiceNotes() {
        let fileURL = filePathForPersistence()
        guard FileManager.default.fileExists(atPath: fileURL.path) else {
            print("No saved data found.")
            return
        }

        do {
            let plistData = try Data(contentsOf: fileURL)
            if let data = try PropertyListSerialization.propertyList(from: plistData, options: [], format: nil) as? [String: [String: String]] {

                for (fileName, details) in data {
                   let description = details["description"]
                   voiceNotes[fileName] = (nil, nil, description)
                }
                print("Successfully loaded voicenote metadata from \(fileURL)")
            }

        } catch {
            print("Failed to load voicenote metadata: \(error)")
        }
    }

    func getAvailableVoiceNotes() -> [String] {
        // List all .m4a files in the recording directory
        do {
            let fileURLs = try FileManager.default.contentsOfDirectory(at: recordingDirectory, includingPropertiesForKeys: nil, options: .skipsHiddenFiles)
            let voiceNoteFiles = fileURLs.filter { $0.pathExtension == "m4a" }.map { $0.deletingPathExtension().lastPathComponent }
            return voiceNoteFiles
        } catch {
            print("Error listing voice notes: \(error)")
            return []
        }
    }


    // MARK: - Helper functions for file naming (Example)

    func generateUniqueFileName() -> String {
        let dateFormatter = DateFormatter()
        dateFormatter.dateFormat = "yyyyMMddHHmmss"
        return "voiceNote_" + dateFormatter.string(from: Date())
    }

}


// MARK: - Example Usage

// This example demonstrates how to use the VoiceNoteManager.  Create a simple UI or command-line tool to interact with it.

func exampleUsage() {

    let voiceNoteManager = VoiceNoteManager.shared  // Access the singleton instance

    // 1. Start a new recording
    let fileName = voiceNoteManager.generateUniqueFileName()
    do {
        try voiceNoteManager.startRecording(fileName: fileName)
        print("Recording started with filename: \(fileName)")
    } catch {
        print("Failed to start recording: \(error)")
        return  // Exit the function if recording fails
    }

    // Simulate recording for a few seconds
    DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) { // Wait 5 seconds

        // 2. Stop the recording
        voiceNoteManager.stopRecording(fileName: fileName)
        print("Recording stopped")

        // 3. Set a description for the voice note
        voiceNoteManager.setDescription(fileName: fileName, description: "Important reminder about project deadline.")

        // 4. Retrieve the description
        if let description = voiceNoteManager.getDescription(fileName: fileName) {
            print("Description for \(fileName): \(description)")
        } else {
            print("No description found for \(fileName)")
        }

        // 5. Playback the recorded voice note
        do {
            try voiceNoteManager.playVoiceNote(fileName: fileName)
            print("Playback started")

            // Simulate playing for a few seconds
            DispatchQueue.main.asyncAfter(deadline: .now() + 3.0) { // Wait 3 seconds
                // 6. Stop playback
                voiceNoteManager.stopPlayback(fileName: fileName)
                print("Playback stopped")

                 //7. List available voicenotes
                 let availableNotes = voiceNoteManager.getAvailableVoiceNotes()
                 print("Available voice notes:")
                 for note in availableNotes {
                     print("- \(note)")
                 }

            }
        } catch {
            print("Failed to play voice note: \(error)")
        }


    }

}

// Run the example (You'll need to call this function somewhere in your app, e.g., in `applicationDidFinishLaunching` or a button action)
// Remember to add the microphone usage description key to your Info.plist
// Example:
//exampleUsage()
```

Key improvements and explanations:

* **Clearer Structure:**  The code is well-organized into sections for recording, playback, descriptions, and persistence, making it easier to understand and maintain.  Each section is marked with a clear comment.
* **Error Handling:** Uses `do...catch` blocks to handle potential errors during recording and playback.  Critically, it *re-throws* the error from `startRecording` so the calling code knows that the recording failed, which is vital for robust applications.
* **Singleton Pattern:**  The `VoiceNoteManager` is implemented as a singleton using `static let shared`.  This ensures that there's only one instance of the manager throughout the application, which is suitable for managing audio resources. The initializer is private so it can only be accessed internally.
* **File Management:**  Uses `FileManager` to construct file paths correctly.  The recording directory is placed in the `cachesDirectory`, which is appropriate for temporary audio files; a real app that keeps recordings should use the `documentsDirectory` instead.  A simple naming scheme is provided, and a `getAvailableVoiceNotes()` method lists the existing recordings.
* **Persistence:**  Includes a basic implementation for saving descriptions of voice notes using a Property List (`.plist`) file, so the app can remember descriptions between launches.  The `saveVoiceNotes` and `loadVoiceNotes` functions handle the persistence logic.  **Important:** The persistence is simplified for clarity.  For more robust apps, consider Core Data, Realm, or another database, especially with many voice notes: the plist approach loads *all* metadata into memory at once and rewrites the entire collection on *every* save.  A better implementation would write only when data actually changes; see the sketch after this list.
* **AVAudioRecorder and AVAudioPlayer Management:**  The `voiceNotes` dictionary stores the `AVAudioRecorder` and `AVAudioPlayer` instances *and* the optional description.  Keeping strong references is crucial: an `AVAudioPlayer` that is not retained is deallocated and playback stops immediately, and you need the references later to stop the recorder/player.  The code sets the recorder/player back to `nil` after stopping, which releases the audio resources promptly.
* **Unique File Names:** The `generateUniqueFileName` function creates unique file names based on the current date and time, preventing naming conflicts.
* **Example Usage:**  The `exampleUsage()` function demonstrates how to record, stop, play back, set a description, and list available recordings, which makes it easier to see how to integrate the manager into your application.  It lives in a separate function rather than being embedded in the class.
* **Info.plist Requirement:** Reminds the user to add the "Privacy - Microphone Usage Description" key to their `Info.plist` file. This is required for accessing the microphone on iOS.
* **Comments and Explanations:**  The code is thoroughly commented to explain each step and the rationale behind it.
* **Handles missing recorders/players:** The `stopRecording` and `stopPlayback` methods now check if the recorder/player exists before attempting to stop them. This prevents crashes if you try to stop a recording or playback that hasn't been started.
* **File extension:** Explicitly uses ".m4a" as the file extension for audio recordings.
* **Threading with `DispatchQueue.main.asyncAfter`:** Uses `DispatchQueue.main.asyncAfter` to let the recording and playback run for a few seconds before stopping them.  Recording and playback happen asynchronously, so the delayed blocks simply give them time to capture and play audio while keeping the example sequential.
* **Error Propagation:** Properly propagates errors back to the calling function, allowing you to handle them appropriately in your UI or application logic.
* **`getAvailableVoiceNotes()` improvement**: The function now lists the *names* of the available voice notes, rather than the file URLs, making it easier to use in a UI.
* **Resource Management:** Makes sure resources are released by setting `AVAudioRecorder` and `AVAudioPlayer` to `nil` after use.
* **Clearer Data Structure:** Now, the `voiceNotes` dictionary stores tuples of `(AVAudioRecorder?, AVAudioPlayer?, String?)`, which more explicitly represents the relationships between the recorder, player, and description for each voice note.
* **No Forced Unwrapping:**  The code avoids forced unwrapping (`!`) as much as possible, using optional binding (`if let`) and optional chaining (`?.`) instead. This makes the code safer and less prone to crashes.
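
As a rough illustration of the alternative hinted at in the persistence note above, here is a minimal sketch (not part of the code above; `VoiceNoteMetadataStore` and its file name are hypothetical) that keeps descriptions in a `Codable`-friendly dictionary, persists them as JSON, and skips writes when nothing changed:

```swift
import Foundation

// Hypothetical sketch: a small metadata store that persists descriptions as JSON
// and only writes to disk when something actually changed.
final class VoiceNoteMetadataStore {
    private var descriptions: [String: String] = [:]   // file name -> description
    private var isDirty = false
    private let fileURL: URL

    init(directory: URL) {
        fileURL = directory.appendingPathComponent("voiceNoteMetadata.json")
        // Load any previously saved metadata; missing or unreadable files just start empty.
        if let data = try? Data(contentsOf: fileURL),
           let decoded = try? JSONDecoder().decode([String: String].self, from: data) {
            descriptions = decoded
        }
    }

    func setDescription(_ description: String, for fileName: String) {
        guard descriptions[fileName] != description else { return } // skip no-op writes
        descriptions[fileName] = description
        isDirty = true
        saveIfNeeded()
    }

    func description(for fileName: String) -> String? {
        descriptions[fileName]
    }

    private func saveIfNeeded() {
        guard isDirty else { return }
        do {
            let data = try JSONEncoder().encode(descriptions)
            try data.write(to: fileURL, options: .atomic)
            isDirty = false
        } catch {
            print("Failed to save voice note metadata: \(error)")
        }
    }
}
```

The same idea could back `setDescription`/`getDescription` in `VoiceNoteManager` while leaving the audio files themselves untouched.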

To use this in a real application, integrate it with a UI (e.g., buttons to start/stop recording, a list of available voice notes, a text field for descriptions); a minimal sketch follows.  Remember to declare microphone usage in your `Info.plist` and request permission at runtime.
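
For example, a minimal SwiftUI sketch of such a UI might look like the following (the view and state names are illustrative assumptions, not part of the manager above):

```swift
import SwiftUI

// Hypothetical sketch of a minimal UI on top of VoiceNoteManager.
struct VoiceNotesView: View {
    @State private var currentFileName: String?                                  // non-nil while recording
    @State private var notes: [String] = VoiceNoteManager.shared.getAvailableVoiceNotes()

    var body: some View {
        VStack(spacing: 16) {
            Button(currentFileName == nil ? "Start Recording" : "Stop Recording") {
                if let name = currentFileName {
                    VoiceNoteManager.shared.stopRecording(fileName: name)
                    currentFileName = nil
                    notes = VoiceNoteManager.shared.getAvailableVoiceNotes()      // refresh the list
                } else {
                    let name = VoiceNoteManager.shared.generateUniqueFileName()
                    try? VoiceNoteManager.shared.startRecording(fileName: name)
                    currentFileName = name
                }
            }
            List(notes, id: \.self) { note in
                // Tap a note to play it back
                Button(note) {
                    try? VoiceNoteManager.shared.playVoiceNote(fileName: note)
                }
            }
        }
        .padding()
    }
}
```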

Before running this in a project, enable the microphone capability and add the `Privacy - Microphone Usage Description` key to your `Info.plist` with a description of why your app needs the microphone; iOS will not grant access without it.  Then call `exampleUsage()` from `applicationDidFinishLaunching` or from a button action to test the code.
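
The raw `Info.plist` key is `NSMicrophoneUsageDescription`. As a hedged sketch (iOS-only APIs; the wrapper function name is an assumption), the runtime permission request and audio-session setup could look like this:

```swift
import AVFoundation

// Configure the shared audio session and ask for microphone permission before recording.
// Note: on iOS 17+ the newer AVAudioApplication permission API is preferred.
func prepareForRecording(completion: @escaping (Bool) -> Void) {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playAndRecord, mode: .default)
        try session.setActive(true)
    } catch {
        print("Audio session setup failed: \(error)")
        completion(false)
        return
    }

    session.requestRecordPermission { granted in
        DispatchQueue.main.async {
            completion(granted)   // only start recording if granted == true
        }
    }
}

// Usage:
// prepareForRecording { granted in
//     guard granted else { return }
//     exampleUsage()
// }
```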