
iOS: AVAudioSession not working

I am trying to use AVAudioSession, but it throws the following runtime error: [avas] AVAudioSessionPortImpl.mm:56:ValidateRequiredFields: Unknown selected data source for Port Speaker (type Speaker). In case it helps, all I am trying to do is record audio and monitor the current decibel level. I set the category to AVAudioSessionCategoryRecord and the mode to AVAudioSessionModeMeasurement. Here is the code:

class ViewController: UIViewController {

    let captureSession = AVCaptureSession()
    var recording = false;
    var ready = false;

    let audioSession = AVAudioSession.sharedInstance()

    @IBOutlet public weak var dBLabel: UILabel!

    func alert(title: String, message: String = "", handler: ((UIAlertAction) -> Swift.Void)? = nil) -> Void {
        var usedMessage: String
        if(message.characters.count < 1) {
            usedMessage = title;
        } else {
            usedMessage = message;
        }
        let alert = UIAlertController(title: title, message: usedMessage, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Default action"), style: .default, handler: handler))
        self.present(alert, animated: true, completion: nil)
    }

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    func checkPermission()
    {
        switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeAudio)
        {
        case .authorized:
            NSLog("Authorized for Microphone Use")

        case .notDetermined:
            AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeAudio, completionHandler: { granted in
                self.checkPermission()
            })

        case .denied:
            let alert = UIAlertController(title: "Denied Access to Microphone", message: "You denied access to the microphone, please enable access in settings", preferredStyle: .alert)
            alert.addAction(UIAlertAction(title: "Go to Settings", style: .default, handler: { _ in
                guard let settingsUrl = URL(string: UIApplicationOpenSettingsURLString) else {
                    return
                }

                if UIApplication.shared.canOpenURL(settingsUrl) {
                    UIApplication.shared.open(settingsUrl, completionHandler: { (success) in
                    })
                }
            }))
            alert.addAction(UIAlertAction(title: NSLocalizedString("Cancel", comment: "Cancel"), style: .cancel, handler: nil))
            self.present(alert, animated: true, completion: nil)


        case .restricted:
            alert(title: "Restricted",message: "You cannot enable the microphone, so you cannot use the app", handler: { _ in
                NSLog("The \"OK\" alert occured.")
            })
        }
    }

    @IBAction func toggleRecord(_ sender: UIButton) {
        if(!ready)
        {return}

        NSLog("Toggled Recording")
        recording = !recording;
        if(recording)
        {
            sender.setImage(UIImage(named: "MicIconHighlighted.png"), for: .normal)
            sender.setImage(UIImage(named: "MicIconHighlightedSelected.png"), for: .highlighted)
            //captureSession.startRunning()
            do
            {
                try audioSession.setActive(true)
            } catch {
                NSLog("Activating AudioSession Failed")
            }
        } else {
            sender.setImage(UIImage(named: "MicIcon.png"), for: .normal)
            sender.setImage(UIImage(named: "MicIconSelected.png"), for: .highlighted)
            //captureSession.stopRunning()
            do
            {
                try audioSession.setActive(false)
            } catch {
                NSLog("Deactivating AudioSession Failed")
            }
        }
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        checkPermission()
        do
        {
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
        } catch {
            NSLog("Setting category on AudioSession failed")
        }

        do
        {
            try audioSession.setMode(AVAudioSessionModeMeasurement)
        } catch {
            NSLog("Setting mode on AudioSession failed")
        }

        do
        {
            try audioSession.overrideOutputAudioPort(AVAudioSessionPortOverride.speaker)
        } catch {
            NSLog("Failed Setting Audio Output Data Source")
        }


        //NSLog("debug info: \(audioSession.outputDataSources!.count)");

        /*captureSession.beginConfiguration()
        let audioDeviceInput: AVCaptureDeviceInput
        let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
        if(audioDevice != nil && (audioDevice?.isConnected)!) {
            NSLog("Audio Device Name: \(audioDevice!.localizedName)")
        } else {
            NSLog("AVCapture Device default audio device failed or device not connected")
        }

        do {
            audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
        } catch {
            alert(title: "Failed to create Capture Device",message: "Failed to create Capture Device", handler: nil)
            return
        }

        if(captureSession.canAddInput(audioDeviceInput))
        {
            captureSession.addInput(audioDeviceInput)
        } else {
            alert(title: "Failed to Add Input",message: "Failed to add Audio Input Device", handler: nil)
        }

        let audioOutput = AVCaptureAudioDataOutput()
        var audioRecorder = AVAudioRecorder()
        audioRecorder.
        var audioQueue = DispatchQueue(label: "audioqueue", attributes: .concurrent)
        audioOutput.setSampleBufferDelegate(AudioOutputSampleBufferDelegate(vc: self), queue: audioQueue)
        NSLog("Current Queue: \(audioOutput.sampleBufferCallbackQueue.description)")
        if(captureSession.canAddOutput(audioOutput))
        {
            captureSession.addOutput(audioOutput)
            captureSession.commitConfiguration()
        } else {
        alert(title: "Failed to Add Output",message: "Failed to add Audio Output Device", handler: nil)
        }*/

        ready = true
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
18 · Codekrafter

AVAudioSession changed in Swift 4.2.

In viewDidAppear(), try this:

    // Prepare Audio Session
    self.audioSession = AVAudioSession.sharedInstance()

    do {
        try audioSession.setCategory(AVAudioSession.Category.playAndRecord, mode: .measurement, options: .defaultToSpeaker)
        try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
    } catch {
        // configuring or activating the session failed
    }

It worked for me when I converted the language to Swift 4.2 in Xcode 10...

Edit: converted to the current Swift syntax...
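
The question's stated goal is to monitor the current decibel level, which the snippet above does not cover. Here is a minimal sketch of one common approach (not part of the answer): record with AVAudioRecorder, enable metering, and poll averagePower(forChannel:) on a timer. The scratch file URL, recorder settings, and 0.1 s interval are assumptions made for illustration.

    import AVFoundation

    // Minimal sketch, not from the answer: reads the current level via
    // AVAudioRecorder's metering API once the session has been configured
    // and activated as shown above.
    final class LevelMonitor {
        private var recorder: AVAudioRecorder?
        private var timer: Timer?

        func start() throws {
            let url = FileManager.default.temporaryDirectory
                .appendingPathComponent("metering.m4a")          // throwaway scratch file (assumed name)
            let settings: [String: Any] = [
                AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                AVSampleRateKey: 44_100,
                AVNumberOfChannelsKey: 1
            ]
            let recorder = try AVAudioRecorder(url: url, settings: settings)
            recorder.isMeteringEnabled = true                     // required before averagePower(forChannel:)
            guard recorder.record() else { return }               // start writing to the scratch file
            self.recorder = recorder

            timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
                guard let recorder = self?.recorder else { return }
                recorder.updateMeters()
                let dB = recorder.averagePower(forChannel: 0)     // dBFS: 0 is full scale, quieter is more negative
                print("Current level: \(dB) dBFS")
            }
        }

        func stop() {
            timer?.invalidate()
            recorder?.stop()
        }
    }

Call start() after the session has been activated and stop() when done; in a real app you would update a label (for example the question's dBLabel) instead of printing.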

5 · Farini

There have been some changes to AVAudioSession; here is the corrected syntax for Swift 5.0. Paste this into viewWillAppear() and declare var recordingSession: AVAudioSession! first:

    recordingSession = AVAudioSession.sharedInstance()
    do {
        try recordingSession.setCategory(.playAndRecord, mode: .spokenAudio, options: .defaultToSpeaker)
        try recordingSession.setActive(true, options: .notifyOthersOnDeactivation)
        recordingSession.requestRecordPermission() { [unowned self] allowed in
            DispatchQueue.main.async {
                if allowed {
                    self.loadRecorder()
                } else {
                    // failed to record!
                }
            }
        }
    } catch {
        // failed to record!
    }
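
The answer calls self.loadRecorder() without showing it. A hypothetical sketch of such a helper, assuming it lives in the same view controller with AVFoundation imported, might look like the following; the file name, settings, and audioRecorder property are illustrative, not part of the answer:

    var audioRecorder: AVAudioRecorder?   // hypothetical property to keep the recorder alive

    func loadRecorder() {
        let url = FileManager.default
            .urls(for: .documentDirectory, in: .userDomainMask)[0]
            .appendingPathComponent("recording.m4a")              // assumed file name
        let settings: [String: Any] = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44_100,
            AVNumberOfChannelsKey: 1,
            AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
        ]
        do {
            let recorder = try AVAudioRecorder(url: url, settings: settings)
            if recorder.prepareToRecord() {                       // creates the file and primes the recorder
                audioRecorder = recorder
            }
        } catch {
            // failed to create the recorder
        }
    }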
0 · Grigo