1. Use CocoaPods to include the AudioKit framework by adding this line to your Podfile:
pod 'AudioKit'
*************** Updated October 4, 2020 for AudioKit 4 *****************
Type this terminal command:
pod install
*************************** Update 2020 End *****************************
2. Enable microphone access by adding the NSMicrophoneUsageDescription key to Info.plist with a usage string such as "This app needs microphone access."
Without this key, the app will crash with an error like this:
This app has crashed because it attempted to access privacy-sensitive data without a usage description. The app's Info.plist must contain an NSMicrophoneUsageDescription key with a string value explaining to the user how the app uses this data.
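iOS shows this usage string in the permission alert the first time the app uses the microphone, so no extra code is required. If you would rather trigger the prompt yourself before AudioKit starts, a minimal sketch using AVAudioSession looks like this (placing the call at the start of viewDidLoad is only a suggestion):

import AVFoundation  // add at the top of ViewController.swift

// Optional: ask for microphone permission up front.
// The system alert shows the NSMicrophoneUsageDescription string from Info.plist.
AVAudioSession.sharedInstance().requestRecordPermission { granted in
    print(granted ? "Microphone access granted" : "Microphone access denied")
}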
3. Modify ViewController.swift as shown below:
import UIKit
import AudioKit
import AudioKitUI // in AudioKit 4, EZAudioPlot and AKNodeOutputPlot ship in AudioKitUI

class ViewController: UIViewController {

    var labelFrequencyValue: UILabel!
    var labelAmplitudeValue: UILabel!
    var labelSharpValue: UILabel!
    var labelFlatValue: UILabel!

    let mic = AKMicrophone()
    var tracker: AKFrequencyTracker!

    override func viewDidLoad() {
        super.viewDidLoad()

        let labelSing = UILabel(frame: CGRect(x: 0, y: 28, width: view.frame.width, height: 50))
        labelSing.text = "Sing into the Microphone"
        labelSing.textAlignment = .center
        labelSing.font = UIFont.systemFont(ofSize: 24, weight: .bold) // bold system font (.bold replaces the old UIFontWeightBold)
        labelSing.textColor = UIColor.white
        labelSing.backgroundColor = UIColor(red: 2/255, green: 181/255, blue: 31/255, alpha: 1.0)
        view.addSubview(labelSing)

        let labelFrequency = UILabel(frame: CGRect(x: 16, y: 86, width: 85.5, height: 20.5))
        labelFrequency.text = "Frequency:"
        view.addSubview(labelFrequency)

        labelFrequencyValue = UILabel(frame: CGRect(x: view.frame.width - 70, y: 86, width: 50, height: 20.5))
        labelFrequencyValue.text = "0"
        labelFrequencyValue.textAlignment = .right
        view.addSubview(labelFrequencyValue)

        let labelAmplitude = UILabel(frame: CGRect(x: 16, y: 114.5, width: 85.5, height: 20.5))
        labelAmplitude.text = "Amplitude:"
        view.addSubview(labelAmplitude)

        labelAmplitudeValue = UILabel(frame: CGRect(x: view.frame.width - 70, y: 114.5, width: 50, height: 20.5))
        labelAmplitudeValue.text = "0"
        labelAmplitudeValue.textAlignment = .right
        view.addSubview(labelAmplitudeValue)

        let labelSharp = UILabel(frame: CGRect(x: 16, y: 142, width: 111.5, height: 20.5))
        labelSharp.text = "Note (Sharps):"
        view.addSubview(labelSharp)

        labelSharpValue = UILabel(frame: CGRect(x: view.frame.width - 70, y: 142, width: 50, height: 20.5))
        labelSharpValue.text = "C4"
        labelSharpValue.textAlignment = .right
        view.addSubview(labelSharpValue)

        let labelFlat = UILabel(frame: CGRect(x: 16, y: 170.5, width: 94, height: 20.5))
        labelFlat.text = "Note (Flats):"
        view.addSubview(labelFlat)

        labelFlatValue = UILabel(frame: CGRect(x: view.frame.width - 70, y: 170.5, width: 50, height: 20.5))
        labelFlatValue.text = "F4"
        labelFlatValue.textAlignment = .right
        view.addSubview(labelFlatValue)

        let labelPlot = UILabel(frame: CGRect(x: 0, y: 199, width: view.frame.width, height: 21.5))
        labelPlot.text = "Audio Input Plot"
        labelPlot.textAlignment = .center
        view.addSubview(labelPlot)

        // Route the microphone through a frequency tracker, then silence it with a
        // zero-gain booster so the input is analyzed without being played back.
        tracker = AKFrequencyTracker(mic)
        let silence = AKBooster(tracker, gain: 0)
        AudioKit.output = silence
        do {
            try AudioKit.start() // AudioKit.start() throws in AudioKit 4
        } catch {
            print("AudioKit did not start: \(error)")
        }

        setupPlot()

        // Refresh the labels ten times per second.
        Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updateUI), userInfo: nil, repeats: true)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func setupPlot() {
        // Draw a rolling waveform of the raw microphone input.
        let audioInputPlot = EZAudioPlot(frame: CGRect(x: 0, y: view.center.y, width: view.frame.width, height: 200))
        let plot = AKNodeOutputPlot(mic, frame: audioInputPlot.bounds)
        plot.plotType = .rolling
        plot.shouldFill = true
        plot.shouldMirror = true
        plot.color = UIColor.blue
        audioInputPlot.addSubview(plot)
        view.addSubview(audioInputPlot)
    }

    @objc func updateUI() { // @objc is required for #selector in Swift 4 and later
        let noteFrequencies = [16.35, 17.32, 18.35, 19.45, 20.6, 21.83, 23.12, 24.5, 25.96, 27.5, 29.14, 30.87]
        let noteNamesWithSharps = ["C", "C♯", "D", "D♯", "E", "F", "F♯", "G", "G♯", "A", "A♯", "B"]
        let noteNamesWithFlats = ["C", "D♭", "D", "E♭", "E", "F", "G♭", "G", "A♭", "A", "B♭", "B"]

        if tracker.amplitude > 0.1 {
            labelFrequencyValue.text = String(format: "%0.1f", tracker.frequency)

            // Fold the detected frequency into the octave-0 range (C0 to B0).
            var frequency = Float(tracker.frequency)
            while frequency > Float(noteFrequencies[noteFrequencies.count - 1]) {
                frequency = frequency / 2.0
            }
            while frequency < Float(noteFrequencies[0]) {
                frequency = frequency * 2.0
            }

            // Find the closest note in that range.
            var minDistance: Float = 10000.0
            var index = 0
            for i in 0..<noteFrequencies.count {
                let distance = fabsf(Float(noteFrequencies[i]) - frequency)
                if distance < minDistance {
                    index = i
                    minDistance = distance
                }
            }

            // The number of halvings gives the octave.
            let octave = Int(log2f(Float(tracker.frequency) / frequency))
            labelSharpValue.text = "\(noteNamesWithSharps[index])\(octave)"
            labelFlatValue.text = "\(noteNamesWithFlats[index])\(octave)"
        }

        labelAmplitudeValue.text = String(format: "%0.2f", tracker.amplitude)
    }
}
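For reference, the note-mapping math inside updateUI() can be pulled out into a standalone function. This is a sketch for illustration only (the name noteName(for:) is mine, not part of the tutorial code): it halves or doubles the frequency until it lands in the C0 to B0 range of 16.35 to 30.87 Hz, picks the nearest of the twelve notes, and recovers the octave from the number of halvings. 440 Hz, for example, folds down to 27.5 Hz (A0) after four halvings, so it returns "A4".

import Foundation

// Illustration only: the same frequency-to-note logic as updateUI(), as a pure function.
// Assumes frequency > 0.
func noteName(for frequency: Double) -> String {
    let noteFrequencies = [16.35, 17.32, 18.35, 19.45, 20.60, 21.83,
                           23.12, 24.50, 25.96, 27.50, 29.14, 30.87] // C0 to B0
    let names = ["C", "C♯", "D", "D♯", "E", "F", "F♯", "G", "G♯", "A", "A♯", "B"]

    // Halve or double the frequency until it lands in the octave-0 range.
    var folded = frequency
    while folded > noteFrequencies.last! { folded /= 2.0 }
    while folded < noteFrequencies.first! { folded *= 2.0 }

    // Nearest note in that range.
    let index = noteFrequencies.indices.min(by: {
        abs(noteFrequencies[$0] - folded) < abs(noteFrequencies[$1] - folded)
    })!

    // The number of halvings is the octave.
    let octave = Int(log2(frequency / folded))
    return "\(names[index])\(octave)"
}

print(noteName(for: 440.0)) // prints "A4"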
4. Result: