// RouteVoiceController.swift

import Foundation
import UIKit
import AVFoundation
import MapboxDirections
import MapboxCoreNavigation

extension ErrorUserInfoKey {
    static let spokenInstructionErrorCode = MBSpokenInstructionErrorCodeKey
}

extension NSAttributedString {
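    /**
     Returns a copy of the string annotated with the given IPA pronunciation, pairing each whitespace-delimited word of the receiver with the corresponding word of `pronunciation`.
     
     A minimal sketch (the street name and IPA transcription below are made-up examples):
     
         let name = NSAttributedString(string: "Grove Street")
         let utterance = AVSpeechUtterance(attributedString: name.pronounced("ɡɹoʊv stɹit"))
     */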
    @available(iOS 10.0, *)
    public func pronounced(_ pronunciation: String) -> NSAttributedString {
        let phoneticWords = pronunciation.components(separatedBy: " ")
        let phoneticString = NSMutableAttributedString()
        for (word, phoneticWord) in zip(string.components(separatedBy: " "), phoneticWords) {
            // AVSpeechSynthesizer doesn’t recognize some common IPA symbols.
            let phoneticWord = phoneticWord.byReplacing([("ɡ", "g"), ("ɹ", "r")])
            if phoneticString.length > 0 {
                phoneticString.append(NSAttributedString(string: " "))
            }
            phoneticString.append(NSAttributedString(string: word, attributes: [
                NSAttributedStringKey(rawValue: AVSpeechSynthesisIPANotationAttribute): phoneticWord
            ]))
        }
        return phoneticString
    }
}

extension SpokenInstruction {
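    /**
     Returns the instruction text with any upcoming or follow-on street names annotated with their IPA phonetic transcriptions via `pronounced(_:)`, so the speech synthesizer pronounces them correctly.
     */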
    @available(iOS 10.0, *)
    func attributedText(for legProgress: RouteLegProgress) -> NSAttributedString {
        let attributedText = NSMutableAttributedString(string: text)
        if let step = legProgress.upComingStep,
            let name = step.names?.first,
            let phoneticName = step.phoneticNames?.first {
            let nameRange = attributedText.mutableString.range(of: name)
            if nameRange.location != NSNotFound {
                attributedText.replaceCharacters(in: nameRange, with: NSAttributedString(string: name).pronounced(phoneticName))
            }
        }
        if let step = legProgress.followOnStep,
            let name = step.names?.first,
            let phoneticName = step.phoneticNames?.first {
            let nameRange = attributedText.mutableString.range(of: name)
            if nameRange.location != NSNotFound {
                attributedText.replaceCharacters(in: nameRange, with: NSAttributedString(string: name).pronounced(phoneticName))
            }
        }
        return attributedText
    }
}

/**
The `RouteVoiceController` class provides voice guidance.
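
A minimal usage sketch (hypothetical host-app code). The controller registers itself for the relevant `RouteController` notifications, so instantiating it and keeping a strong reference is enough to hear turn-by-turn instructions:

    let voiceController = RouteVoiceController()
    voiceController.volume = 0.8
    voiceController.playRerouteSound = true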
*/
@objc(MBRouteVoiceController)
open class RouteVoiceController: NSObject, AVSpeechSynthesizerDelegate, AVAudioPlayerDelegate {
    lazy var speechSynth = AVSpeechSynthesizer()
    var audioPlayer: AVAudioPlayer?
    let audioQueue = DispatchQueue(label: Bundle.mapboxNavigation.bundleIdentifier! + ".audio")
    
    /**
     A Boolean value indicating whether instructions should be announced by voice.
     */
    @objc public var isEnabled: Bool = true
    
    /**
     Volume of announcements.
     */
    @objc public var volume: Float = 1.0
    
    /**
     SSML option that controls the speed at which Polly reads instructions.
     */
    @objc public var instructionVoiceSpeedRate = 1.08
    
    /**
     SSML option that specifies the voice loudness.
     */
    @objc public var instructionVoiceVolume = "default"
    
    /**
     If true, a sound indicating that the user is about to be rerouted plays prior to rerouting.
     */
    @objc public var playRerouteSound = true
    
    /**
     Sound to play prior to rerouting. Inherits the volume level from `volume`.
     */
    @objc public var rerouteSoundPlayer: AVAudioPlayer = try! AVAudioPlayer(data: NSDataAsset(name: "reroute-sound", bundle: .mapboxNavigation)!.data, fileTypeHint: AVFileType.mp3.rawValue)
    
    /**
     Buffer time between announcements. After an announcement is given, any announcement made within this time interval will be suppressed.
     */
    @objc public var bufferBetweenAnnouncements: TimeInterval = 3
    
    /**
     Delegate used for getting metadata about a particular spoken instruction.
     */
    public weak var voiceControllerDelegate: VoiceControllerDelegate?
    
    var lastSpokenInstruction: SpokenInstruction?
    var routeProgress: RouteProgress?
    
    var volumeToken: NSKeyValueObservation?
    var muteToken: NSKeyValueObservation?
    
    /**
     Default initializer for `RouteVoiceController`.
     */
    override public init() {
        super.init()
        verifyBackgroundAudio()
        speechSynth.delegate = self
        rerouteSoundPlayer.delegate = self
        resumeNotifications()
    }
    
    private func verifyBackgroundAudio() {
        guard UIApplication.shared.isKind(of: UIApplication.self) else {
            return
        }
        if !Bundle.main.backgroundModes.contains("audio") {
            assertionFailure("This application’s Info.plist file must include “audio” in UIBackgroundModes. This background mode is used for spoken instructions while the application is in the background.")
        }
    }
    
    deinit {
        suspendNotifications()
        speechSynth.stopSpeaking(at: .immediate)
    }
    
    func resumeNotifications() {
        NotificationCenter.default.addObserver(self, selector: #selector(didPassSpokenInstructionPoint(notification:)), name: .routeControllerDidPassSpokenInstructionPoint, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(pauseSpeechAndPlayReroutingDing(notification:)), name: .routeControllerWillReroute, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(didReroute(notification:)), name: .routeControllerDidReroute, object: nil)
        
        volumeToken = NavigationSettings.shared.observe(\.voiceVolume) { [weak self] (settings, change) in
            self?.audioPlayer?.volume = settings.voiceVolume
        }
        
        muteToken = NavigationSettings.shared.observe(\.voiceMuted) { [weak self] (settings, change) in
            if settings.voiceMuted {
                self?.audioPlayer?.stop()
                self?.speechSynth.stopSpeaking(at: .immediate)
            }
        }
    }
    
    func suspendNotifications() {
        NotificationCenter.default.removeObserver(self, name: .routeControllerDidPassSpokenInstructionPoint, object: nil)
        NotificationCenter.default.removeObserver(self, name: .routeControllerWillReroute, object: nil)
        NotificationCenter.default.removeObserver(self, name: .routeControllerDidReroute, object: nil)
    }
    
    @objc func didReroute(notification: NSNotification) {
        // Play the reroute sound only when a faster route is found proactively.
        if notification.userInfo?[RouteControllerNotificationUserInfoKey.isProactiveKey] as? Bool ?? false {
            pauseSpeechAndPlayReroutingDing(notification: notification)
        }
    }
    
    @objc func pauseSpeechAndPlayReroutingDing(notification: NSNotification) {
        speechSynth.stopSpeaking(at: .word)
        
        guard playRerouteSound && !NavigationSettings.shared.voiceMuted else {
            return
        }
        
        do {
            try mixAudio()
        } catch {
            voiceControllerDelegate?.voiceController?(self, spokenInstructionsDidFailWith: error)
        }
        
        rerouteSoundPlayer.volume = volume
        rerouteSoundPlayer.play()
    }
    
    @objc public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        do {
            try unDuckAudio()
        } catch {
            voiceControllerDelegate?.voiceController?(self, spokenInstructionsDidFailWith: error)
        }
    }
    
    @objc public func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didFinish utterance: AVSpeechUtterance) {
        do {
            try unDuckAudio()
        } catch {
            voiceControllerDelegate?.voiceController?(self, spokenInstructionsDidFailWith: error)
        }
    }
    
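    /**
     Ducks other audio (such as music) and activates the shared audio session so a spoken instruction stands out.
     */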
    func duckAudio() throws {
        let categoryOptions: AVAudioSessionCategoryOptions = [.duckOthers, .interruptSpokenAudioAndMixWithOthers]
        try AVAudioSession.sharedInstance().setMode(AVAudioSessionModeSpokenAudio)
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, with: categoryOptions)
        try AVAudioSession.sharedInstance().setActive(true)
    }
    
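    /**
     Switches the shared audio session to the ambient category so the reroute sound mixes with other audio instead of ducking it.
     */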
    func mixAudio() throws {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryAmbient)
        try AVAudioSession.sharedInstance().setActive(true)
    }
    
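    /**
     Deactivates the audio session when playback finishes, notifying other audio sources so they can restore their volume.
     */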
    func unDuckAudio() throws {
        try AVAudioSession.sharedInstance().setActive(false, with: [.notifyOthersOnDeactivation])
    }
    
    @objc open func didPassSpokenInstructionPoint(notification: NSNotification) {
        guard !NavigationSettings.shared.voiceMuted else { return }
        
        routeProgress = notification.userInfo?[RouteControllerNotificationUserInfoKey.routeProgressKey] as? RouteProgress
        assert(routeProgress != nil, "routeProgress should not be nil.")
        
        guard let instruction = routeProgress?.currentLegProgress.currentStepProgress.currentSpokenInstruction else { return }
        lastSpokenInstruction = instruction
        speak(instruction)
    }
    
    /**
     Reads aloud the given instruction.
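     
     For example, a host app could re-announce the current instruction from a `RouteProgress` value it already has (hypothetical `progress` and `voiceController` values):
     
         if let instruction = progress.currentLegProgress.currentStepProgress.currentSpokenInstruction {
             voiceController.speak(instruction)
         }
     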
     - parameter instruction: The instruction to read aloud.
     */
    open func speak(_ instruction: SpokenInstruction) {
        assert(routeProgress != nil, "routeProgress should not be nil.")
        
        if speechSynth.isSpeaking, let lastSpokenInstruction = lastSpokenInstruction {
            voiceControllerDelegate?.voiceController?(self, didInterrupt: lastSpokenInstruction, with: instruction)
        }
        
        do {
            try duckAudio()
        } catch {
            voiceControllerDelegate?.voiceController?(self, spokenInstructionsDidFailWith: error)
        }
        
        // Give the delegate a chance to substitute a custom instruction before building the utterance.
        let modifiedInstruction = voiceControllerDelegate?.voiceController?(self, willSpeak: instruction, routeProgress: routeProgress!) ?? instruction
        
        var utterance: AVSpeechUtterance?
        if Locale.preferredLocalLanguageCountryCode == "en-US" {
            // Alex can’t handle attributed text.
            utterance = AVSpeechUtterance(string: modifiedInstruction.text)
            utterance!.voice = AVSpeechSynthesisVoice(identifier: AVSpeechSynthesisVoiceIdentifierAlex)
        }
        
        // If no voice has been assigned yet (Alex unavailable or a non-en-US locale), build the utterance from scratch.
        if utterance?.voice == nil {
            if #available(iOS 10.0, *) {
                utterance = AVSpeechUtterance(attributedString: modifiedInstruction.attributedText(for: routeProgress!.currentLegProgress))
            } else {
                utterance = AVSpeechUtterance(string: modifiedInstruction.text)
            }
        }
        
        // Only localized languages will have a proper fallback voice.
        if utterance?.voice == nil {
            utterance?.voice = AVSpeechSynthesisVoice(language: Locale.preferredLocalLanguageCountryCode)
        }
        utterance?.volume = volume
        
        if let utterance = utterance {
            speechSynth.speak(utterance)
        }
    }
}

/**
The `VoiceControllerDelegate` protocol defines methods that allow an object to respond to significant events related to spoken instructions.
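
A minimal conformance sketch (a hypothetical host-app class; all methods are optional, so implement only what is needed):

    class AnnouncementLogger: NSObject, VoiceControllerDelegate {
        func voiceController(_ voiceController: RouteVoiceController, spokenInstructionsDidFailWith error: Error) {
            print("Speech failed: \(error)")
        }
    }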
*/
@objc(MBVoiceControllerDelegate)
public protocol VoiceControllerDelegate {
    /**
     Called when the voice controller failed to speak an instruction.
     
     - parameter voiceController: The voice controller that experienced the failure.
     - parameter error: An error explaining the failure and its cause. The `MBSpokenInstructionErrorCodeKey` key of the error’s user info dictionary is a `SpokenInstructionErrorCode` indicating the cause of the failure.
     */
    @objc(voiceController:spokenInstructionsDidFailWithError:)
    optional func voiceController(_ voiceController: RouteVoiceController, spokenInstructionsDidFailWith error: Error)
    
    /**
     Called when one spoken instruction interrupts another instruction currently being spoken.
     
     - parameter voiceController: The voice controller that experienced the interruption.
     - parameter interruptedInstruction: The spoken instruction currently in progress that has been interrupted.
     - parameter interruptingInstruction: The spoken instruction that is interrupting the current instruction.
     */
    @objc(voiceController:didInterruptSpokenInstruction:withInstruction:)
    optional func voiceController(_ voiceController: RouteVoiceController, didInterrupt interruptedInstruction: SpokenInstruction, with interruptingInstruction: SpokenInstruction)
    
    /**
     Called when an instruction is about to be spoken. Useful if it is necessary to give a custom instruction instead. Note that changing the `distanceAlongStep` property on `SpokenInstruction` will have no impact on when the instruction is said.
     
     - parameter voiceController: The voice controller that will speak an instruction.
     - parameter instruction: The spoken instruction that will be said.
     - parameter routeProgress: The `RouteProgress` just before the instruction is scheduled to be spoken.
     */
    @objc(voiceController:willSpeakSpokenInstruction:routeProgress:)
    optional func voiceController(_ voiceController: RouteVoiceController, willSpeak instruction: SpokenInstruction, routeProgress: RouteProgress) -> SpokenInstruction?
}