//
//  OOVoiceAIController.swift
//  O2Platform
//
//  Created by FancyLou on 2020/8/27.
//  Copyright © 2020 zoneland. All rights reserved.
//

import UIKit
import CocoaLumberjack
import AVFoundation
import Speech
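
/// Voice assistant screen.
///
/// Speech recognition is handled by the Baidu ASR SDK (`BDSEventManager`),
/// text-to-speech replies by `AVSpeechSynthesizer`, and the listen/speak
/// state machine by `OOAIViewModel`, which calls back through
/// `OOAIVoiceControllerDelegate`.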
class OOVoiceAIController: UIViewController {

    @IBOutlet weak var voiceView: OOVoiceView!
    @IBOutlet weak var tipsLabel: UILabel!
    @IBOutlet weak var showLabel: UILabel!

    var closeVC = false
    var animation = false
    // Speech synthesis (text-to-speech)
    var synthesizer: AVSpeechSynthesizer!
    var voice: AVSpeechSynthesisVoice!
    // Baidu speech recognition
    var bdmanager: BDSEventManager? = nil

    private lazy var viewModel: OOAIViewModel = {
        return OOAIViewModel()
    }()

    // MARK: - override

    override func viewDidLoad() {
        super.viewDidLoad()
        self.title = "语音助手" // "Voice Assistant"
        self.navigationItem.leftBarButtonItem = UIBarButtonItem(title: "关闭" /* "Close" */, style: .plain, target: self, action: #selector(closeWindow))
        SFSpeechRecognizer.requestAuthorization { (status) in
            if status != SFSpeechRecognizerAuthorizationStatus.authorized {
                DDLogError("Error: speech recognition not authorized!")
            }
        }
        viewModel.aiVoiceControllerDelegate = self
        initSpeak()
        configASR()
    }
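
    // Note: `SFSpeechRecognizer.requestAuthorization` requires the
    // NSSpeechRecognitionUsageDescription key (and NSMicrophoneUsageDescription
    // for audio capture) in Info.plist; without them the request crashes at
    // runtime.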

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // Greet the user, then start speaking
        let people = O2AuthSDK.shared.myInfo()?.name ?? ""
        viewModel.speakMessage = "你好:\(people),需要我为您做些什么?" // "Hello \(people), what can I do for you?"
        viewModel.activityStatus = .speak
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        DDLogInfo("Closing voice assistant ... viewWillDisappear")
        self.closeVC = true
        if animation {
            stopListen()
        }
        if synthesizer?.isSpeaking == true {
            synthesizer?.stopSpeaking(at: .immediate)
        }
    }

    @objc private func closeWindow() {
        self.navigationController?.popViewController(animated: true)
    }

    // MARK: - Speech recognition (Baidu ASR)

    /// Initialize and configure the Baidu speech recognizer.
    private func configASR() {
        self.bdmanager = BDSEventManager.createEventManager(withName: BDS_ASR_NAME)
        self.bdmanager?.setDelegate(self)
        // Baidu ASR app key and secret
        self.bdmanager?.setParameter([BAIDU_ASR_APP_KEY, BAIDU_ASR_APP_SECRET], forKey: BDS_ASR_API_SECRET_KEYS)
        self.bdmanager?.setParameter(BAIDU_ASR_APP_ID, forKey: BDS_ASR_OFFLINE_APP_CODE)
        // Path to the VAD (voice activity detection) model
        let path = Bundle.main.path(forResource: "bds_easr_basic_model", ofType: "dat")
        self.bdmanager?.setParameter(path, forKey: BDS_ASR_MODEL_VAD_DAT_FILE)
        self.bdmanager?.setParameter(true, forKey: BDS_ASR_ENABLE_MODEL_VAD)
        // Disable punctuation in recognition results
        self.bdmanager?.setParameter(true, forKey: BDS_ASR_DISABLE_PUNCTUATION)
    }
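
    // Note: `configASR()` assumes `bds_easr_basic_model.dat` is bundled with
    // the app target; `Bundle.main.path(forResource:ofType:)` returns nil
    // otherwise, leaving the VAD model unconfigured.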

    private func startListen() {
        guard !animation else {
            return
        }
        self.bdmanager?.sendCommand(BDS_ASR_CMD_START)
        self.showLabel.text = "等待命令中....." // "Waiting for a command..."
        // Show the available hint commands for the current AI state
        if self.viewModel.getAIStatus() == .normal {
            let taskCommand = AI_COMMAND_TASK.joined(separator: ",")
            let outCommand = AI_COMMAND_STOP.joined(separator: ",")
            self.tipsLabel.text = "可以使用如下命令:\(taskCommand) , \(outCommand)" // "Available commands: ..."
        } else if self.viewModel.getAIStatus() == .working {
            let ignoreCommand = AI_COMMAND_IGNORE.joined(separator: ",")
            let aiCommand = AI_COMMAND_TASK_NEURAL.joined(separator: ",")
            let outCommand = AI_COMMAND_STOP.joined(separator: ",")
            if let task = self.viewModel.getCurrentDealTask() {
                let routeList = task.routeNameList?.joined(separator: "或") ?? "" // "或" = "or"
                self.tipsLabel.text = "可以使用如下命令:\(routeList) , \(ignoreCommand), \(aiCommand)"
            } else {
                self.tipsLabel.text = "可以使用如下命令:\(outCommand)"
            }
        } else {
            self.tipsLabel.text = ""
        }
        animation = true
        voiceView.startAnimation()
    }

    private func stopListen() {
        guard animation else {
            return
        }
        self.tipsLabel.text = ""
        self.bdmanager?.sendCommand(BDS_ASR_CMD_STOP)
        animation = false
        voiceView.stopAnimation()
    }

    // MARK: - Speech synthesis

    /// Initialize the speech synthesizer.
    private func initSpeak() {
        synthesizer = AVSpeechSynthesizer()
        synthesizer.delegate = self
        // BCP-47 language code ("zh-CN"; an underscore form like "zh_CN" yields no voice)
        voice = AVSpeechSynthesisVoice(language: "zh-CN")
    }

    private func speak(txt: String) {
        DDLogInfo("speak: \(txt)")
        do {
            let audioSession = AVAudioSession.sharedInstance()
            try audioSession.setCategory(.ambient, mode: .default)
        } catch {
            DDLogError("Audio session error: \(error)")
        }
        let utterance = AVSpeechUtterance(string: txt)
        utterance.voice = voice
        utterance.volume = 1
        synthesizer.speak(utterance)
    }
}
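
// Usage sketch (assumption: the controller lives in a storyboard, given the
// @IBOutlet wiring; the storyboard name and identifier below are hypothetical):
//
//   let storyboard = UIStoryboard(name: "ai", bundle: nil)
//   if let vc = storyboard.instantiateViewController(withIdentifier: "OOVoiceAIController") as? OOVoiceAIController {
//       navigationController?.pushViewController(vc, animated: true)
//   }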

// MARK: - OOAIVoiceControllerDelegate

extension OOVoiceAIController: OOAIVoiceControllerDelegate {

    func changeSpeakMessage(message: String?) {
        self.showLabel.text = message
    }

    func changeActivityStatus(status: AIActivityStatus?) {
        // Guard instead of force-unwrapping: a nil status is simply ignored
        guard let status = status else {
            return
        }
        switch status {
        case .listen:
            DDLogInfo("Start listening for a command ...")
            DispatchQueue.main.async {
                self.startListen()
            }
        case .speak:
            DDLogInfo("Start speaking ...")
            if let message = viewModel.speakMessage {
                DispatchQueue.main.async {
                    self.speak(txt: message)
                }
            } else {
                DDLogError("No text, nothing to speak.")
            }
        }
    }

    func finishController() {
        DDLogInfo("Closing voice assistant ...")
        self.closeVC = true
        self.closeWindow()
    }
}

// MARK: - AVSpeechSynthesizerDelegate (speech synthesis callbacks)

extension OOVoiceAIController: AVSpeechSynthesizerDelegate {

    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didFinish utterance: AVSpeechUtterance) {
        DDLogInfo("speak finish ...")
        if !self.closeVC {
            viewModel.speakFinish()
        }
    }

    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didStart utterance: AVSpeechUtterance) {
        DDLogInfo("speak start ...")
    }

    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didCancel utterance: AVSpeechUtterance) {
        DDLogError("speak cancel ...")
    }

    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didPause utterance: AVSpeechUtterance) {
        DDLogError("speak pause ...")
    }

    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didContinue utterance: AVSpeechUtterance) {
        DDLogInfo("speak continue ...")
    }
}
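
// Note: `didFinish` above is what advances the conversation loop;
// `speakFinish()` presumably flips the view model's activityStatus back to
// .listen, which re-arms the recognizer through changeActivityStatus(status:).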

// MARK: - BDSClientASRDelegate (Baidu ASR callbacks)

extension OOVoiceAIController: BDSClientASRDelegate {

    // Baidu ASR work status codes (EVoiceRecognitionClientWorkStatus):
    //  0  StartWorkIng     recognition started; capturing and processing audio
    //  1  Start            detected that the user started speaking
    //  2  End              local audio capture finished; waiting for the result
    //  3  NewRecordData    recorded-audio data callback
    //  4  FlushData        partial results streamed continuously to screen
    //  5  Finish           recognition finished; server returned a valid result
    //  6  MeterLevel       current volume level callback
    //  7  Cancel           cancelled by the user
    //  8  Error            an error occurred
    //  /* offline engine */
    //  9  Loaded           offline engine loaded
    // 10  UnLoaded         offline engine unloaded
    //  /* CHUNK */
    // 11  ChunkThirdData   CHUNK: third-party data in the recognition result
    // 12  ChunkNlu         CHUNK: semantic (NLU) result
    // 13  ChunkEnd         CHUNK: recognition finished
    //  /* LOG */
    // 14  Feedback         telemetry data reported during recognition
    //  /* iOS only */
    // 15  RecorderEnd      recorder closed; check before navigating away (iOS)
    //  /* long speech */
    // 16  LongSpeechEnd    long-speech session ended
    func voiceRecognitionClientWorkStatus(_ workStatus: Int32, obj aObj: Any!) {
        switch workStatus {
        case 0: // StartWorkIng: recognition started
            DDLogInfo("Recognition started ...")
        case 5: // Finish: the server returned a valid result
            if let resDic = aObj as? NSDictionary,
               let arr = resDic["results_recognition"] as? [String],
               let first = arr.first {
                DDLogInfo("Recognition succeeded, result: \(first)")
                let resultNoPunct = first.trimmingCharacters(in: .punctuationCharacters)
                DDLogInfo("Result without punctuation: \(resultNoPunct)")
                DispatchQueue.main.async {
                    if !self.closeVC {
                        self.stopListen()
                        self.viewModel.command = resultNoPunct
                    }
                }
            } else {
                DispatchQueue.main.async {
                    if !self.closeVC {
                        self.stopListen()
                        self.viewModel.command = nil
                    }
                }
            }
        case 8: // Error
            let err = aObj as? Error
            DDLogError("err: \(String(describing: err?.localizedDescription))")
            DispatchQueue.main.async {
                if !self.closeVC {
                    self.stopListen()
                    self.viewModel.command = nil
                }
            }
        default:
            break
        }
    }
}
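
// Readability sketch (hypothetical; `ASRWorkStatus` is not part of the Baidu
// SDK): the magic numbers handled above could be wrapped in a typed enum whose
// raw values mirror the EVoiceRecognitionClientWorkStatus list documented in
// the extension, e.g. `switch ASRWorkStatus(rawValue: workStatus) { ... }`.
private enum ASRWorkStatus: Int32 {
    case startWorking = 0 // recognition started, capturing audio
    case finish = 5       // server returned a valid result
    case error = 8        // an error occurred
}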

// Disabled alternative: Apple's SFSpeechRecognizer-based recognition.
// (It references `lastRecognizeTime`, a property not defined in this file.)
//
//extension OOVoiceAIController: SFSpeechRecognitionTaskDelegate {
//    func speechRecognitionDidDetectSpeech(_ task: SFSpeechRecognitionTask) {
//        DDLogInfo("did detect speech ...")
//    }
//    func speechRecognitionTaskWasCancelled(_ task: SFSpeechRecognitionTask) {
//        DDLogInfo("cancel recognizer ...")
//    }
//    func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didHypothesizeTranscription transcription: SFTranscription) {
//        DDLogInfo("hypothesize recognize ...")
//        let c = transcription.formattedString
//        DDLogInfo("Candidate: \(c)")
//        let time = Int(Date().timeIntervalSince1970)
//        self.lastRecognizeTime = time
//    }
//    func speechRecognitionTaskFinishedReadingAudio(_ task: SFSpeechRecognitionTask) {
//        DDLogInfo("finished reading audio ...")
//    }
//    func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishRecognition recognitionResult: SFSpeechRecognitionResult) {
//        DDLogInfo("finish recognize result ...")
//        let best = recognitionResult.bestTranscription.formattedString
//        DDLogInfo("Best: \(best)")
//        let removePunctuation = best.trimmingCharacters(in: CharacterSet.punctuationCharacters)
//        DDLogInfo("Best without punctuation: \(removePunctuation)")
//        if !self.closeVC {
//            self.viewModel.command = removePunctuation
//            self.lastRecognizeTime = -1
//        }
//    }
//    func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishSuccessfully successfully: Bool) {
//        DDLogInfo("finish recognize task ... \(successfully)")
//        if !successfully && !self.closeVC {
//            self.viewModel.command = nil
//        }
//    }
//}