KSMergeEnginePlayer.m 19 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560
  1. //
  2. // KSMergeEnginePlayer.m
  3. // MutiPlayDemo
  4. //
  5. // Created by 王智 on 2024/6/17.
  6. //
  7. #import "KSMergeEnginePlayer.h"
  8. #import <AVFoundation/AVFoundation.h>
@interface KSMergeEnginePlayer ()
/** Progress timer; paused/resumed via fire-date rather than invalidation. */
@property (nonatomic, strong) NSTimer *timer;
/** Engine graph hosting the single player node. */
@property (nonatomic, strong) AVAudioEngine *audioEngine;
@property (nonatomic, strong) AVAudioPlayerNode *nodePlayer;
// Recorded (vocal) file and its processing format.
@property (nonatomic, strong) AVAudioFile *audioFile;
@property (nonatomic, strong) AVAudioFormat *audioFormat;
// Background-music file and its processing format.
@property (nonatomic, strong) AVAudioFile *bgAudioFile;
@property (nonatomic, strong) AVAudioFormat *bgAudioFormat;
@property (nonatomic, assign) NSTimeInterval totalDuration; // total length of the recording, in ms (see getTotalTime)
@property (nonatomic, assign) NSInteger offsetTime; // vocal/backing delay in ms; sign selects the shift direction
@property (nonatomic, assign) AVAudioFramePosition startPosition; // frame the current schedule started from
@property (nonatomic, strong) dispatch_queue_t sourceQueue; // serial queue ordering all load/mix work
@property (nonatomic, strong) AVAudioPCMBuffer *mixBuffer; // stereo output buffer handed to the player node
@property (nonatomic, strong) AVAudioPCMBuffer *bgBuffer; // backing-track samples
@property (nonatomic, strong) AVAudioPCMBuffer *recordBuffer; // vocal samples (mono — only channel 0 is read)
@property (nonatomic, assign) AVAudioFramePosition currentFrame; // last known playback frame position
@property (nonatomic, assign) double sampleRate;
@property (nonatomic, assign) BOOL stopMix; // asks an in-flight mix loop to abort early
@property (nonatomic, strong) dispatch_semaphore_t mixChangeSemaphore; // serializes mix passes (created with count 1)
@property (nonatomic, assign) BOOL stopChangeVolume; // asks an in-flight volume-change loop to abort early
@property (nonatomic, strong) dispatch_semaphore_t volumeChangeSemaphore; // serializes volume passes (created with count 1)
@property (nonatomic, assign) BOOL isInterrupt; // YES while an audio-session interruption is active
@end
  33. @implementation KSMergeEnginePlayer
  34. - (instancetype)init {
  35. self = [super init];
  36. if (self) {
  37. [self configDefault];
  38. }
  39. return self;
  40. }
  41. - (void)configDefault {
  42. self.recordVolume = 1.0f;
  43. self.bgVolume = 1.0f;
  44. self.mixChangeSemaphore = dispatch_semaphore_create(1); // 初始化信号量
  45. self.volumeChangeSemaphore = dispatch_semaphore_create(1); // 初始化信号量,初始值为1
  46. }
  47. - (void)configEngine {
  48. [self setupAudioSession];
  49. self.audioEngine = [[AVAudioEngine alloc] init];
  50. self.nodePlayer = [[AVAudioPlayerNode alloc] init];
  51. // attach node
  52. [self.audioEngine attachNode:self.nodePlayer];
  53. }
  54. - (void)setupAudioSession {
  55. NSError *err = nil;
  56. AVAudioSession *audioSession = [AVAudioSession sharedInstance];
  57. @try {
  58. [audioSession setActive:YES error:&err];
  59. } @catch (NSException *exception) {
  60. } @finally {
  61. }
  62. [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleInterruption:) name:AVAudioSessionInterruptionNotification object:audioSession];
  63. [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleRouteChange:) name:AVAudioSessionRouteChangeNotification object:audioSession];
  64. }
  65. - (void)startEngine {
  66. // 启动engine
  67. NSError *error = nil;
  68. @try {
  69. [self.audioEngine startAndReturnError:&error];
  70. } @catch (NSException *exception) {
  71. NSLog(@"--------Exception: %@", exception);
  72. } @finally {
  73. if (error) {
  74. self.audioEngine = nil;
  75. // 错误回调
  76. [self sendInterruptError:error];
  77. }
  78. }
  79. }
  80. - (void)prepareNativeSongWithUrl:(NSURL *)recordAudioUrl bgMusic:(NSURL *)bgMusicUrl {
  81. [self loadAuidoFile:recordAudioUrl isBgm:NO];
  82. [self loadAuidoFile:bgMusicUrl isBgm:YES];
  83. self.sampleRate = self.audioFile.fileFormat.sampleRate;
  84. [self configEngine];
  85. AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
  86. [self.audioEngine connect:self.nodePlayer to:self.audioEngine.mainMixerNode format:outputFormat];
  87. [self startEngine];
  88. if (self.audioEngine && self.audioEngine.isRunning) {
  89. dispatch_async(self.sourceQueue, ^{
  90. [self prepareBufferFrame];
  91. });
  92. }
  93. }
  94. - (void)loadAuidoFile:(NSURL *)audioFileUrl isBgm:(BOOL)isBgm {
  95. dispatch_sync(self.sourceQueue, ^{
  96. NSError *error = nil;
  97. AVAudioFile *audioFile = nil;
  98. AVAudioFormat *audioFormat = nil;
  99. @try {
  100. audioFile = [[AVAudioFile alloc] initForReading:audioFileUrl error:&error];
  101. audioFormat = audioFile.processingFormat;
  102. } @catch (NSException *exception) {
  103. audioFile = nil;
  104. audioFormat = nil;
  105. } @finally {
  106. if (error) {
  107. // 错误回调
  108. }
  109. else { // 加载成功
  110. if (isBgm) {
  111. self.bgAudioFile = audioFile;
  112. self.bgAudioFormat = audioFormat;
  113. }
  114. else {
  115. self.audioFile = audioFile;
  116. self.audioFormat = audioFormat;
  117. }
  118. }
  119. }
  120. });
  121. }
  122. - (void)prepareBufferFrame {
  123. AVAudioFrameCount minFrameCount = (AVAudioFrameCount)MIN(self.bgAudioFile.length, self.audioFile.length);
  124. // mixBuffer
  125. AVAudioFormat *outputFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate:self.bgAudioFormat.sampleRate channels:2];
  126. self.mixBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:outputFormat frameCapacity:minFrameCount];
  127. self.mixBuffer.frameLength = minFrameCount;
  128. self.bgBuffer = [self loadAudioSegment:self.bgAudioFile startFrame:0 frameCount:minFrameCount];
  129. self.recordBuffer = [self loadAudioSegment:self.audioFile startFrame:0 frameCount:minFrameCount];
  130. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerIsReadyPlay:)]) {
  131. self.isReady = YES;
  132. [self.delegate enginePlayerIsReadyPlay:self];
  133. }
  134. }
  135. - (AVAudioPCMBuffer *)loadAudioSegment:(AVAudioFile *)audioFile startFrame:(AVAudioFramePosition)startFrame frameCount:(AVAudioFrameCount)frameCount {
  136. AVAudioFormat *audioFromat = audioFile.processingFormat;
  137. AVAudioFrameCount frameToRead = (AVAudioFrameCount)MIN(frameCount, (AVAudioFrameCount)audioFile.length - startFrame);
  138. if (startFrame > audioFile.length) {
  139. return nil;
  140. }
  141. AVAudioPCMBuffer *buffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:audioFromat frameCapacity:frameToRead];
  142. buffer.frameLength = frameToRead;
  143. audioFile.framePosition = startFrame;
  144. if (frameToRead > 0) {
  145. @try {
  146. [audioFile readIntoBuffer:buffer frameCount:frameToRead error:nil];
  147. } @catch (NSException *exception) {
  148. } @finally {
  149. }
  150. }
  151. return buffer;
  152. }
  153. - (void)mixBuffers:(AVAudioPCMBuffer *)bgBuffer bgBufferVolume:(float)bgBufferVolume withRecordBuffer:(AVAudioPCMBuffer *)recordBuffer recordVolume:(float)recordVolume offset:(NSInteger)offsetTime startPosition:(AVAudioFrameCount)startPosition {
  154. if (!bgBuffer && !recordBuffer) {
  155. return;
  156. }
  157. NSLog(@"------- start");
  158. AVAudioFrameCount minFrameCount = MIN(bgBuffer.frameLength, recordBuffer.frameLength);
  159. AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * recordBuffer.format.sampleRate;
  160. float *bgLeftChannel = bgBuffer.floatChannelData[0];
  161. float *bgRightChannel = bgBuffer.floatChannelData[1];
  162. // 录音文件未单声道
  163. float *recordLeftChannel = recordBuffer.floatChannelData[0];
  164. float *mixLeftChannel = self.mixBuffer.floatChannelData[0];
  165. float *mixRightChannel = self.mixBuffer.floatChannelData[1];
  166. for (int frame = 0; frame < minFrameCount; frame++) {
  167. if (self.stopMix) {
  168. NSLog(@"------- stop mix");
  169. dispatch_semaphore_signal(self.mixChangeSemaphore); // 释放信号量
  170. return;
  171. }
  172. int bgFrame = frame+startPosition;
  173. float leftChannel = (bgFrame < bgBuffer.frameLength) ? bgLeftChannel[bgFrame] : 0;
  174. float rightChannel = (bgFrame < bgBuffer.frameLength) ? bgRightChannel[bgFrame] : 0;
  175. int recordFrame = (offsetTime < 0) ? (bgFrame - offsetFrame) : (bgFrame + offsetFrame);
  176. float recordData = (recordFrame >= 0 && recordFrame < recordBuffer.frameLength) ? recordLeftChannel[recordFrame] : 0;
  177. float mixLeftData = [self mixChannelData:leftChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
  178. float mixRightData = [self mixChannelData:rightChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
  179. mixLeftChannel[frame] = MAX(-1.0, MIN(1.0, mixLeftData));
  180. mixRightChannel[frame] = MAX(-1.0, MIN(1.0, mixRightData));
  181. }
  182. NSLog(@"---------finish");
  183. }
  184. - (float)mixChannelData:(float)bgData bgVolume:(float)bgVolume recordData:(float)recordData recordVolume:(float)recordVolume {
  185. return (bgData * bgVolume + recordData * recordVolume) / 2;
  186. }
  187. - (void)changeVolume:(float)bgVolume recordVolume:(float)recordVolume {
  188. NSLog(@"bg volume ---- %f, record volume ---- %f", bgVolume, recordVolume);
  189. self.bgVolume = bgVolume;
  190. self.recordVolume = recordVolume;
  191. if (self.bgBuffer && self.recordBuffer) {
  192. self.stopChangeVolume = YES;
  193. // 停止上一次修改音量
  194. dispatch_async(self.sourceQueue, ^{
  195. // 等待上一次的操作完成
  196. dispatch_semaphore_wait(self.volumeChangeSemaphore, DISPATCH_TIME_FOREVER);
  197. self.stopChangeVolume = NO;
  198. // 开始新的音量修改操作
  199. AVAudioFramePosition startFrame = self.currentFrame;
  200. NSLog(@"----- current frame -----%lld", startFrame);
  201. [self modifyMixBuffer:self.bgBuffer bgBufferVolume:bgVolume withRecordBuffer:self.recordBuffer recordVolume:recordVolume offset:self.offsetTime startPosition:startFrame tagIndex:0];
  202. // 释放信号量,标记音量修改操作完成
  203. dispatch_semaphore_signal(self.volumeChangeSemaphore);
  204. });
  205. }
  206. }
  207. - (void)modifyMixBuffer:(AVAudioPCMBuffer *)bgBuffer bgBufferVolume:(float)bgBufferVolume withRecordBuffer:(AVAudioPCMBuffer *)recordBuffer recordVolume:(float)recordVolume offset:(NSInteger)offsetTime startPosition:(AVAudioFramePosition)startFrame tagIndex:(NSInteger)tagIndex {
  208. AVAudioFrameCount minFrameCount = MIN(bgBuffer.frameLength, recordBuffer.frameLength);
  209. AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * recordBuffer.format.sampleRate;
  210. float *bgLeftChannel = bgBuffer.floatChannelData[0];
  211. float *bgRightChannel = bgBuffer.floatChannelData[1];
  212. // 录音文件未单声道
  213. float *recordLeftChannel = recordBuffer.floatChannelData[0];
  214. float *mixLeftChannel = self.mixBuffer.floatChannelData[0];
  215. float *mixRightChannel = self.mixBuffer.floatChannelData[1];
  216. // 先处理后续播放的buffer
  217. NSLog(@"------- volume change start");
  218. for (int frame = (int)startFrame; frame < minFrameCount; frame++) {
  219. if (self.stopChangeVolume) {
  220. NSLog(@"------- stop volume change");
  221. dispatch_semaphore_signal(self.volumeChangeSemaphore); // 释放信号量
  222. return;
  223. }
  224. float leftChannel = bgLeftChannel[frame];
  225. float rightChannel = bgRightChannel[frame];
  226. int recordFrame = (offsetTime < 0) ? (frame - offsetFrame) : (frame + offsetFrame);
  227. float recordData = (recordFrame >= 0 && recordFrame < recordBuffer.frameLength) ? recordLeftChannel[recordFrame] : 0;
  228. float mixLeftData = [self mixChannelData:leftChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
  229. float mixRightData = [self mixChannelData:rightChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
  230. mixLeftChannel[frame-self.startPosition] = MAX(-1.0, MIN(1.0, mixLeftData));
  231. mixRightChannel[frame-self.startPosition] = MAX(-1.0, MIN(1.0, mixRightData));
  232. }
  233. NSLog(@"------- volume change end");
  234. }
  235. - (void)scheduleBufferFromPosition:(AVAudioFramePosition)startPosition {
  236. self.stopMix = YES;
  237. self.startPosition = startPosition;
  238. dispatch_async(self.sourceQueue, ^{
  239. // 等待上一次的操作完成
  240. dispatch_semaphore_wait(self.mixChangeSemaphore, DISPATCH_TIME_FOREVER);
  241. self.stopMix = NO;
  242. [self mixBuffers:self.bgBuffer bgBufferVolume:self.bgVolume withRecordBuffer:self.recordBuffer recordVolume:self.recordVolume offset:self.offsetTime startPosition:(AVAudioFrameCount)startPosition];
  243. // 释放信号量,标记修改操作完成
  244. dispatch_semaphore_signal(self.mixChangeSemaphore);
  245. // 加载缓冲区
  246. [self.nodePlayer scheduleBuffer:self.mixBuffer atTime:nil options:AVAudioPlayerNodeBufferInterruptsAtLoop completionHandler:^{
  247. }];
  248. });
  249. }
  250. // 打断处理
  251. - (void)handleInterruption:(NSNotification *)notification {
  252. NSDictionary *info = notification.userInfo;
  253. AVAudioSessionInterruptionType type = [info[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
  254. if (type == AVAudioSessionInterruptionTypeBegan) {
  255. //Handle InterruptionBegan
  256. if (self.isInterrupt == NO) {
  257. // 停止播放
  258. if (self.nodePlayer.isPlaying) {
  259. [self.nodePlayer stop];
  260. }
  261. self.isInterrupt = YES;
  262. NSError *error = [[NSError alloc] initWithDomain:NSCocoaErrorDomain code:99999 userInfo:@{@"errorDesc" : @"播放被打断"}];
  263. [self sendInterruptError:error];
  264. }
  265. }
  266. else if (type == AVAudioSessionInterruptionTypeEnded) {
  267. AVAudioSessionInterruptionOptions options = [info[AVAudioSessionInterruptionOptionKey] unsignedIntegerValue];
  268. if (options == AVAudioSessionInterruptionOptionShouldResume && self.isInterrupt) {
  269. //Handle Resume
  270. NSError *error = nil;
  271. [[AVAudioSession sharedInstance] setActive:YES error:&error];
  272. if (error) {
  273. NSLog(@"------ error desc %@", error.description);
  274. }
  275. NSLog(@"---- 播放恢复");
  276. self.isInterrupt = NO;
  277. }
  278. }
  279. }
  280. - (void)handleRouteChange:(NSNotification *)notification {
  281. NSDictionary *info = notification.userInfo;
  282. AVAudioSessionRouteChangeReason reason = [info[AVAudioSessionRouteChangeReasonKey] unsignedIntegerValue];
  283. if (reason == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) {
  284. // 耳机拔出时暂停音频
  285. if (self.nodePlayer.isPlaying) {
  286. NSError *error = nil;
  287. [self sendInterruptError:error];
  288. }
  289. } else if (reason == AVAudioSessionRouteChangeReasonNewDeviceAvailable) {
  290. // 耳机插入时恢复音频
  291. if (self.nodePlayer.isPlaying) {
  292. NSError *error = nil;
  293. [self sendInterruptError:error];
  294. }
  295. }
  296. }
  297. - (void)sendInterruptError:(NSError *)error {
  298. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerDidError:error:)]) {
  299. [self.delegate enginePlayerDidError:self error:error];
  300. }
  301. }
  302. #pragma mark ------ play action
  303. - (void)changeRecordDelay:(NSInteger)delayMs {
  304. self.offsetTime = delayMs;
  305. }
  306. - (void)seekToTimePlay:(NSInteger)time {
  307. if (self.isInterrupt) {
  308. [self sendInterruptError:nil];
  309. return;
  310. }
  311. if (self.audioEngine.isRunning == NO) {
  312. [self startEngine];
  313. }
  314. if (self.audioEngine.isRunning) {
  315. [self seekAudioWithStartTime:time needPlay:YES];
  316. }
  317. }
  318. - (void)stopPlay {
  319. self.stopMix = YES;
  320. self.stopChangeVolume = YES;
  321. if (self.nodePlayer.isPlaying) {
  322. [self.nodePlayer stop];
  323. }
  324. [self stopTimer];
  325. }
  326. - (void)seekToTime:(NSInteger)time {
  327. if (self.isInterrupt) {
  328. [self sendInterruptError:nil];
  329. return;
  330. }
  331. if (self.audioEngine.isRunning == NO) {
  332. [self startEngine];
  333. }
  334. if (self.audioEngine.isRunning) {
  335. [self seekAudioWithStartTime:time needPlay:NO];
  336. }
  337. }
  338. - (void)seekAudioWithStartTime:(NSTimeInterval)startTime needPlay:(BOOL)needPlay {
  339. if (self.audioEngine.isRunning == NO) {
  340. [self startEngine];
  341. }
  342. if (self.audioEngine.isRunning) {
  343. if (self.nodePlayer.isPlaying) {
  344. [self.nodePlayer stop];
  345. }
  346. }
  347. // 停止修改音量循环
  348. self.stopChangeVolume = YES;
  349. AVAudioFramePosition startFrame = startTime / 1000.0 * self.audioFormat.sampleRate;
  350. // 跳转进度
  351. self.currentFrame = startFrame;
  352. [self scheduleBufferFromPosition:startFrame];
  353. if (needPlay) {
  354. [self.nodePlayer play];
  355. [self startTimer];
  356. }
  357. }
  358. // 调整偏移
  359. - (void)seekOffsetTime:(NSInteger)offsetTime {
  360. self.offsetTime = offsetTime;
  361. NSTimeInterval currentTime = [self getCurrentPlayTime];
  362. [self seekToTimePlay:currentTime];
  363. }
  364. - (void)freePlayer {
  365. if (self.nodePlayer.isPlaying) {
  366. [self stopPlay];
  367. }
  368. [self.audioEngine stop];
  369. }
  370. - (void)startTimer {
  371. [self.timer setFireDate:[NSDate distantPast]];
  372. }
  373. - (void)stopTimer {
  374. [self.timer setFireDate:[NSDate distantFuture]];//暂停计时器
  375. }
  376. #pragma mark ---- lazying
  377. - (dispatch_queue_t)sourceQueue {
  378. if (!_sourceQueue) {
  379. _sourceQueue = dispatch_queue_create("ks_MutilSourceQueue", DISPATCH_QUEUE_SERIAL);
  380. }
  381. return _sourceQueue;
  382. }
  383. - (NSTimer *)timer {
  384. if (!_timer) {
  385. __weak typeof(self)weakSelf = self;
  386. _timer = [NSTimer scheduledTimerWithTimeInterval:0.1 repeats:YES block:^(NSTimer * _Nonnull timer) {
  387. [weakSelf timeFunction];
  388. }];
  389. [[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSRunLoopCommonModes];
  390. [_timer setFireDate:[NSDate distantFuture]];
  391. }
  392. return _timer;
  393. }
  394. - (void)timeFunction {
  395. self.totalDuration = [self getTotalTime];
  396. NSTimeInterval currentTime = [self getCurrentPlayTime];
  397. float progress = currentTime/self.totalDuration;
  398. NSDate *date = [NSDate date];
  399. NSTimeInterval inteveral = [date timeIntervalSince1970];
  400. if (currentTime > self.totalDuration) {
  401. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayFinished:)]) {
  402. [self.delegate enginePlayFinished:self];
  403. }
  404. }
  405. else {
  406. if (self.delegate && [self.delegate respondsToSelector:@selector(updatePlayProgress:andTotalTime:andProgress:currentInterval:inPlayer:)]) {
  407. [self.delegate updatePlayProgress:currentTime andTotalTime:self.totalDuration andProgress:progress currentInterval:inteveral*1000 inPlayer:self];
  408. }
  409. }
  410. }
  411. - (NSTimeInterval)getCurrentPlayTime {
  412. AVAudioTime *nodeTime = [self.nodePlayer lastRenderTime];
  413. if (nodeTime && self.bgAudioFile) {
  414. AVAudioTime *playerTime = [self.nodePlayer playerTimeForNodeTime:nodeTime];
  415. AVAudioFramePosition currentFrame = self.currentFrame;
  416. if (playerTime) {
  417. self.sampleRate = [playerTime sampleRate];
  418. AVAudioFramePosition currentFrame = [playerTime sampleTime];
  419. if (currentFrame <= 0) {
  420. currentFrame = 0;
  421. }
  422. currentFrame += self.startPosition;
  423. self.currentFrame = currentFrame;
  424. }
  425. else {
  426. NSLog(@"播放已停止");
  427. }
  428. double elapsedSamples = (double)currentFrame;
  429. NSTimeInterval currentTime = elapsedSamples / self.sampleRate;
  430. // NSLog(@"当前时间----- %f",currentTime*1000);
  431. return currentTime*1000;
  432. }
  433. else {
  434. return 0;
  435. }
  436. }
  437. - (NSTimeInterval)getTotalTime {
  438. NSTimeInterval recordTotalDuration = (AVAudioFramePosition)self.audioFile.length * 1000.0 / self.audioFormat.sampleRate;
  439. return recordTotalDuration;
  440. }
  441. - (BOOL)isPlaying {
  442. if (self.nodePlayer) {
  443. return self.nodePlayer.isPlaying;
  444. }
  445. return NO;
  446. }
  447. @end