Home Reference Source

src/controller/audio-stream-controller.js

  1. /*
  2. * Audio Stream Controller
  3. */
  4.  
  5. import BinarySearch from '../utils/binary-search';
  6. import { BufferHelper } from '../utils/buffer-helper';
  7. import Demuxer from '../demux/demuxer';
  8. import Event from '../events';
  9. import * as LevelHelper from './level-helper';
  10. import TimeRanges from '../utils/time-ranges';
  11. import { ErrorTypes, ErrorDetails } from '../errors';
  12. import { logger } from '../utils/logger';
  13. import { findFragWithCC } from '../utils/discontinuities';
  14. import { FragmentState } from './fragment-tracker';
  15. import { ElementaryStreamTypes } from '../loader/fragment';
  16. import BaseStreamController, { State } from './base-stream-controller';
  17. import { MAX_START_GAP_JUMP } from './gap-controller';
  18. const { performance } = window;
  19.  
  20. const TICK_INTERVAL = 100; // how often to tick in ms
  21.  
  22. class AudioStreamController extends BaseStreamController {
constructor (hls, fragmentTracker) {
  // Subscribe to every event this controller reacts to; BaseStreamController
  // routes each Event name to the matching onXxx handler defined below.
  super(hls,
    Event.MEDIA_ATTACHED,
    Event.MEDIA_DETACHING,
    Event.AUDIO_TRACKS_UPDATED,
    Event.AUDIO_TRACK_SWITCHING,
    Event.AUDIO_TRACK_LOADED,
    Event.KEY_LOADED,
    Event.FRAG_LOADED,
    Event.FRAG_PARSING_INIT_SEGMENT,
    Event.FRAG_PARSING_DATA,
    Event.FRAG_PARSED,
    Event.ERROR,
    Event.BUFFER_RESET,
    Event.BUFFER_CREATED,
    Event.BUFFER_APPENDED,
    Event.BUFFER_FLUSHED,
    Event.INIT_PTS_FOUND);
  this.fragmentTracker = fragmentTracker; // shared tracker of fragment load/buffer state
  this.config = hls.config;
  this.audioCodecSwap = false;
  this._state = State.STOPPED; // backing field for the state getter/setter below
  this.initPTS = []; // per-continuity-counter init PTS values reported by the main (video) track
  this.waitingFragment = null; // loaded frag parked until the matching initPTS arrives
  this.videoTrackCC = null; // continuity counter of the last video initPTS received
}
  49.  
  50. // Signal that video PTS was found
  51. onInitPtsFound (data) {
  52. let demuxerId = data.id, cc = data.frag.cc, initPTS = data.initPTS;
  53. if (demuxerId === 'main') {
  54. // Always update the new INIT PTS
  55. // Can change due level switch
  56. this.initPTS[cc] = initPTS;
  57. this.videoTrackCC = cc;
  58. logger.log(`InitPTS for cc: ${cc} found from video track: ${initPTS}`);
  59.  
  60. // If we are waiting we need to demux/remux the waiting frag
  61. // With the new initPTS
  62. if (this.state === State.WAITING_INIT_PTS) {
  63. this.tick();
  64. }
  65. }
  66. }
  67.  
  68. startLoad (startPosition) {
  69. if (this.tracks) {
  70. let lastCurrentTime = this.lastCurrentTime;
  71. this.stopLoad();
  72. this.setInterval(TICK_INTERVAL);
  73. this.fragLoadError = 0;
  74. if (lastCurrentTime > 0 && startPosition === -1) {
  75. logger.log(`audio:override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(3)}`);
  76. this.state = State.IDLE;
  77. } else {
  78. this.lastCurrentTime = this.startPosition ? this.startPosition : startPosition;
  79. this.state = State.STARTING;
  80. }
  81. this.nextLoadPosition = this.startPosition = this.lastCurrentTime;
  82. this.tick();
  83. } else {
  84. this.startPosition = startPosition;
  85. this.state = State.STOPPED;
  86. }
  87. }
  88.  
  89. set state (nextState) {
  90. if (this.state !== nextState) {
  91. const previousState = this.state;
  92. this._state = nextState;
  93. logger.log(`audio stream:${previousState}->${nextState}`);
  94. }
  95. }
  96.  
  97. get state () {
  98. return this._state;
  99. }
  100.  
  101. doTick () {
  102. let pos, track, trackDetails, hls = this.hls, config = hls.config;
  103. // logger.log('audioStream:' + this.state);
  104. switch (this.state) {
  105. case State.ERROR:
  106. // don't do anything in error state to avoid breaking further ...
  107. case State.PAUSED:
  108. // don't do anything in paused state either ...
  109. case State.BUFFER_FLUSHING:
  110. break;
  111. case State.STARTING:
  112. this.state = State.WAITING_TRACK;
  113. this.loadedmetadata = false;
  114. break;
  115. case State.IDLE:
  116. const tracks = this.tracks;
  117. // audio tracks not received => exit loop
  118. if (!tracks) {
  119. break;
  120. }
  121.  
  122. // if video not attached AND
  123. // start fragment already requested OR start frag prefetch disable
  124. // exit loop
  125. // => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop
  126. if (!this.media &&
  127. (this.startFragRequested || !config.startFragPrefetch)) {
  128. break;
  129. }
  130.  
  131. // determine next candidate fragment to be loaded, based on current position and
  132. // end of buffer position
  133. // if we have not yet loaded any fragment, start loading from start position
  134. if (this.loadedmetadata) {
  135. pos = this.media.currentTime;
  136. } else {
  137. pos = this.nextLoadPosition;
  138. if (pos === undefined) {
  139. break;
  140. }
  141. }
  142. let media = this.mediaBuffer ? this.mediaBuffer : this.media;
  143. const videoBuffer = this.videoBuffer ? this.videoBuffer : this.media;
  144. const maxBufferHole = pos < config.maxBufferHole ? Math.max(MAX_START_GAP_JUMP, config.maxBufferHole) : config.maxBufferHole;
  145. const bufferInfo = BufferHelper.bufferInfo(media, pos, maxBufferHole);
  146. const mainBufferInfo = BufferHelper.bufferInfo(videoBuffer, pos, maxBufferHole);
  147. const bufferLen = bufferInfo.len;
  148. let bufferEnd = bufferInfo.end;
  149. const fragPrevious = this.fragPrevious;
  150. // ensure we buffer at least config.maxBufferLength (default 30s) or config.maxMaxBufferLength (default: 600s)
  151. // whichever is smaller.
  152. // once we reach that threshold, don't buffer more than video (mainBufferInfo.len)
  153. const maxConfigBuffer = Math.min(config.maxBufferLength, config.maxMaxBufferLength);
  154. const maxBufLen = Math.max(maxConfigBuffer, mainBufferInfo.len);
  155. const audioSwitch = this.audioSwitch;
  156. const trackId = this.trackId;
  157.  
  158. // if buffer length is less than maxBufLen try to load a new fragment
  159. if ((bufferLen < maxBufLen || audioSwitch) && trackId < tracks.length) {
  160. trackDetails = tracks[trackId].details;
  161. // if track info not retrieved yet, switch state and wait for track retrieval
  162. if (typeof trackDetails === 'undefined') {
  163. this.state = State.WAITING_TRACK;
  164. break;
  165. }
  166.  
  167. if (!audioSwitch && this._streamEnded(bufferInfo, trackDetails)) {
  168. this.hls.trigger(Event.BUFFER_EOS, { type: 'audio' });
  169. this.state = State.ENDED;
  170. return;
  171. }
  172.  
  173. // find fragment index, contiguous with end of buffer position
  174. let fragments = trackDetails.fragments,
  175. fragLen = fragments.length,
  176. start = fragments[0].start,
  177. end = fragments[fragLen - 1].start + fragments[fragLen - 1].duration,
  178. frag;
  179.  
  180. // When switching audio track, reload audio as close as possible to currentTime
  181. if (audioSwitch) {
  182. if (trackDetails.live && !trackDetails.PTSKnown) {
  183. logger.log('switching audiotrack, live stream, unknown PTS,load first fragment');
  184. bufferEnd = 0;
  185. } else {
  186. bufferEnd = pos;
  187. // if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
  188. if (trackDetails.PTSKnown && pos < start) {
  189. // if everything is buffered from pos to start or if audio buffer upfront, let's seek to start
  190. if (bufferInfo.end > start || bufferInfo.nextStart) {
  191. logger.log('alt audio track ahead of main track, seek to start of alt audio track');
  192. this.media.currentTime = start + 0.05;
  193. } else {
  194. return;
  195. }
  196. }
  197. }
  198. }
  199. if (trackDetails.initSegment && !trackDetails.initSegment.data) {
  200. frag = trackDetails.initSegment;
  201. } // eslint-disable-line brace-style
  202. // if bufferEnd before start of playlist, load first fragment
  203. else if (bufferEnd <= start) {
  204. frag = fragments[0];
  205. if (this.videoTrackCC !== null && frag.cc !== this.videoTrackCC) {
  206. // Ensure we find a fragment which matches the continuity of the video track
  207. frag = findFragWithCC(fragments, this.videoTrackCC);
  208. }
  209. if (trackDetails.live && frag.loadIdx && frag.loadIdx === this.fragLoadIdx) {
  210. // we just loaded this first fragment, and we are still lagging behind the start of the live playlist
  211. // let's force seek to start
  212. const nextBuffered = bufferInfo.nextStart ? bufferInfo.nextStart : start;
  213. logger.log(`no alt audio available @currentTime:${this.media.currentTime}, seeking @${nextBuffered + 0.05}`);
  214. this.media.currentTime = nextBuffered + 0.05;
  215. return;
  216. }
  217. } else {
  218. let foundFrag;
  219. let maxFragLookUpTolerance = config.maxFragLookUpTolerance;
  220. const fragNext = fragPrevious ? fragments[fragPrevious.sn - fragments[0].sn + 1] : undefined;
  221. let fragmentWithinToleranceTest = (candidate) => {
  222. // offset should be within fragment boundary - config.maxFragLookUpTolerance
  223. // this is to cope with situations like
  224. // bufferEnd = 9.991
  225. // frag[Ø] : [0,10]
  226. // frag[1] : [10,20]
  227. // bufferEnd is within frag[0] range ... although what we are expecting is to return frag[1] here
  228. // frag start frag start+duration
  229. // |-----------------------------|
  230. // <---> <--->
  231. // ...--------><-----------------------------><---------....
  232. // previous frag matching fragment next frag
  233. // return -1 return 0 return 1
  234. // logger.log(`level/sn/start/end/bufEnd:${level}/${candidate.sn}/${candidate.start}/${(candidate.start+candidate.duration)}/${bufferEnd}`);
  235. // Set the lookup tolerance to be small enough to detect the current segment - ensures we don't skip over very small segments
  236. let candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration);
  237. if ((candidate.start + candidate.duration - candidateLookupTolerance) <= bufferEnd) {
  238. return 1;
  239. } else if (candidate.start - candidateLookupTolerance > bufferEnd && candidate.start) {
  240. // if maxFragLookUpTolerance will have negative value then don't return -1 for first element
  241. return -1;
  242. }
  243.  
  244. return 0;
  245. };
  246.  
  247. if (bufferEnd < end) {
  248. if (bufferEnd > end - maxFragLookUpTolerance) {
  249. maxFragLookUpTolerance = 0;
  250. }
  251.  
  252. // Prefer the next fragment if it's within tolerance
  253. if (fragNext && !fragmentWithinToleranceTest(fragNext)) {
  254. foundFrag = fragNext;
  255. } else {
  256. foundFrag = BinarySearch.search(fragments, fragmentWithinToleranceTest);
  257. }
  258. } else {
  259. // reach end of playlist
  260. foundFrag = fragments[fragLen - 1];
  261. }
  262. if (foundFrag) {
  263. frag = foundFrag;
  264. start = foundFrag.start;
  265. // logger.log('find SN matching with pos:' + bufferEnd + ':' + frag.sn);
  266. if (fragPrevious && frag.level === fragPrevious.level && frag.sn === fragPrevious.sn) {
  267. if (frag.sn < trackDetails.endSN) {
  268. frag = fragments[frag.sn + 1 - trackDetails.startSN];
  269. logger.log(`SN just loaded, load next one: ${frag.sn}`);
  270. } else {
  271. frag = null;
  272. }
  273. }
  274. }
  275. }
  276. if (frag) {
  277. // logger.log(' loading frag ' + i +',pos/bufEnd:' + pos.toFixed(3) + '/' + bufferEnd.toFixed(3));
  278. if (frag.encrypted) {
  279. logger.log(`Loading key for ${frag.sn} of [${trackDetails.startSN} ,${trackDetails.endSN}],track ${trackId}`);
  280. this.state = State.KEY_LOADING;
  281. hls.trigger(Event.KEY_LOADING, { frag: frag });
  282. } else {
  283. // only load if fragment is not loaded or if in audio switch
  284. // we force a frag loading in audio switch as fragment tracker might not have evicted previous frags in case of quick audio switch
  285. this.fragCurrent = frag;
  286. if (audioSwitch || this.fragmentTracker.getState(frag) === FragmentState.NOT_LOADED) {
  287. logger.log(`Loading ${frag.sn}, cc: ${frag.cc} of [${trackDetails.startSN} ,${trackDetails.endSN}],track ${trackId}, currentTime:${pos},bufferEnd:${bufferEnd.toFixed(3)}`);
  288.  
  289. if (frag.sn !== 'initSegment') {
  290. this.startFragRequested = true;
  291. }
  292. if (Number.isFinite(frag.sn)) {
  293. this.nextLoadPosition = frag.start + frag.duration;
  294. }
  295.  
  296. hls.trigger(Event.FRAG_LOADING, { frag });
  297. this.state = State.FRAG_LOADING;
  298. }
  299. }
  300. }
  301. }
  302. break;
  303. case State.WAITING_TRACK:
  304. track = this.tracks[this.trackId];
  305. // check if playlist is already loaded
  306. if (track && track.details) {
  307. this.state = State.IDLE;
  308. }
  309.  
  310. break;
  311. case State.FRAG_LOADING_WAITING_RETRY:
  312. var now = performance.now();
  313. var retryDate = this.retryDate;
  314. media = this.media;
  315. var isSeeking = media && media.seeking;
  316. // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
  317. if (!retryDate || (now >= retryDate) || isSeeking) {
  318. logger.log('audioStreamController: retryDate reached, switch back to IDLE state');
  319. this.state = State.IDLE;
  320. }
  321. break;
  322. case State.WAITING_INIT_PTS:
  323. const videoTrackCC = this.videoTrackCC;
  324. if (this.initPTS[videoTrackCC] === undefined) {
  325. break;
  326. }
  327.  
  328. // Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
  329. const waitingFrag = this.waitingFragment;
  330. if (waitingFrag) {
  331. const waitingFragCC = waitingFrag.frag.cc;
  332. if (videoTrackCC !== waitingFragCC) {
  333. track = this.tracks[this.trackId];
  334. if (track.details && track.details.live) {
  335. logger.warn(`Waiting fragment CC (${waitingFragCC}) does not match video track CC (${videoTrackCC})`);
  336. this.waitingFragment = null;
  337. this.state = State.IDLE;
  338. }
  339. } else {
  340. this.state = State.FRAG_LOADING;
  341. this.onFragLoaded(this.waitingFragment);
  342. this.waitingFragment = null;
  343. }
  344. } else {
  345. this.state = State.IDLE;
  346. }
  347.  
  348. break;
  349. case State.STOPPED:
  350. case State.FRAG_LOADING:
  351. case State.PARSING:
  352. case State.PARSED:
  353. case State.ENDED:
  354. break;
  355. default:
  356. break;
  357. }
  358. }
  359.  
  360. onMediaAttached (data) {
  361. let media = this.media = this.mediaBuffer = data.media;
  362. this.onvseeking = this.onMediaSeeking.bind(this);
  363. this.onvended = this.onMediaEnded.bind(this);
  364. media.addEventListener('seeking', this.onvseeking);
  365. media.addEventListener('ended', this.onvended);
  366. let config = this.config;
  367. if (this.tracks && config.autoStartLoad) {
  368. this.startLoad(config.startPosition);
  369. }
  370. }
  371.  
  372. onMediaDetaching () {
  373. let media = this.media;
  374. if (media && media.ended) {
  375. logger.log('MSE detaching and video ended, reset startPosition');
  376. this.startPosition = this.lastCurrentTime = 0;
  377. }
  378.  
  379. // remove video listeners
  380. if (media) {
  381. media.removeEventListener('seeking', this.onvseeking);
  382. media.removeEventListener('ended', this.onvended);
  383. this.onvseeking = this.onvseeked = this.onvended = null;
  384. }
  385. this.media = this.mediaBuffer = this.videoBuffer = null;
  386. this.loadedmetadata = false;
  387. this.fragmentTracker.removeAllFragments();
  388. this.stopLoad();
  389. }
  390.  
  391. onAudioTracksUpdated (data) {
  392. logger.log('audio tracks updated');
  393. this.tracks = data.audioTracks;
  394. }
  395.  
  396. onAudioTrackSwitching (data) {
  397. // if any URL found on new audio track, it is an alternate audio track
  398. let altAudio = !!data.url;
  399. this.trackId = data.id;
  400.  
  401. this.fragCurrent = null;
  402. this.state = State.PAUSED;
  403. this.waitingFragment = null;
  404. // destroy useless demuxer when switching audio to main
  405. if (!altAudio) {
  406. if (this.demuxer) {
  407. this.demuxer.destroy();
  408. this.demuxer = null;
  409. }
  410. } else {
  411. // switching to audio track, start timer if not already started
  412. this.setInterval(TICK_INTERVAL);
  413. }
  414.  
  415. // should we switch tracks ?
  416. if (altAudio) {
  417. this.audioSwitch = true;
  418. // main audio track are handled by stream-controller, just do something if switching to alt audio track
  419. this.state = State.IDLE;
  420. }
  421. this.tick();
  422. }
  423.  
// Audio-track playlist (re)loaded: merge live details into the existing ones,
// store the new details on the track, and derive the initial start position
// when no fragment has been requested yet.
onAudioTrackLoaded (data) {
  let newDetails = data.details,
    trackId = data.id,
    track = this.tracks[trackId],
    duration = newDetails.totalduration,
    sliding = 0;

  logger.log(`track ${trackId} loaded [${newDetails.startSN},${newDetails.endSN}],duration:${duration}`);

  if (newDetails.live) {
    let curDetails = track.details;
    if (curDetails && newDetails.fragments.length > 0) {
      // we already have details for that level, merge them
      LevelHelper.mergeDetails(curDetails, newDetails);
      // first fragment start reflects how far the live window has slid
      sliding = newDetails.fragments[0].start;
      // TODO
      // this.liveSyncPosition = this.computeLivePosition(sliding, curDetails);
      if (newDetails.PTSKnown) {
        logger.log(`live audio playlist sliding:${sliding.toFixed(3)}`);
      } else {
        logger.log('live audio playlist - outdated PTS, unknown sliding');
      }
    } else {
      newDetails.PTSKnown = false;
      logger.log('live audio playlist - first load, unknown sliding');
    }
  } else {
    newDetails.PTSKnown = false;
  }
  track.details = newDetails;

  // compute start position
  if (!this.startFragRequested) {
    // compute start position if set to -1. use it straight away if value is defined
    if (this.startPosition === -1) {
      // first, check if start time offset has been set in playlist, if yes, use this value
      let startTimeOffset = newDetails.startTimeOffset;
      if (Number.isFinite(startTimeOffset)) {
        logger.log(`start time offset found in playlist, adjust startPosition to ${startTimeOffset}`);
        this.startPosition = startTimeOffset;
      } else {
        if (newDetails.live) {
          // live with no explicit offset: start at the configured live-edge distance
          this.startPosition = this.computeLivePosition(sliding, newDetails);
          logger.log(`compute startPosition for audio-track to ${this.startPosition}`);
        } else {
          this.startPosition = 0;
        }
      }
    }
    this.nextLoadPosition = this.startPosition;
  }
  // only switch back to IDLE state if we were waiting for track to start downloading a new fragment
  if (this.state === State.WAITING_TRACK) {
    this.state = State.IDLE;
  }

  // trigger handler right now
  this.tick();
}
  483.  
  484. onKeyLoaded () {
  485. if (this.state === State.KEY_LOADING) {
  486. this.state = State.IDLE;
  487. this.tick();
  488. }
  489. }
  490.  
// Fragment payload loaded. Init segments are stored and reported buffered
// immediately; media segments are pushed to the demuxer — unless the video
// initPTS for their continuity counter is still unknown, in which case the
// payload is parked and the controller enters WAITING_INIT_PTS.
onFragLoaded (data) {
  let fragCurrent = this.fragCurrent,
    fragLoaded = data.frag;
  // ignore stale loads: must match the fragment currently being loaded
  if (this.state === State.FRAG_LOADING &&
    fragCurrent &&
    fragLoaded.type === 'audio' &&
    fragLoaded.level === fragCurrent.level &&
    fragLoaded.sn === fragCurrent.sn) {
    let track = this.tracks[this.trackId],
      details = track.details,
      duration = details.totalduration,
      trackId = fragCurrent.level,
      sn = fragCurrent.sn,
      cc = fragCurrent.cc,
      audioCodec = this.config.defaultAudioCodec || track.audioCodec || 'mp4a.40.2',
      stats = this.stats = data.stats;
    if (sn === 'initSegment') {
      this.state = State.IDLE;

      // init segment needs no parsing/appending of its own
      stats.tparsed = stats.tbuffered = performance.now();
      details.initSegment.data = data.payload;
      this.hls.trigger(Event.FRAG_BUFFERED, { stats: stats, frag: fragCurrent, id: 'audio' });
      this.tick();
    } else {
      this.state = State.PARSING;
      // transmux the MPEG-TS data to ISO-BMFF segments
      this.appended = false;
      if (!this.demuxer) {
        this.demuxer = new Demuxer(this.hls, 'audio');
      }

      // Check if we have video initPTS
      // If not we need to wait for it
      let initPTS = this.initPTS[cc];
      let initSegmentData = details.initSegment ? details.initSegment.data : [];
      if (details.initSegment || initPTS !== undefined) {
        this.pendingBuffering = true;
        logger.log(`Demuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
        // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
        let accurateTimeOffset = false; // details.PTSKnown || !details.live;
        this.demuxer.push(data.payload, initSegmentData, audioCodec, null, fragCurrent, duration, accurateTimeOffset, initPTS);
      } else {
        logger.log(`unknown video PTS for continuity counter ${cc}, waiting for video PTS before demuxing audio frag ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
        // park the whole loaded-frag event; doTick replays it once initPTS is known
        this.waitingFragment = data;
        this.state = State.WAITING_INIT_PTS;
      }
    }
  }
  // any successful load clears the retry counter (even for stale loads)
  this.fragLoadError = 0;
}
  541.  
  542. onFragParsingInitSegment (data) {
  543. const fragCurrent = this.fragCurrent;
  544. const fragNew = data.frag;
  545. if (fragCurrent &&
  546. data.id === 'audio' &&
  547. fragNew.sn === fragCurrent.sn &&
  548. fragNew.level === fragCurrent.level &&
  549. this.state === State.PARSING) {
  550. let tracks = data.tracks, track;
  551.  
  552. // delete any video track found on audio demuxer
  553. if (tracks.video) {
  554. delete tracks.video;
  555. }
  556.  
  557. // include levelCodec in audio and video tracks
  558. track = tracks.audio;
  559. if (track) {
  560. track.levelCodec = track.codec;
  561. track.id = data.id;
  562. this.hls.trigger(Event.BUFFER_CODECS, tracks);
  563. logger.log(`audio track:audio,container:${track.container},codecs[level/parsed]=[${track.levelCodec}/${track.codec}]`);
  564. let initSegment = track.initSegment;
  565. if (initSegment) {
  566. let appendObj = { type: 'audio', data: initSegment, parent: 'audio', content: 'initSegment' };
  567. if (this.audioSwitch) {
  568. this.pendingData = [appendObj];
  569. } else {
  570. this.appended = true;
  571. // arm pending Buffering flag before appending a segment
  572. this.pendingBuffering = true;
  573. this.hls.trigger(Event.BUFFER_APPENDING, appendObj);
  574. }
  575. }
  576. // trigger handler right now
  577. this.tick();
  578. }
  579. }
  580. }
  581.  
// Demuxer produced remuxed audio samples for the current fragment: update frag
// PTS/DTS bookkeeping, handle the one-time buffer flush on audio-track switch,
// and append (or queue) the resulting moof/mdat payloads.
onFragParsingData (data) {
  const fragCurrent = this.fragCurrent;
  const fragNew = data.frag;
  // ignore stale parse results: must match the fragment currently parsing
  if (fragCurrent &&
    data.id === 'audio' &&
    data.type === 'audio' &&
    fragNew.sn === fragCurrent.sn &&
    fragNew.level === fragCurrent.level &&
    this.state === State.PARSING) {
    let trackId = this.trackId,
      track = this.tracks[trackId],
      hls = this.hls;

    // fall back to nominal duration when the demuxer could not compute end PTS/DTS
    if (!Number.isFinite(data.endPTS)) {
      data.endPTS = data.startPTS + fragCurrent.duration;
      data.endDTS = data.startDTS + fragCurrent.duration;
    }

    fragCurrent.addElementaryStream(ElementaryStreamTypes.AUDIO);

    logger.log(`parsed ${data.type},PTS:[${data.startPTS.toFixed(3)},${data.endPTS.toFixed(3)}],DTS:[${data.startDTS.toFixed(3)}/${data.endDTS.toFixed(3)}],nb:${data.nb}`);
    LevelHelper.updateFragPTSDTS(track.details, fragCurrent, data.startPTS, data.endPTS);

    let audioSwitch = this.audioSwitch, media = this.media, appendOnBufferFlush = false;
    // Only flush audio from old audio tracks when PTS is known on new audio track
    if (audioSwitch) {
      if (media && media.readyState) {
        let currentTime = media.currentTime;
        logger.log('switching audio track : currentTime:' + currentTime);
        if (currentTime >= data.startPTS) {
          logger.log('switching audio track : flushing all audio');
          this.state = State.BUFFER_FLUSHING;
          hls.trigger(Event.BUFFER_FLUSHING, { startOffset: 0, endOffset: Number.POSITIVE_INFINITY, type: 'audio' });
          // data will be appended by onBufferFlushed once the flush completes
          appendOnBufferFlush = true;
          // Lets announce that the initial audio track switch flush occur
          this.audioSwitch = false;
          hls.trigger(Event.AUDIO_TRACK_SWITCHED, { id: trackId });
        }
      } else {
        // no media attached/ready: nothing to flush
        // Lets announce that the initial audio track switch flush occur
        this.audioSwitch = false;
        hls.trigger(Event.AUDIO_TRACK_SWITCHED, { id: trackId });
      }
    }

    let pendingData = this.pendingData;

    // pendingData is created by onFragParsingInitSegment / onBufferFlushed;
    // its absence means samples arrived before any codec init data
    if (!pendingData) {
      logger.warn('Apparently attempt to enqueue media payload without codec initialization data upfront');
      hls.trigger(Event.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: null, fatal: true });
      return;
    }

    if (!this.audioSwitch) {
      [data.data1, data.data2].forEach(buffer => {
        if (buffer && buffer.length) {
          pendingData.push({ type: data.type, data: buffer, parent: 'audio', content: 'data' });
        }
      });
      if (!appendOnBufferFlush && pendingData.length) {
        pendingData.forEach(appendObj => {
          // only append in PARSING state (rationale is that an appending error could happen synchronously on first segment appending)
          // in that case it is useless to append following segments
          if (this.state === State.PARSING) {
            // arm pending Buffering flag before appending a segment
            this.pendingBuffering = true;
            this.hls.trigger(Event.BUFFER_APPENDING, appendObj);
          }
        });
        this.pendingData = [];
        this.appended = true;
      }
    }
    // trigger handler right now
    this.tick();
  }
}
  659.  
  660. onFragParsed (data) {
  661. const fragCurrent = this.fragCurrent;
  662. const fragNew = data.frag;
  663. if (fragCurrent &&
  664. data.id === 'audio' &&
  665. fragNew.sn === fragCurrent.sn &&
  666. fragNew.level === fragCurrent.level &&
  667. this.state === State.PARSING) {
  668. this.stats.tparsed = performance.now();
  669. this.state = State.PARSED;
  670. this._checkAppendedParsed();
  671. }
  672. }
  673.  
  674. onBufferReset () {
  675. // reset reference to sourcebuffers
  676. this.mediaBuffer = this.videoBuffer = null;
  677. this.loadedmetadata = false;
  678. }
  679.  
  680. onBufferCreated (data) {
  681. let audioTrack = data.tracks.audio;
  682. if (audioTrack) {
  683. this.mediaBuffer = audioTrack.buffer;
  684. this.loadedmetadata = true;
  685. }
  686. if (data.tracks.video) {
  687. this.videoBuffer = data.tracks.video.buffer;
  688. }
  689. }
  690.  
  691. onBufferAppended (data) {
  692. if (data.parent === 'audio') {
  693. const state = this.state;
  694. if (state === State.PARSING || state === State.PARSED) {
  695. // check if all buffers have been appended
  696. this.pendingBuffering = (data.pending > 0);
  697. this._checkAppendedParsed();
  698. }
  699. }
  700. }
  701.  
  702. _checkAppendedParsed () {
  703. // trigger handler right now
  704. if (this.state === State.PARSED && (!this.appended || !this.pendingBuffering)) {
  705. let frag = this.fragCurrent, stats = this.stats, hls = this.hls;
  706. if (frag) {
  707. this.fragPrevious = frag;
  708. stats.tbuffered = performance.now();
  709. hls.trigger(Event.FRAG_BUFFERED, { stats: stats, frag: frag, id: 'audio' });
  710. let media = this.mediaBuffer ? this.mediaBuffer : this.media;
  711. if (media) {
  712. logger.log(`audio buffered : ${TimeRanges.toString(media.buffered)}`);
  713. }
  714. if (this.audioSwitch && this.appended) {
  715. this.audioSwitch = false;
  716. hls.trigger(Event.AUDIO_TRACK_SWITCHED, { id: this.trackId });
  717. }
  718. this.state = State.IDLE;
  719. }
  720. this.tick();
  721. }
  722. }
  723.  
  724. onError (data) {
  725. let frag = data.frag;
  726. // don't handle frag error not related to audio fragment
  727. if (frag && frag.type !== 'audio') {
  728. return;
  729. }
  730.  
  731. switch (data.details) {
  732. case ErrorDetails.FRAG_LOAD_ERROR:
  733. case ErrorDetails.FRAG_LOAD_TIMEOUT:
  734. const frag = data.frag;
  735. // don't handle frag error not related to audio fragment
  736. if (frag && frag.type !== 'audio') {
  737. break;
  738. }
  739.  
  740. if (!data.fatal) {
  741. let loadError = this.fragLoadError;
  742. if (loadError) {
  743. loadError++;
  744. } else {
  745. loadError = 1;
  746. }
  747.  
  748. const config = this.config;
  749. if (loadError <= config.fragLoadingMaxRetry) {
  750. this.fragLoadError = loadError;
  751. // exponential backoff capped to config.fragLoadingMaxRetryTimeout
  752. const delay = Math.min(Math.pow(2, loadError - 1) * config.fragLoadingRetryDelay, config.fragLoadingMaxRetryTimeout);
  753. logger.warn(`AudioStreamController: frag loading failed, retry in ${delay} ms`);
  754. this.retryDate = performance.now() + delay;
  755. // retry loading state
  756. this.state = State.FRAG_LOADING_WAITING_RETRY;
  757. } else {
  758. logger.error(`AudioStreamController: ${data.details} reaches max retry, redispatch as fatal ...`);
  759. // switch error to fatal
  760. data.fatal = true;
  761. this.state = State.ERROR;
  762. }
  763. }
  764. break;
  765. case ErrorDetails.AUDIO_TRACK_LOAD_ERROR:
  766. case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:
  767. case ErrorDetails.KEY_LOAD_ERROR:
  768. case ErrorDetails.KEY_LOAD_TIMEOUT:
  769. // when in ERROR state, don't switch back to IDLE state in case a non-fatal error is received
  770. if (this.state !== State.ERROR) {
  771. // if fatal error, stop processing, otherwise move to IDLE to retry loading
  772. this.state = data.fatal ? State.ERROR : State.IDLE;
  773. logger.warn(`AudioStreamController: ${data.details} while loading frag, now switching to ${this.state} state ...`);
  774. }
  775. break;
  776. case ErrorDetails.BUFFER_FULL_ERROR:
  777. // if in appending state
  778. if (data.parent === 'audio' && (this.state === State.PARSING || this.state === State.PARSED)) {
  779. const media = this.mediaBuffer,
  780. currentTime = this.media.currentTime,
  781. mediaBuffered = media && BufferHelper.isBuffered(media, currentTime) && BufferHelper.isBuffered(media, currentTime + 0.5);
  782. // reduce max buf len if current position is buffered
  783. if (mediaBuffered) {
  784. const config = this.config;
  785. if (config.maxMaxBufferLength >= config.maxBufferLength) {
  786. // reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
  787. config.maxMaxBufferLength /= 2;
  788. logger.warn(`AudioStreamController: reduce max buffer length to ${config.maxMaxBufferLength}s`);
  789. }
  790. this.state = State.IDLE;
  791. } else {
  792. // current position is not buffered, but browser is still complaining about buffer full error
  793. // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
  794. // in that case flush the whole audio buffer to recover
  795. logger.warn('AudioStreamController: buffer full error also media.currentTime is not buffered, flush audio buffer');
  796. this.fragCurrent = null;
  797. // flush everything
  798. this.state = State.BUFFER_FLUSHING;
  799. this.hls.trigger(Event.BUFFER_FLUSHING, { startOffset: 0, endOffset: Number.POSITIVE_INFINITY, type: 'audio' });
  800. }
  801. }
  802. break;
  803. default:
  804. break;
  805. }
  806. }
  807.  
  808. onBufferFlushed () {
  809. let pendingData = this.pendingData;
  810. if (pendingData && pendingData.length) {
  811. logger.log('AudioStreamController: appending pending audio data after buffer flushed');
  812. pendingData.forEach(appendObj => {
  813. this.hls.trigger(Event.BUFFER_APPENDING, appendObj);
  814. });
  815. this.appended = true;
  816. this.pendingData = [];
  817. this.state = State.PARSED;
  818. } else {
  819. // move to IDLE once flush complete. this should trigger new fragment loading
  820. this.state = State.IDLE;
  821. // reset reference to frag
  822. this.fragPrevious = null;
  823. this.tick();
  824. }
  825. }
  826. }
  827. export default AudioStreamController;