// runtime.ts

import { ElMessage, ElMessageBox } from "element-plus";
import { reactive, ref, Ref } from "vue";
import * as RongIMLib from "@rongcloud/imlib-next";
import * as RTC from "@rongcloud/plugin-rtc";
import { debounce } from "throttle-debounce";
import request from "/src/helpers/request";
import { state } from "/src/state";
import mitt from "mitt";
import event, { LIVE_EVENT_MESSAGE } from "./event";
import dayjs from "dayjs";
import { removeToken } from "/src/utils/auth";
import qs from "query-string";
// import { SeatsCtrl } from './message-type'
type imConnectStatus = "connecting" | "connected" | "disconnect";
type VideoStatus =
  | "init"
  | "stream"
  | "liveing"
  | "stopped"
  | "error"
  | "loading";
export type TrackType = "microphone" | "microphone2" | "camera" | "screen";
let publishError = false;
type ActiveTracks = {
  [key in TrackType]: RTC.RCLocalTrack | null;
};
type DeviceStatus = {
  [key in TrackType]: "init" | "granted" | "denied" | "closed" | "none";
};
export const START_LIVE_TIME = "start-live-time";
export const START_LIVE_STATUS = "start-live-status";
export const VIDEO_DEVICE_ID = "video-deviceId";
export const AUDIO_DEVICE_ID = "audio-deviceId";
export const AUDIO_DEVICE_ID2 = "audio-deviceId2";
export const AUDIO_DEVICE_VOLUME = "audio-device-volume";
const runtime = reactive({
  /** Room id */
  roomUid: sessionStorage.getItem("roomUid") || "",
  /** IM connection status */
  imConnectStatus: "connecting" as imConnectStatus,
  // Screen sharing status
  screenShareStatus: false,
  // Video element
  videoRef: ref<HTMLVideoElement | null>(null),
  // RTC client instance
  rtcClient: null as RTC.RCRTCClient | null,
  /** Joined room instance */
  joinedRoom: null as RTC.RCLivingRoom | null,
  // Tracks
  mediaStreamTrack: [] as MediaStreamTrack[],
  // Media stream
  mediaStreams: null as MediaStream | null,
  // Video status
  videoStatus: "init" as VideoStatus,
  // Microphone device list
  microphones: [] as MediaDeviceInfo[],
  // Camera device list
  cameras: [] as MediaDeviceInfo[],
  // Selected camera device
  selectedCamera: null as MediaDeviceInfo | null,
  // Selected microphone device
  selectedMicrophone: null as MediaDeviceInfo | null,
  // Selected system audio device
  selectedMicrophone2: null as MediaDeviceInfo | null,
  // Like count
  likeCount: 0,
  // Viewer count
  lookCount: 0,
  // Like count at the last sync
  lastLikeCount: 0,
  /** Currently active tracks */
  activeTracks: {} as ActiveTracks,
  /** Whether co-streaming (seat requests) is allowed */
  allowSeatsCtrl: true,
  /** Whether chat is allowed */
  allowChatCtrl: true,
  /** Current device acquisition (permission) status */
  deviceStatus: {
    microphone: "init",
    microphone2: "init",
    camera: "init",
    screen: "init",
  } as DeviceStatus,
  syncLikeTimer: null as any,
});
export default runtime;
// c9kqb3rdc451j: test environment appkey
const RONG_IM_TOKEN = "c9kqb3rdc451j";
// const RONG_IM_TOKEN = "6tnym1br6pv07";
RongIMLib.init({
  appkey: RONG_IM_TOKEN,
});
// Register custom message types
// Controls whether co-streaming (seat requests) is allowed
const MessageSeatsCtrl = RongIMLib.registerMessageType(
  "RC:Chatroom:SeatsCtrl",
  true,
  true
);
// Controls whether chat is allowed
const MessageChatBan = RongIMLib.registerMessageType(
  "RC:Chatroom:ChatBan",
  true,
  true
);
// Seat (co-streaming) request message
const MessageSeatApply = RongIMLib.registerMessageType(
  "RC:Chatroom:SeatApply",
  true,
  true
);
// Response to a seat request
const MessageSeatResponse = RongIMLib.registerMessageType(
  "RC:Chatroom:SeatResponse",
  true,
  true
);
// Syncs the room viewer count
const MessageMemberCount = RongIMLib.registerMessageType(
  "RC:Chatroom:MemberCount",
  true,
  true
);
// Syncs the current seat (co-streaming) members
const MessageSeatMember = RongIMLib.registerMessageType(
  "RC:Chatroom:SeatMember",
  true,
  true
);
// Syncs the current like count
const MessageLikeCount = RongIMLib.registerMessageType(
  "RC:Chatroom:LikeCount",
  true,
  true
);
// Sent when the teacher enters the room (also sent on page refresh)
const MessageWelcome = RongIMLib.registerMessageType(
  "RC:Chatroom:Welcome",
  true,
  true
);
// Pause-live message
const MessagePauseLive = RongIMLib.registerMessageType(
  "RC:Chatroom:PauseLive",
  true,
  true
);
type MessageProps = {
  messageType:
    | "RC:Chatroom:Welcome"
    | "RC:TxtMsg"
    | "RC:Chatroom:Barrage"
    | "RC:Chatroom:Like"
    | "RC:Chatroom:SeatsCtrl"
    | "RC:Chatroom:ChatBan"
    | "RC:Chatroom:SeatApply";
  content: any;
  senderUserId: any;
};
type MessageEvent = {
  messages: MessageProps[];
};
export const runtimeEvent = mitt();
const Events = RongIMLib.Events;
/**
 * Listen for message notifications
 */
const { MESSAGES, ...RestMessage } = Events;
RongIMLib.addEventListener(Events.MESSAGES, (evt: MessageEvent) => {
  console.log(evt, "收到消息");
  const { messages } = evt;
  for (const message of messages) {
    // console.log(LIVE_EVENT_MESSAGE[message.messageType], message)
    const isSelf =
      message.senderUserId &&
      Number(message.senderUserId) === state.user?.speakerId;
    if (!isSelf && LIVE_EVENT_MESSAGE[message.messageType]) {
      event.emit(LIVE_EVENT_MESSAGE[message.messageType], {
        ...message.content,
        $EventMessage: message,
      });
    }
  }
});
for (const Message of Object.values(RestMessage)) {
  RongIMLib.addEventListener(Message, (evt: any) => {
    console.log(Message, evt);
    // chatroomDestroyed
    event.emit(Message, { $EventMessage: null });
  });
}
/**
 * Listen for IM connection status changes
 */
RongIMLib.addEventListener(Events.CONNECTING, () => {
  console.log("connecting");
  runtime.imConnectStatus = "connecting";
});
RongIMLib.addEventListener(Events.CONNECTED, () => {
  console.log("connected");
  runtime.imConnectStatus = "connected";
});
RongIMLib.addEventListener(Events.DISCONNECT, () => {
  console.log("disconnect");
  runtime.imConnectStatus = "disconnect";
  closeLive(true, "IM");
  const search = qs.parse(window.location.search);
  console.log(search, "disconnect");
  window.location.href =
    window.location.origin +
    "/live?" +
    qs.stringify({ ...search, time: new Date().getTime() });
  // event.emit(LIVE_EVENT_MESSAGE["RC:ForcedOffline"])
  // if (runtime.joinedRoom && runtime.videoStatus === 'liveing') {
  //   closeLive(true, 'IM')
  // }
});
export const connectIM = async (imToken: string) => {
  const user = await RongIMLib.connect(imToken);
  runtime.rtcClient = RongIMLib.installPlugin(RTC.installer, {});
  console.log("connect success", user.data?.userId);
  return user;
};
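// Usage sketch (illustrative, not executed here): connect with an IM token issued by
// the backend, then join the living room. `imToken`, `roomId`, and `livingType`
// (an RTC.RCLivingType value) are assumed to be supplied by the caller.
//
//   const user = await connectIM(imToken);
//   const join = await joinRoom(roomId, livingType, null);
//   runtime.joinedRoom = join.room || null;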
/**
 * Set the playback volume
 * @param value Volume level (0-100)
 */
export const setVolume = (value: number) => {
  localStorage.setItem(AUDIO_DEVICE_VOLUME, value.toString());
  if (runtime.videoRef) {
    runtime.videoRef.volume = value / 100;
  }
  const tempM = runtime.activeTracks.microphone;
  // @ts-ignore access the internal audio element of the microphone track
  const tempE = tempM && tempM._element;
  if (tempE) {
    tempE.volume = value / 100;
  }
};
/**
 * Attach a media stream to a video element
 */
export const setVideoSrcObject = (
  video: HTMLVideoElement | null,
  mediaStreams: MediaStream | null
) => {
  if (video && mediaStreams) {
    video.srcObject = mediaStreams;
    video.onloadedmetadata = () => {
      video.play();
    };
  }
};
/**
 * Start screen sharing
 */
export const shareScreenVideo = async () => {
  if (runtime.screenShareStatus) {
    ElMessage.error("正在屏幕共享中,请先关闭屏幕共享");
    return;
  }
  if (
    runtime.rtcClient &&
    !runtime.screenShareStatus &&
    runtime.videoStatus === "liveing"
  ) {
    let screenTrack: RTC.RCLocalTrack | undefined;
    try {
      screenTrack = await getTrack("screen");
    } catch (error) {
      ElMessage.error("屏幕分享失败,请检查是否授权");
    }
    if (!screenTrack) {
      return;
    }
    const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack;
    // removeTrack([oldTrack], 'camera')
    if (oldTrack) {
      await runtime.joinedRoom?.unpublish([oldTrack]);
    }
    setTrack([screenTrack as RTC.RCLocalTrack], "screen");
    if (runtime.videoRef) {
      screenTrack?.play(runtime.videoRef);
      runtime.screenShareStatus = true;
    }
    screenTrack?.on(
      RTC.RCLocalTrack.EVENT_LOCAL_TRACK_END,
      (track: RTC.RCLocalTrack) => {
        // The share ended (e.g. via the browser UI): restore the camera track
        runtime.screenShareStatus = false;
        track.destroy();
        // removeTrack([track], 'screen')
        if (oldTrack) {
          setTrack([oldTrack as RTC.RCLocalTrack], "camera");
          if (runtime.videoRef) {
            oldTrack.play(runtime.videoRef);
          }
        }
        // setVideoSrcObject(runtime.videoRef, this.mediaStreams)
      }
    );
  }
};
/**
 * Revoke access to the screen-share stream, which stops screen sharing
 */
export const closeShareScreenVideo = () => {
  if (document.pictureInPictureElement) {
    document.exitPictureInPicture();
  }
  const screenTrack = runtime.activeTracks.screen as RTC.RCLocalTrack;
  if (screenTrack) {
    screenTrack.destroy();
    runtime.screenShareStatus = false;
  }
  const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack;
  if (oldTrack) {
    setTrack([oldTrack as RTC.RCLocalTrack], "camera");
    if (runtime.videoRef) {
      oldTrack.play(runtime.videoRef);
    }
  }
};
export const createVideoPictureInPicture = (ms: MediaStream) => {
  const video = document.createElement("video");
  video.style.display = "none";
  document.body.append(video);
  video.srcObject = ms;
  video.play();
  setTimeout(() => {
    video.requestPictureInPicture();
  }, 1000);
};
/**
 * Toggle screen sharing on or off
 */
export const toggleShareScreenVideo = async () => {
  if (runtime.screenShareStatus) {
    try {
      await ElMessageBox.confirm("是否确认取消屏幕共享?");
      closeShareScreenVideo();
    } catch (error) {}
  } else {
    shareScreenVideo();
    if (runtime.activeTracks.camera) {
      // @ts-ignore keep the camera visible in a picture-in-picture window while sharing
      createVideoPictureInPicture(runtime.activeTracks.camera._msStream);
    }
    console.log(runtime.activeTracks.camera);
    // runtime.videoRef?.requestPictureInPicture()
  }
};
/**
 * Get all audio input devices
 * @returns {Promise<MediaDeviceInfo[]>}
 */
export const getMicrophones = async () => {
  const microphones = await RTC.device.getMicrophones();
  runtime.microphones = microphones;
  return microphones;
};
/**
 * Get all video input devices
 * @returns {Promise<MediaDeviceInfo[]>}
 */
export const getCameras = async () => {
  const cameras = await RTC.device.getCameras();
  runtime.cameras = cameras;
  return cameras;
};
/**
 * Set the current camera device
 * @param camera MediaDeviceInfo
 */
export const setSelectCamera = async (camera: MediaDeviceInfo) => {
  runtime.selectedCamera = camera;
  localStorage.setItem(VIDEO_DEVICE_ID, camera.deviceId);
  const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack;
  if (oldTrack) {
    await removeTrack([oldTrack], "camera", oldTrack.isPublished());
  }
  const track = await getTrack("camera");
  setTrack([track], "camera", runtime.videoStatus === "liveing");
};
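// Usage sketch (illustrative): restore the previously selected camera on startup,
// using this module's VIDEO_DEVICE_ID key.
//
//   const cameras = await getCameras();
//   const savedId = localStorage.getItem(VIDEO_DEVICE_ID);
//   const camera = cameras.find((c) => c.deviceId === savedId) || cameras[0];
//   if (camera) await setSelectCamera(camera);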
/**
 * Set the current microphone device
 * @param microphone MediaDeviceInfo
 */
export const setSelectMicrophone = async (microphone: MediaDeviceInfo) => {
  runtime.selectedMicrophone = microphone;
  localStorage.setItem(AUDIO_DEVICE_ID, microphone.deviceId);
  const oldTrack = runtime.activeTracks.microphone as RTC.RCLocalTrack;
  if (oldTrack) {
    await removeTrack([oldTrack], "microphone", oldTrack.isPublished());
  }
  const track = await getTrack("microphone");
  setTrack([track], "microphone", runtime.videoStatus === "liveing");
  runtimeEvent.emit("microphoneChange", microphone);
};
/**
 * Set the current system audio (second microphone) device
 * @param microphone MediaDeviceInfo
 */
export const setSelectMicrophone2 = async (microphone: MediaDeviceInfo) => {
  runtime.selectedMicrophone2 = microphone;
  localStorage.setItem(AUDIO_DEVICE_ID2, microphone.deviceId);
  const oldTrack = runtime.activeTracks.microphone2 as RTC.RCLocalTrack;
  if (oldTrack) {
    await removeTrack([oldTrack], "microphone2", oldTrack.isPublished());
  }
  const track = await getTrack("microphone2");
  setTrack([track], "microphone2", runtime.videoStatus === "liveing");
  runtimeEvent.emit("microphone2Change", microphone);
};
type TrackResult = {
  code: RTC.RCRTCCode;
  track:
    | RTC.RCMicphoneAudioTrack
    | RTC.RCCameraVideoTrack
    | RTC.RCScreenVideoTrack
    | undefined;
};
export const getTrack = async (
  trackType: TrackType
): Promise<RTC.RCLocalTrack> => {
  let res: TrackResult | undefined;
  let Track: RTC.RCLocalTrack | null = null;
  if (trackType === "microphone") {
    res = (await runtime.rtcClient?.createMicrophoneAudioTrack("RongCloudRTC", {
      micphoneId: runtime.selectedMicrophone?.deviceId,
      sampleRate:
        Number(localStorage.getItem("sampleRate") || "44100") || 44100,
    })) as TrackResult;
  } else if (trackType === "microphone2") {
    res = (await runtime.rtcClient?.createMicrophoneAudioTrack("RongCloudRTC", {
      micphoneId: runtime.selectedMicrophone2?.deviceId,
    })) as TrackResult;
  } else if (trackType === "camera") {
    // const sm = await requireMedia({
    //   audio: true,
    //   video: true,
    // })
    // console.log(sm.getTracks())
    res = (await runtime.rtcClient?.createCameraVideoTrack("RongCloudRTC", {
      cameraId: runtime.selectedCamera?.deviceId,
      faceMode: "user",
      frameRate: RTC.RCFrameRate.FPS_30,
      resolution: RTC.RCResolution.W1920_H1080,
    })) as TrackResult;
  } else {
    res = (await runtime?.rtcClient?.createScreenVideoTrack("screenshare", {
      frameRate: RTC.RCFrameRate.FPS_30,
      resolution: RTC.RCResolution.W1920_H1080,
    })) as TrackResult;
  }
  Track = res?.track as RTC.RCLocalTrack;
  if (trackType === "camera" && !runtime.cameras.length) {
    runtime.deviceStatus[trackType] = "none";
  } else if (
    (trackType === "microphone" || trackType === "microphone2") &&
    !runtime.microphones.length
  ) {
    runtime.deviceStatus[trackType] = "none";
  } else if (trackType === "screen" && !runtime.screenShareStatus) {
    runtime.deviceStatus[trackType] = "none";
  }
  if (res?.code === RTC.RCRTCCode.PERMISSION_DENIED) {
    runtime.deviceStatus[trackType] = "denied";
  } else {
    runtime.deviceStatus[trackType] = "granted";
  }
  // if (res.code !== RTC.RCRTCCode.SUCCESS || !Track) {
  //   throw new Error('获取数据流失败')
  // }
  if (res?.code === RTC.RCRTCCode.GET_DISPLAY_MEDIA_FAILED) {
    throw new Error("获取屏幕共享失败");
  }
  return Track;
};
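// Usage sketch (illustrative): open a local camera preview before going live. Setting
// videoStatus to "stream" here is an assumption about how the caller signals that the
// preview is ready (startLive below requires videoStatus === "stream").
//
//   const cameraTrack = await getTrack("camera");
//   await setTrack([cameraTrack], "camera", false); // play locally, do not publish yet
//   runtime.videoStatus = "stream";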
export type OnAudioProcess = (num: number, deviceId: string) => void;
/**
 * Measure the input level of an audio stream and report it (debounced) to the callback
 */
export const listenAudioChecker = (
  stream: MediaStream,
  onaudioprocess: OnAudioProcess
) => {
  const audioContext = window.AudioContext;
  const ac = new audioContext();
  const liveSource = ac.createMediaStreamSource(stream);
  const analyser = ac.createAnalyser();
  liveSource.connect(analyser);
  analyser.fftSize = 2048;
  analyser.minDecibels = -90;
  analyser.maxDecibels = -10;
  analyser.smoothingTimeConstant = 0.85;
  // setInterval(() => {
  //   getVoiceSize(analyser)
  // }, 50)
  // return analyser
  const levelChecker = ac.createScriptProcessor(4096, 1, 1);
  levelChecker.connect(ac.destination);
  liveSource.connect(levelChecker);
  // Create the debounced handler once so repeated audioprocess events are coalesced
  const handleAudioProcess = debounce(200, (e: AudioProcessingEvent) => {
    const buffer = e.inputBuffer.getChannelData(0);
    let maxVal = 0;
    for (let i = 0; i < buffer.length; i++) {
      if (maxVal < buffer[i]) {
        maxVal = buffer[i];
      }
    }
    // console.log(stream.getAudioTracks()[0])
    onaudioprocess(maxVal * 100, stream.getAudioTracks()[0]?.label);
    // console.log(maxVal * 100, stream.getAudioTracks()[0]?.label)
    // console.log(e.inputBuffer.getChannelData(0))
  });
  levelChecker.onaudioprocess = (e) => handleAudioProcess(e);
  return levelChecker;
};
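// Usage sketch (illustrative): feed a raw microphone stream into the level checker and
// log its level; getUserMedia is the standard browser API, and the callback's second
// argument is the track label passed above.
//
//   const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
//   const checker = listenAudioChecker(stream, (level, label) => {
//     console.log(`"${label}" level:`, Math.round(level));
//   });
//   // Tear down when done:
//   // checker.disconnect();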
const getVoiceSize = (analyser: AnalyserNode) => {
  const dataArray = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteFrequencyData(dataArray);
  const data = dataArray.slice(100, 1000);
  const sum = data.reduce((a, b) => a + b);
  // for(var i = 0; i < analyser.frequencyBinCount; i++) {
  //   var v = dataArray[i] / 128.0
  //   console.log(v)
  // }
  console.log(sum, 128 * analyser.frequencyBinCount);
};
/**
 * Add tracks; also updates the current video element and the published streams
 * @param tracks
 */
export const setTrack = async (
  tracks: RTC.RCLocalTrack[],
  trackType: TrackType,
  needPublish = true
) => {
  const filterTracks = tracks.filter((track) => !!track);
  for (const track of filterTracks) {
    // @ts-ignore
    // await runtime.mediaStreams?.addTrack(track._msTrack)
    if (trackType === "microphone") {
      // track?.play()
    }
    runtime.activeTracks[trackType] = track;
  }
  if (trackType === "camera" && runtime.videoRef) {
    runtime.activeTracks[trackType]?.play(runtime.videoRef);
  }
  if (needPublish) {
    // console.log('publish', runtime.joinedRoom)
    try {
      const res = await runtime.joinedRoom?.publish(
        tracks.filter((track) => !!track)
      );
      console.log(res, "pub");
      if (res?.code !== RTC.RCRTCCode.SUCCESS && !publishError) {
        publishError = true;
        window.onbeforeunload = null;
        ElMessageBox.alert("视频推流失败,请刷新页面重新开启?", "提示", {
          confirmButtonText: "确定",
          callback: () => {
            publishError = false;
            window.location.reload();
          },
        });
      }
    } catch (err: any) {
      console.log(err, "err");
    }
  }
};
/**
 * Remove tracks; also updates the current video element and the published streams
 * @param tracks
 */
export const removeTrack = async (
  tracks: RTC.RCLocalTrack[],
  trackType: TrackType,
  needPublish = true
) => {
  const filterTracks = tracks.filter((track) => !!track);
  if (needPublish) {
    await runtime.joinedRoom?.unpublish(filterTracks);
  }
  for (const track of filterTracks) {
    // @ts-ignore
    // await runtime.mediaStreams?.removeTrack(track._msTrack)
    // runtime.activeTracks[trackType].destroy()
    // console.log(runtime.activeTracks[trackType])
    track?.destroy();
    runtime.activeTracks[trackType] = null;
  }
};
export const joinIMRoom = async (
  roomId: string,
  type: RTC.RCLivingType,
  listenEvents: RTC.IRoomEventListener | null
) => {
  await RongIMLib.joinChatRoom(roomId, { count: -1 });
  const join = await runtime.rtcClient?.joinLivingRoom(roomId, type);
  if (join?.code !== RTC.RCRTCCode.SUCCESS) throw Error("加入房间失败");
  join.room?.registerRoomEventListener(listenEvents);
  return join;
};
export const joinRoom = async (
  roomId: string,
  type: RTC.RCLivingType,
  listenEvents: RTC.IRoomEventListener | null
) => {
  // try {
  //   await request.get('/api-web/imLiveBroadcastRoom/joinRoom', {
  //     params: {
  //       roomUid: runtime.roomUid,
  //       userId: state.user?.speakerId,
  //     }
  //   })
  // } catch (error) {}
  return await joinIMRoom(roomId, type, listenEvents);
};
/**
 * Start the live stream
 */
export const startLive = async (resetTime = true) => {
  if (runtime.videoStatus !== "stream") {
    const errorMessage = "请确定摄像头已经开启";
    ElMessage.error(errorMessage);
    throw Error(errorMessage);
  }
  const room = runtime.joinedRoom;
  if (room) {
    // const microphoneAudioTrack = await getTrack('microphone')
    // const cameraVideoTrack = await getTrack('camera')
    await setTrack(
      [runtime.activeTracks.camera as RTC.RCLocalVideoTrack],
      "camera"
    );
    await setTrack(
      [runtime.activeTracks.microphone as RTC.RCLocalAudioTrack],
      "microphone"
    );
    // const builder = await runtime.joinedRoom?.getMCUConfigBuilder()
    // // @ts-ignore
    // await builder.setOutputVideoRenderMode?.(RTC.MixVideoRenderMode.WHOLE)
    // // @ts-ignore
    // await builder.flush()
    // console.log(runtime.activeTracks)
    await request.get("/api-web/imLiveBroadcastRoom/opsLiveVideo", {
      params: {
        type: "1",
        roomUid: runtime.roomUid,
        userId: state.user?.speakerId,
      },
    });
    runtime.videoStatus = "liveing";
  }
  if (resetTime) {
    sessionStorage.setItem(START_LIVE_TIME, dayjs().valueOf().toString());
  }
  sessionStorage.setItem(START_LIVE_STATUS, "liveing");
};
/**
 * Stop the live stream
 */
export const closeLive = async (
  remove = false,
  source: "IM" | "Logout" = "Logout"
) => {
  // removeMedia(runtime.mediaStreams, runtime.mediaStreamTrack)
  try {
    if (source === "Logout") {
      await request.get("/api-web/imLiveBroadcastRoom/opsLiveVideo", {
        params: {
          type: "2",
          roomUid: runtime.roomUid,
          userId: state.user?.speakerId,
        },
      });
    }
  } catch {}
  sessionStorage.removeItem(START_LIVE_TIME);
  sessionStorage.removeItem(START_LIVE_STATUS);
  // When closing the room it is enough to just unpublish the tracks
  for (const key in runtime.activeTracks) {
    if (Object.prototype.hasOwnProperty.call(runtime.activeTracks, key)) {
      const track = runtime.activeTracks[key as TrackType] as RTC.RCLocalTrack;
      if (track) {
        await runtime.joinedRoom?.unpublish([track]);
        if (remove) {
          await removeTrack([track], key as TrackType);
        }
      }
    }
  }
  runtime.videoStatus = "stream";
};
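// Usage sketch (illustrative): typical broadcast lifecycle once devices are open and
// the room is joined (startLive requires videoStatus === "stream").
//
//   await startLive();   // publish camera + microphone and notify the backend (type "1")
//   // ... broadcasting ...
//   await closeLive();   // unpublish tracks and notify the backend (type "2")
//   await leaveIMRoom(); // leave the RTC room and disconnect IM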
/**
 * Sync the like count
 */
export const loopSyncLike = async () => {
  // (runtime.likeCount !== runtime.lastLikeCount || runtime.likeCount === 0) &&
  if (state.user && runtime.joinedRoom) {
    try {
      await request.get("/api-web/imLiveBroadcastRoom/syncLike", {
        hideLoading: true,
        hideMessage: true,
        params: {
          likeNum: runtime.likeCount,
          roomUid: runtime.roomUid,
        },
      });
      runtime.lastLikeCount = runtime.likeCount;
      sendMessage({ count: runtime.likeCount }, "LikeCount");
    } catch (error) {}
  }
  runtime.syncLikeTimer = setTimeout(() => {
    loopSyncLike();
  }, 1000 * 10);
};
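// Usage sketch (illustrative): start the 10-second like-count sync loop after joining
// the room, and clear the timer on teardown.
//
//   loopSyncLike();
//   // on teardown:
//   clearTimeout(runtime.syncLikeTimer);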
type SendMessageType =
  | "text"
  | "image"
  | "audio"
  | "video"
  | "file"
  | "SeatsCtrl"
  | "ChatBan"
  | "SeatApply"
  | "SeatResponse"
  | "MemberCount"
  | "SeatMember"
  | "LikeCount"
  | "Welcome"
  | "PauseLive";
export const getSendMessageUser = () => {
  return {
    id: String(state.user?.speakerId),
    name: state.user?.speakerName,
    userId: String(state.user?.speakerId),
    userName: state.user?.speakerName,
  };
};
/**
 * @param msg Message content
 * @param type Message type
 * @returns undefined, or the result of sending the message
 */
export const sendMessage = async (msg: any, type: SendMessageType = "text") => {
  let message: RongIMLib.BaseMessage<unknown> | null = null;
  if (!msg) return;
  const conversation = {
    conversationType: RongIMLib.ConversationType.CHATROOM,
    targetId: runtime.joinedRoom?.getRoomId() as string,
  };
  if (type === "text") {
    message = new RongIMLib.TextMessage({
      user: getSendMessageUser(),
      content: msg,
    });
  } else if (type === "SeatsCtrl") {
    message = new MessageSeatsCtrl(msg);
  } else if (type === "ChatBan") {
    message = new MessageChatBan(msg);
  } else if (type === "SeatApply") {
    message = new MessageSeatApply(msg);
  } else if (type === "SeatResponse") {
    message = new MessageSeatResponse(msg);
  } else if (type === "MemberCount") {
    message = new MessageMemberCount(msg);
  } else if (type === "SeatMember") {
    message = new MessageSeatMember(msg);
  } else if (type === "LikeCount") {
    message = new MessageLikeCount(msg);
  } else if (type === "Welcome") {
    message = new MessageWelcome(msg);
  } else if (type === "PauseLive") {
    message = new MessagePauseLive(msg);
  }
  if (!message) return;
  console.log(message);
  return await RongIMLib.sendMessage(conversation, message);
};
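// Usage sketch (illustrative): send a plain chat message and a custom like-count sync.
// The { count } payload mirrors the one loopSyncLike sends above; payload shapes for
// the other custom types depend on what the receiving clients expect.
//
//   await sendMessage("hello", "text");
//   await sendMessage({ count: runtime.likeCount }, "LikeCount");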
export const openDevice = async (trackType: TrackType, needPublish = true) => {
  if (
    (trackType === "microphone" || trackType === "microphone2") &&
    runtime.activeTracks[trackType]
  ) {
    runtime.activeTracks[trackType]?.unmute();
  } else {
    const track = await getTrack(trackType);
    await setTrack([track], trackType, needPublish);
    if (runtime.videoRef) {
      track?.play(runtime.videoRef);
    }
  }
};
export const closeDevice = async (trackType: TrackType, needPublish = true) => {
  const track = runtime.activeTracks[trackType];
  if (trackType !== "microphone" && trackType !== "microphone2") {
    // console.log('closeDevice', track)
    // track?.destroy()
    await removeTrack([track] as RTC.RCLocalTrack[], trackType, needPublish);
  } else {
    track?.mute();
  }
};
export const toggleDevice = async (trackType: TrackType) => {
  if (runtime.screenShareStatus) {
    await toggleShareScreenVideo();
    return;
  }
  const track = runtime.activeTracks[trackType];
  const needPublish = runtime.videoStatus === "liveing";
  if (track) {
    if (trackType === "camera") {
      runtime.deviceStatus.camera = "closed";
    }
    closeDevice(trackType, needPublish);
  } else {
    if (trackType === "camera") {
      runtime.deviceStatus.camera = "granted";
    }
    openDevice(trackType, needPublish);
  }
};
export const leaveIMRoom = async (source: "IM" | "Logout" = "Logout") => {
  await closeLive(true, source);
  if (runtime.joinedRoom) {
    // @ts-ignore
    await runtime.rtcClient?.leaveRoom(runtime.joinedRoom);
    runtime.joinedRoom = null;
    await RongIMLib.disconnect();
    runtime.imConnectStatus = "disconnect";
  }
};