// runtime.ts

import { ElMessage } from 'element-plus'
import { reactive, ref } from 'vue'
import * as RongIMLib from '@rongcloud/imlib-next'
import * as RTC from '@rongcloud/plugin-rtc'
import request from '/src/helpers/request'
import { state } from '/src/state'
import event, { LIVE_EVENT_MESSAGE } from './event'
import dayjs from 'dayjs'
// import { SeatsCtrl } from './message-type'

type imConnectStatus = 'connecting' | 'connected' | 'disconnect'
type VideoStatus = 'init' | 'stream' | 'liveing' | 'stopped' | 'error' | 'loading'
export type TrackType = 'microphone' | 'camera' | 'screen'
type ActiveTracks = {
  [key in TrackType]: RTC.RCLocalTrack | null
}
type DeviceStatus = {
  [key in TrackType]: 'init' | 'granted' | 'denied' | 'closed' | 'none'
}

export const START_LIVE_TIME = 'start-live-time'
export const START_LIVE_STATUS = 'start-live-status'
export const VIDEO_DEVICE_ID = 'video-deviceId'
export const AUDIO_DEVICE_ID = 'audio-deviceId'
export const AUDIO_DEVICE_VOLUME = 'audio-device-volume'

const runtime = reactive({
  /** Room id */
  roomUid: sessionStorage.getItem('roomUid') || '',
  /** IM connection status */
  imConnectStatus: 'connecting' as imConnectStatus,
  /** Screen-sharing status */
  screenShareStatus: false,
  /** Video element */
  videoRef: ref<HTMLVideoElement | null>(null),
  /** RTC client instance */
  rtcClient: null as RTC.RCRTCClient | null,
  /** Joined room instance */
  joinedRoom: null as RTC.RCLivingRoom | null,
  /** Raw media stream tracks */
  mediaStreamTrack: [] as MediaStreamTrack[],
  /** Media stream */
  mediaStreams: null as MediaStream | null,
  /** Video status */
  videoStatus: 'init' as VideoStatus,
  /** Microphone device list */
  microphones: [] as MediaDeviceInfo[],
  /** Camera device list */
  cameras: [] as MediaDeviceInfo[],
  /** Currently selected camera */
  selectedCamera: null as MediaDeviceInfo | null,
  /** Currently selected microphone */
  selectedMicrophone: null as MediaDeviceInfo | null,
  /** Like count */
  likeCount: 0,
  /** Viewer count */
  lookCount: 0,
  /** Like count at the last sync */
  lastLikeCount: 0,
  /** Currently active local tracks */
  activeTracks: {} as ActiveTracks,
  /** Whether co-host (seat) requests are allowed */
  allowSeatsCtrl: true,
  /** Whether chat messages are allowed */
  allowChatCtrl: true,
  /** Current device permission status */
  deviceStatus: {
    microphone: 'init',
    camera: 'init',
    screen: 'init'
  } as DeviceStatus
})

export default runtime

const RONG_IM_APPKEY = 'c9kqb3rdc451j'

RongIMLib.init({
  appkey: RONG_IM_APPKEY,
})

// Register custom message types
// Controls whether co-hosting (seats) is allowed
const MessageSeatsCtrl = RongIMLib.registerMessageType('RC:Chatroom:SeatsCtrl', true, true)
// Controls whether chat is allowed
const MessageChatBan = RongIMLib.registerMessageType('RC:Chatroom:ChatBan', true, true)
// Co-host (seat) request message
const MessageSeatApply = RongIMLib.registerMessageType('RC:Chatroom:SeatApply', true, true)
// Co-host (seat) response message
const MessageSeatResponse = RongIMLib.registerMessageType('RC:Chatroom:SeatResponse', true, true)

type MessageProps = {
  messageType: 'RC:Chatroom:Welcome' | 'RC:TxtMsg' | 'RC:Chatroom:Barrage' | 'RC:Chatroom:Like' | 'RC:Chatroom:SeatsCtrl' | 'RC:Chatroom:ChatBan' | 'RC:Chatroom:SeatApply',
  content: any,
}

type MessageEvent = {
  messages: MessageProps[],
}

const Events = RongIMLib.Events

/**
 * Listen for message notifications
 */
const { MESSAGES, ...RestMessage } = Events
RongIMLib.addEventListener(Events.MESSAGES, (evt: MessageEvent) => {
  console.log(evt, 'message received')
  const { messages } = evt
  for (const message of messages) {
    // console.log(LIVE_EVENT_MESSAGE[message.messageType], message)
    if (LIVE_EVENT_MESSAGE[message.messageType]) {
      event.emit(LIVE_EVENT_MESSAGE[message.messageType], { ...message.content, $EventMessage: message })
    }
  }
})

// Forward every other IM event (e.g. chatroomDestroyed) onto the local event bus
for (const Message of Object.values(RestMessage)) {
  RongIMLib.addEventListener(Message, (evt: any) => {
    console.log(Message, evt)
    event.emit(Message, { $EventMessage: null })
  })
}

/**
 * Listen for IM connection status changes
 */
RongIMLib.addEventListener(Events.CONNECTING, () => {
  console.log('connecting')
  runtime.imConnectStatus = 'connecting'
})
RongIMLib.addEventListener(Events.CONNECTED, () => {
  console.log('connected')
  runtime.imConnectStatus = 'connected'
})
RongIMLib.addEventListener(Events.DISCONNECT, () => {
  console.log('disconnect')
  runtime.imConnectStatus = 'disconnect'
})

export const connectIM = async (imToken: string) => {
  const user = await RongIMLib.connect(imToken)
  runtime.rtcClient = RongIMLib.installPlugin(RTC.installer, {})
  console.log('connect success', user.data?.userId)
  return user
}
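
/*
 * Usage sketch (illustrative only, not executed): connect to IM before joining a room.
 * The token endpoint and response shape below are assumptions; the IM token is
 * expected to come from your own backend.
 *
 *   const res: any = await request.get('/api-web/your-im-token-endpoint') // hypothetical endpoint
 *   const user = await connectIM(res.data.imToken)
 *   console.log('connected as', user.data?.userId)
 */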

/**
 * Set the playback volume
 * @param value Volume level (0-100)
 */
export const setVolume = (value: number) => {
  localStorage.setItem(AUDIO_DEVICE_VOLUME, value.toString())
  if (runtime.videoRef) {
    runtime.videoRef.volume = value / 100
  }
  // @ts-ignore _element is a private field of the local track
  if (runtime.activeTracks.microphone && runtime.activeTracks.microphone._element) {
    // @ts-ignore
    runtime.activeTracks.microphone._element.volume = value / 100
  }
}

/**
 * Attach a media stream to the video element
 */
export const setVideoSrcObject = (video: HTMLVideoElement | null, mediaStreams: MediaStream | null) => {
  if (video && mediaStreams) {
    video.srcObject = mediaStreams
    video.onloadedmetadata = () => {
      video.play()
    }
  }
}

/**
 * Start screen sharing
 */
export const shareScreenVideo = async () => {
  if (runtime.rtcClient && !runtime.screenShareStatus) {
    const screenTrack = await getTrack('screen')
    const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
    // removeTrack([oldTrack], 'camera')
    runtime.joinedRoom?.unpublish([oldTrack])
    setTrack([screenTrack as RTC.RCLocalTrack], 'screen')
    if (runtime.videoRef) {
      screenTrack.play(runtime.videoRef)
      runtime.screenShareStatus = true
    }
    // When screen sharing ends (e.g. stopped from the browser UI), restore the previous camera track
    screenTrack?.on(RTC.RCLocalTrack.EVENT_LOCAL_TRACK_END, (track: RTC.RCLocalTrack) => {
      runtime.screenShareStatus = false
      track.destroy()
      // removeTrack([track], 'screen')
      if (oldTrack) {
        setTrack([oldTrack as RTC.RCLocalTrack], 'camera')
        if (runtime.videoRef) {
          oldTrack.play(runtime.videoRef)
        }
      }
      // setVideoSrcObject(runtime.videoRef, this.mediaStreams)
    })
  }
}

/**
 * Get all audio input devices
 * @returns {Promise<MediaDeviceInfo[]>}
 */
export const getMicrophones = async () => {
  const microphones = await RTC.device.getMicrophones()
  runtime.microphones = microphones
  return microphones
}

/**
 * Get all video input devices
 * @returns {Promise<MediaDeviceInfo[]>}
 */
export const getCameras = async () => {
  const cameras = await RTC.device.getCameras()
  runtime.cameras = cameras
  return cameras
}

/**
 * Set the current camera device
 * @param camera MediaDeviceInfo
 */
export const setSelectCamera = async (camera: MediaDeviceInfo) => {
  runtime.selectedCamera = camera
  localStorage.setItem(VIDEO_DEVICE_ID, camera.deviceId)
  const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
  if (oldTrack) {
    await removeTrack([oldTrack], 'camera', oldTrack.isPublished())
  }
  const track = await getTrack('camera')
  setTrack([track], 'camera', runtime.videoStatus === 'liveing')
}
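
/*
 * Usage sketch (illustrative only): restore the previously chosen camera on startup.
 * Assumes the device id was saved earlier by setSelectCamera under VIDEO_DEVICE_ID.
 *
 *   const cameras = await getCameras()
 *   const savedId = localStorage.getItem(VIDEO_DEVICE_ID)
 *   const camera = cameras.find(item => item.deviceId === savedId) || cameras[0]
 *   if (camera) {
 *     await setSelectCamera(camera)
 *   }
 */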

/**
 * Set the current microphone device
 * @param microphone MediaDeviceInfo
 */
export const setSelectMicrophone = async (microphone: MediaDeviceInfo) => {
  runtime.selectedMicrophone = microphone
  localStorage.setItem(AUDIO_DEVICE_ID, microphone.deviceId)
  const oldTrack = runtime.activeTracks.microphone as RTC.RCLocalTrack
  if (oldTrack) {
    await removeTrack([oldTrack], 'microphone', oldTrack.isPublished())
  }
  const track = await getTrack('microphone')
  setTrack([track], 'microphone', runtime.videoStatus === 'liveing')
}

type TrackResult = {
  code: RTC.RCRTCCode,
  track: RTC.RCMicphoneAudioTrack | RTC.RCCameraVideoTrack | RTC.RCScreenVideoTrack | undefined
}

export const getTrack = async (trackType: TrackType): Promise<RTC.RCLocalTrack> => {
  let res: TrackResult | undefined
  let Track: RTC.RCLocalTrack | null = null
  if (trackType === 'microphone') {
    res = await runtime.rtcClient?.createMicrophoneAudioTrack('RongCloudRTC', {
      micphoneId: runtime.selectedMicrophone?.deviceId,
    }) as TrackResult
  } else if (trackType === 'camera') {
    res = await runtime.rtcClient?.createCameraVideoTrack('RongCloudRTC', {
      cameraId: runtime.selectedCamera?.deviceId,
      faceMode: 'user',
      frameRate: RTC.RCFrameRate.FPS_24,
      resolution: RTC.RCResolution.W1920_H1080,
    }) as TrackResult
  } else {
    res = await runtime.rtcClient?.createScreenVideoTrack() as TrackResult
  }
  Track = res?.track as RTC.RCLocalTrack
  // Record the device acquisition status
  if (trackType === 'camera' && !runtime.cameras.length) {
    runtime.deviceStatus[trackType] = 'none'
  } else if (trackType === 'microphone' && !runtime.microphones.length) {
    runtime.deviceStatus[trackType] = 'none'
  } else if (trackType === 'screen' && !runtime.screenShareStatus) {
    runtime.deviceStatus[trackType] = 'none'
  }
  if (res.code === RTC.RCRTCCode.PERMISSION_DENIED) {
    runtime.deviceStatus[trackType] = 'denied'
  } else {
    runtime.deviceStatus[trackType] = 'granted'
  }
  // if (res.code !== RTC.RCRTCCode.SUCCESS || !Track) {
  //   throw new Error('Failed to get the media track')
  // }
  return Track
}

/**
 * Add tracks; keeps the current video and the published streams in sync
 * @param tracks
 */
export const setTrack = async (tracks: RTC.RCLocalTrack[], trackType: TrackType, needPublish = true) => {
  for (const track of tracks) {
    // @ts-ignore
    // await runtime.mediaStreams?.addTrack(track._msTrack)
    if (trackType === 'microphone') {
      console.log('add microphone')
      track?.play()
    }
    runtime.activeTracks[trackType] = track
  }
  if (needPublish) {
    // console.log('publish', runtime.joinedRoom)
    await runtime.joinedRoom?.publish(tracks.filter(track => !!track))
  }
}

/**
 * Remove tracks; keeps the current video and the published streams in sync
 * @param tracks
 */
export const removeTrack = async (tracks: RTC.RCLocalTrack[], trackType: TrackType, needPublish = true) => {
  if (needPublish) {
    await runtime.joinedRoom?.unpublish(tracks.filter(track => !!track))
  }
  for (const track of tracks) {
    // @ts-ignore
    // await runtime.mediaStreams?.removeTrack(track._msTrack)
    // runtime.activeTracks[trackType].destroy()
    // console.log(runtime.activeTracks[trackType])
    track?.destroy()
    runtime.activeTracks[trackType] = null
  }
}

export const joinIMRoom = async (roomId: string, type: RTC.RCLivingType, listenEvents: RTC.IRoomEventListener | null) => {
  await RongIMLib.joinChatRoom(roomId, { count: -1 })
  const join = await runtime.rtcClient?.joinLivingRoom(roomId, type)
  if (join?.code !== RTC.RCRTCCode.SUCCESS) throw Error('Failed to join the room')
  join.room?.registerRoomEventListener(listenEvents)
  return join
}

export const joinRoom = async (roomId: string, type: RTC.RCLivingType, listenEvents: RTC.IRoomEventListener | null) => {
  // try {
  //   await request.get('/api-web/imLiveBroadcastRoom/joinRoom', {
  //     params: {
  //       roomUid: runtime.roomUid,
  //       userId: state.user?.speakerId,
  //     }
  //   })
  // } catch (error) {}
  return await joinIMRoom(roomId, type, listenEvents)
}
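
/*
 * Usage sketch (illustrative only): join the chatroom and living room as the broadcaster.
 * RCLivingType.AUDIO_AND_VIDEO is assumed to be the relevant enum member of the RTC
 * plugin; pass whichever living type and room event listener your scenario requires.
 *
 *   const join = await joinRoom(runtime.roomUid, RTC.RCLivingType.AUDIO_AND_VIDEO, null)
 *   runtime.joinedRoom = join.room as RTC.RCLivingRoom
 */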

/**
 * Start the live broadcast
 */
export const startLive = async (resetTime = true) => {
  if (runtime.videoStatus !== 'stream') {
    const errorMessage = 'Please make sure the camera is turned on'
    ElMessage.error(errorMessage)
    throw Error(errorMessage)
  }
  const room = runtime.joinedRoom
  if (room) {
    // const microphoneAudioTrack = await getTrack('microphone')
    // const cameraVideoTrack = await getTrack('camera')
    await setTrack([runtime.activeTracks.camera as RTC.RCLocalVideoTrack], 'camera')
    await setTrack([runtime.activeTracks.microphone as RTC.RCLocalAudioTrack], 'microphone')
    // const builder = await runtime.joinedRoom?.getMCUConfigBuilder()
    // // @ts-ignore
    // await builder.setOutputVideoRenderMode?.(RTC.MixVideoRenderMode.WHOLE)
    // // @ts-ignore
    // await builder.flush()
    // console.log(runtime.activeTracks)
    await request.get('/api-web/imLiveBroadcastRoom/opsLiveVideo', {
      params: {
        type: '1',
        roomUid: runtime.roomUid,
        userId: state.user?.speakerId,
      },
    })
    runtime.videoStatus = 'liveing'
  }
  if (resetTime) {
    sessionStorage.setItem(START_LIVE_TIME, dayjs().valueOf().toString())
  }
  sessionStorage.setItem(START_LIVE_STATUS, 'liveing')
}

/**
 * Stop the live broadcast
 */
export const closeLive = async (remove = false) => {
  // removeMedia(runtime.mediaStreams, runtime.mediaStreamTrack)
  await request.get('/api-web/imLiveBroadcastRoom/opsLiveVideo', {
    params: {
      type: '2',
      roomUid: runtime.roomUid,
      userId: state.user?.speakerId,
    }
  })
  sessionStorage.removeItem(START_LIVE_TIME)
  sessionStorage.removeItem(START_LIVE_STATUS)
  // Closing the broadcast only requires unpublishing the streams
  for (const key in runtime.activeTracks) {
    if (Object.prototype.hasOwnProperty.call(runtime.activeTracks, key)) {
      const track = runtime.activeTracks[key as TrackType] as RTC.RCLocalTrack
      if (track) {
        await runtime.joinedRoom?.unpublish([track])
        if (remove) {
          await removeTrack([track], key as TrackType)
        }
      }
    }
  }
  runtime.videoStatus = 'stream'
}
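
/*
 * Usage sketch (illustrative only): a typical broadcast lifecycle once the camera and
 * microphone tracks are prepared and runtime.videoStatus is 'stream'.
 *
 *   await startLive()   // publishes the active tracks and notifies the backend
 *   // ... broadcasting ...
 *   await closeLive()   // unpublishes the tracks; pass true to also destroy them
 */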

/**
 * Periodically sync the like count (re-runs every 30 seconds)
 */
export const loopSyncLike = async () => {
  if ((runtime.likeCount !== runtime.lastLikeCount || runtime.likeCount === 0) && state.user) {
    try {
      await request.get('/api-web/imLiveBroadcastRoom/syncLike', {
        hideLoading: true,
        hideMessage: true,
        params: {
          likeNum: runtime.likeCount,
          roomUid: runtime.roomUid,
        }
      })
      runtime.lastLikeCount = runtime.likeCount
    } catch (error) {}
  }
  setTimeout(() => {
    loopSyncLike()
  }, 1000 * 30)
}

type SendMessageType = 'text' | 'image' | 'audio' | 'video' | 'file' | 'SeatsCtrl' | 'ChatBan' | 'SeatApply' | 'SeatResponse'

export const getSendMessageUser = () => {
  return {
    id: state.user?.speakerId,
    name: state.user?.speakerName,
    userId: state.user?.speakerId,
    userName: state.user?.speakerName,
  }
}

/**
 * Send a chatroom message
 * @param msg Message content
 * @param type Message type
 * @returns undefined, or the result of sending the message
 */
export const sendMessage = async (msg: any, type: SendMessageType = 'text') => {
  let message: RongIMLib.BaseMessage<unknown> | null = null
  if (!msg) return
  const conversation = {
    conversationType: RongIMLib.ConversationType.CHATROOM,
    targetId: runtime.joinedRoom?._roomId as string,
  }
  if (type === 'text') {
    message = new RongIMLib.TextMessage({
      user: getSendMessageUser(),
      content: msg
    })
  } else if (type === 'SeatsCtrl') {
    message = new MessageSeatsCtrl(msg)
  } else if (type === 'ChatBan') {
    message = new MessageChatBan(msg)
  } else if (type === 'SeatApply') {
    message = new MessageSeatApply(msg)
  } else if (type === 'SeatResponse') {
    message = new MessageSeatResponse(msg)
  }
  if (!message) return
  console.log(message)
  return await RongIMLib.sendMessage(conversation, message)
}
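
/*
 * Usage sketch (illustrative only): sending messages to the joined chatroom.
 * The SeatsCtrl payload below is a made-up example; use whatever fields the
 * receiving side of the custom message expects.
 *
 *   await sendMessage('hello everyone')                // plain text message
 *   await sendMessage({ seatStatus: 1 }, 'SeatsCtrl')  // custom message, payload assumed
 */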

export const openDevice = async (trackType: TrackType, needPublish = true) => {
  // An existing microphone track is only unmuted; other track types are recreated
  if (trackType === 'microphone' && runtime.activeTracks[trackType]) {
    runtime.activeTracks[trackType]?.unmute()
  } else {
    const track = await getTrack(trackType)
    await setTrack([track], trackType, needPublish)
    if (runtime.videoRef) {
      track?.play(runtime.videoRef)
    }
  }
}

export const closeDevice = async (trackType: TrackType, needPublish = true) => {
  const track = runtime.activeTracks[trackType]
  if (trackType !== 'microphone') {
    // console.log('closeDevice', track)
    // track?.destroy()
    await removeTrack([track] as RTC.RCLocalTrack[], trackType, needPublish)
  } else {
    // The microphone is muted rather than destroyed so it can be resumed later
    track?.mute()
  }
}

export const toggleDevice = async (trackType: TrackType) => {
  const track = runtime.activeTracks[trackType]
  const needPublish = runtime.videoStatus === 'liveing'
  if (track) {
    if (trackType === 'camera') {
      runtime.deviceStatus.camera = 'closed'
    }
    await closeDevice(trackType, needPublish)
  } else {
    if (trackType === 'camera') {
      runtime.deviceStatus.camera = 'granted'
    }
    await openDevice(trackType, needPublish)
  }
}
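
/*
 * Usage sketch (illustrative only): wiring UI controls to the device helpers.
 * Tracks are only (un)published when runtime.videoStatus is 'liveing'.
 *
 *   await toggleDevice('camera')      // close or reopen the camera track
 *   await toggleDevice('microphone')  // the microphone track is muted rather than destroyed
 */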

export const leaveIMRoom = async () => {
  await closeLive(true)
  if (runtime.joinedRoom) {
    // @ts-ignore
    await runtime.rtcClient?.leaveRoom(runtime.joinedRoom)
    runtime.joinedRoom = null
  }
}