import { ElMessage, ElMessageBox } from 'element-plus';
import { reactive, ref, Ref } from 'vue'
import * as RongIMLib from '@rongcloud/imlib-next'
import * as RTC from '@rongcloud/plugin-rtc'
import request from '/src/helpers/request'
import { state } from '/src/state'
import event, { LIVE_EVENT_MESSAGE } from './event'
import dayjs from 'dayjs'
import { removeToken } from '/src/utils/auth';
import qs from 'query-string'
// import { SeatsCtrl } from './message-type'

type imConnectStatus = 'connecting' | 'connected' | 'disconnect'
type VideoStatus = 'init' | 'stream' | 'liveing' | 'stopped' | 'error' | 'loading'
export type TrackType = 'microphone' | 'camera' | 'screen'

let publishError = false

type ActiveTracks = {
  [key in TrackType]: RTC.RCLocalTrack | null
}
type DeviceStatus = {
  [key in TrackType]: 'init' | 'granted' | 'denied' | 'closed' | 'none'
}

export const START_LIVE_TIME = 'start-live-time'
export const START_LIVE_STATUS = 'start-live-status'
export const VIDEO_DEVICE_ID = 'video-deviceId'
export const AUDIO_DEVICE_ID = 'audio-deviceId'
export const AUDIO_DEVICE_VOLUME = 'audio-device-volume'

const runtime = reactive({
  /** Room id */
  roomUid: sessionStorage.getItem('roomUid') || '',
  /** IM connection status */
  imConnectStatus: 'connecting' as imConnectStatus,
  // Screen-sharing status
  screenShareStatus: false,
  // Video element
  videoRef: ref(null),
  // RTC client instance
  rtcClient: null as RTC.RCRTCClient | null,
  /** Joined room instance */
  joinedRoom: null as RTC.RCLivingRoom | null,
  // Tracks
  mediaStreamTrack: [] as MediaStreamTrack[],
  // Media stream
  mediaStreams: null as MediaStream | null,
  // Video status
  videoStatus: 'init' as VideoStatus,
  // Microphone device list
  microphones: [] as MediaDeviceInfo[],
  // Camera device list
  cameras: [] as MediaDeviceInfo[],
  // Selected camera device
  selectedCamera: null as MediaDeviceInfo | null,
  // Selected microphone device
  selectedMicrophone: null as MediaDeviceInfo | null,
  // Like count
  likeCount: 0,
  // Viewer count
  lookCount: 0,
  // Like count at the last sync
  lastLikeCount: 0,
  /** Currently active tracks */
  activeTracks: {} as ActiveTracks,
  /** Whether co-streaming (seat requests) is allowed */
  allowSeatsCtrl: true,
  /** Whether chat messages are allowed */
  allowChatCtrl: true,
  /** Current device acquisition status */
  deviceStatus: {
    microphone: 'init',
    camera: 'init',
    screen: 'init'
  } as DeviceStatus,
  syncLikeTimer: null as any
})

export default runtime

// c9kqb3rdc451j: test environment
// const RONG_IM_TOKEN = 'c9kqb3rdc451j'
const RONG_IM_TOKEN = '6tnym1br6pv07'

RongIMLib.init({
  appkey: RONG_IM_TOKEN,
})

// Register custom message types
// Controls whether co-streaming (seat requests) is allowed
const MessageSeatsCtrl = RongIMLib.registerMessageType('RC:Chatroom:SeatsCtrl', true, true)
// Controls whether chat is allowed
const MessageChatBan = RongIMLib.registerMessageType('RC:Chatroom:ChatBan', true, true)
// Seat (co-streaming) request
const MessageSeatApply = RongIMLib.registerMessageType('RC:Chatroom:SeatApply', true, true)
// Response to a seat request
const MessageSeatResponse = RongIMLib.registerMessageType('RC:Chatroom:SeatResponse', true, true)
// Syncs the room viewer count
const MessageMemberCount = RongIMLib.registerMessageType('RC:Chatroom:MemberCount', true, true)
// Syncs the current co-streaming members
const MessageSeatMember = RongIMLib.registerMessageType('RC:Chatroom:SeatMember', true, true)
// Syncs the current like count
const MessageLikeCount = RongIMLib.registerMessageType('RC:Chatroom:LikeCount', true, true)
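// Sketch: adding another custom chatroom message type would follow the same pattern
// as the registrations above. The 'RC:Chatroom:CustomNotice' identifier and the payload
// below are hypothetical, not types this app uses; the two boolean flags simply mirror
// the values passed to the registrations above.
// const MessageCustomNotice = RongIMLib.registerMessageType('RC:Chatroom:CustomNotice', true, true)
// const notice = new MessageCustomNotice({ text: 'example payload' })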
type MessageProps = {
  messageType: 'RC:Chatroom:Welcome' | 'RC:TxtMsg' | 'RC:Chatroom:Barrage' | 'RC:Chatroom:Like' | 'RC:Chatroom:SeatsCtrl' | 'RC:Chatroom:ChatBan' | 'RC:Chatroom:SeatApply',
  content: any,
  senderUserId: any
}
type MessageEvent = {
  messages: MessageProps[],
}

const Events = RongIMLib.Events

/**
 * Listen for incoming messages
 */
const { MESSAGES, ...RestMessage } = Events
RongIMLib.addEventListener(Events.MESSAGES, (evt: MessageEvent) => {
  console.log(evt, '收到消息')
  const { messages } = evt
  for (const message of messages) {
    // console.log(LIVE_EVENT_MESSAGE[message.messageType], message)
    const isSelf = message.senderUserId && Number(message.senderUserId) === state.user?.speakerId
    if (!isSelf && LIVE_EVENT_MESSAGE[message.messageType]) {
      event.emit(LIVE_EVENT_MESSAGE[message.messageType], { ...message.content, $EventMessage: message })
    }
  }
})

for (const Message of Object.values(RestMessage)) {
  RongIMLib.addEventListener(Message, (evt: any) => {
    console.log(Message, evt)
    // chatroomDestroyed
    event.emit(Message, { $EventMessage: null })
  })
}

/**
 * Listen for IM connection status changes
 */
RongIMLib.addEventListener(Events.CONNECTING, () => {
  console.log('connecting')
  runtime.imConnectStatus = 'connecting'
})
RongIMLib.addEventListener(Events.CONNECTED, () => {
  console.log('connected')
  runtime.imConnectStatus = 'connected'
})
RongIMLib.addEventListener(Events.DISCONNECT, () => {
  console.log('disconnect')
  runtime.imConnectStatus = 'disconnect'
  closeLive(true, 'IM')
  const search = qs.parse(window.location.search)
  console.log(search, 'disconnect')
  window.location.href = window.location.origin + '/live?' + qs.stringify({
    ...search,
    time: new Date().getTime()
  })
  // event.emit(LIVE_EVENT_MESSAGE["RC:ForcedOffline"])
  // if (runtime.joinedRoom && runtime.videoStatus === 'liveing') {
  //   closeLive(true, 'IM')
  // }
})

export const connectIM = async (imToken: string) => {
  const user = await RongIMLib.connect(imToken)
  runtime.rtcClient = RongIMLib.installPlugin(RTC.installer, {})
  console.log('connect success', user.data?.userId)
  return user
}
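// Usage sketch (illustrative): a typical setup flow connects IM first, then joins the
// living room. `imTokenFromServer`, `livingType` and `roomEventListener` are placeholders
// supplied by the calling page, not values defined in this module.
// await connectIM(imTokenFromServer)
// const { room } = await joinRoom(runtime.roomUid, livingType, roomEventListener)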
/**
 * Set playback volume
 * @param value volume level (0 to 100)
 */
export const setVolume = (value: number) => {
  localStorage.setItem(AUDIO_DEVICE_VOLUME, value.toString())
  if (runtime.videoRef) {
    runtime.videoRef.volume = value / 100
  }
  // @ts-ignore
  if (runtime.activeTracks.microphone && runtime.activeTracks.microphone._element) {
    // @ts-ignore
    runtime.activeTracks.microphone._element.volume = value / 100
  }
}

/**
 * Attach a media stream to a video element
 */
export const setVideoSrcObject = (video: HTMLVideoElement | null, mediaStreams: MediaStream | null) => {
  if (video && mediaStreams) {
    video.srcObject = mediaStreams
    video.onloadedmetadata = () => {
      video.play()
    }
  }
}

/**
 * Start screen sharing
 */
export const shareScreenVideo = async () => {
  if (runtime.screenShareStatus) {
    ElMessage.error('正在屏幕共享中,请先关闭屏幕共享')
    return
  }
  if (runtime.rtcClient && !runtime.screenShareStatus && runtime.videoStatus === 'liveing') {
    let screenTrack: RTC.RCLocalTrack | undefined
    try {
      screenTrack = await getTrack('screen')
    } catch (error) {
      ElMessage.error('屏幕分享失败,请检查是否授权')
    }
    if (!screenTrack) {
      return
    }
    const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
    // removeTrack([oldTrack], 'camera')
    if (oldTrack) {
      await runtime.joinedRoom?.unpublish([oldTrack])
    }
    setTrack([screenTrack as RTC.RCLocalTrack], 'screen')
    if (runtime.videoRef) {
      screenTrack?.play(runtime.videoRef)
      runtime.screenShareStatus = true
    }
    // When the user stops sharing from the browser UI, restore the camera track
    screenTrack?.on(RTC.RCLocalTrack.EVENT_LOCAL_TRACK_END, (track: RTC.RCLocalTrack) => {
      runtime.screenShareStatus = false
      track.destroy()
      // removeTrack([track], 'screen')
      if (oldTrack) {
        setTrack([oldTrack as RTC.RCLocalTrack], 'camera')
        if (runtime.videoRef) {
          oldTrack.play(runtime.videoRef)
        }
      }
      // setVideoSrcObject(runtime.videoRef, this.mediaStreams)
    })
  }
}

/**
 * Release the screen-share track; this stops screen sharing
 */
export const closeShareScreenVideo = () => {
  const screenTrack = runtime.activeTracks.screen as RTC.RCLocalTrack
  if (screenTrack) {
    screenTrack.destroy()
    runtime.screenShareStatus = false
  }
  const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
  if (oldTrack) {
    setTrack([oldTrack as RTC.RCLocalTrack], 'camera')
    if (runtime.videoRef) {
      oldTrack.play(runtime.videoRef)
    }
  }
}

export const toggleShareScreenVideo = async () => {
  if (runtime.screenShareStatus) {
    try {
      await ElMessageBox.confirm('是否确认取消屏幕共享?')
      closeShareScreenVideo()
    } catch (error) {}
  } else {
    shareScreenVideo()
  }
}

/**
 * Get all audio input devices
 * @returns {Promise<MediaDeviceInfo[]>}
 */
export const getMicrophones = async () => {
  const microphones = await RTC.device.getMicrophones()
  runtime.microphones = microphones
  return microphones
}

/**
 * Get all video input devices
 * @returns {Promise<MediaDeviceInfo[]>}
 */
export const getCameras = async () => {
  const cameras = await RTC.device.getCameras()
  runtime.cameras = cameras
  return cameras
}

/**
 * Set the current camera device
 * @param camera MediaDeviceInfo
 */
export const setSelectCamera = async (camera: MediaDeviceInfo) => {
  runtime.selectedCamera = camera
  localStorage.setItem(VIDEO_DEVICE_ID, camera.deviceId)
  const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
  if (oldTrack) {
    await removeTrack([oldTrack], 'camera', oldTrack.isPublished())
  }
  const track = await getTrack('camera')
  setTrack([track], 'camera', runtime.videoStatus === 'liveing')
}

/**
 * Set the current microphone device
 * @param microphone MediaDeviceInfo
 */
export const setSelectMicrophone = async (microphone: MediaDeviceInfo) => {
  runtime.selectedMicrophone = microphone
  localStorage.setItem(AUDIO_DEVICE_ID, microphone.deviceId)
  const oldTrack = runtime.activeTracks.microphone as RTC.RCLocalTrack
  if (oldTrack) {
    await removeTrack([oldTrack], 'microphone', oldTrack.isPublished())
  }
  const track = await getTrack('microphone')
  setTrack([track], 'microphone', runtime.videoStatus === 'liveing')
}
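// Usage sketch (illustrative): switching capture devices from a settings panel.
// Picking the first device is only an example of how the functions above compose.
// const cameras = await getCameras()
// if (cameras.length) await setSelectCamera(cameras[0])
// const microphones = await getMicrophones()
// if (microphones.length) await setSelectMicrophone(microphones[0])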
type TrackResult = {
  code: RTC.RCRTCCode,
  track: RTC.RCMicphoneAudioTrack | RTC.RCCameraVideoTrack | RTC.RCScreenVideoTrack | undefined
}

export const getTrack = async (trackType: TrackType): Promise<RTC.RCLocalTrack> => {
  let res: TrackResult | undefined
  let Track: RTC.RCLocalTrack | null = null
  if (trackType === 'microphone') {
    res = await runtime.rtcClient?.createMicrophoneAudioTrack('RongCloudRTC', {
      micphoneId: runtime.selectedMicrophone?.deviceId,
    }) as TrackResult
  } else if (trackType === 'camera') {
    res = await runtime.rtcClient?.createCameraVideoTrack('RongCloudRTC', {
      cameraId: runtime.selectedCamera?.deviceId,
      faceMode: 'user',
      frameRate: RTC.RCFrameRate.FPS_24,
      resolution: RTC.RCResolution.W1920_H1080,
    }) as TrackResult
  } else {
    res = await runtime.rtcClient?.createScreenVideoTrack('screenshare', {
      frameRate: RTC.RCFrameRate.FPS_24,
      resolution: RTC.RCResolution.W1920_H1080,
    }) as TrackResult
  }
  Track = res?.track as RTC.RCLocalTrack
  if (trackType === 'camera' && !runtime.cameras.length) {
    runtime.deviceStatus[trackType] = 'none'
  } else if (trackType === 'microphone' && !runtime.microphones.length) {
    runtime.deviceStatus[trackType] = 'none'
  } else if (trackType === 'screen' && !runtime.screenShareStatus) {
    runtime.deviceStatus[trackType] = 'none'
  }
  if (res.code === RTC.RCRTCCode.PERMISSION_DENIED) {
    runtime.deviceStatus[trackType] = 'denied'
  } else {
    runtime.deviceStatus[trackType] = 'granted'
  }
  // if (res.code !== RTC.RCRTCCode.SUCCESS || !Track) {
  //   throw new Error('获取数据流失败')
  // }
  if (res.code === RTC.RCRTCCode.GET_DISPLAY_MEDIA_FAILED) {
    throw new Error('获取屏幕共享失败')
  }
  return Track
}

/**
 * Add tracks: records them as the active tracks and, when required, publishes them to the room
 * @param tracks
 * @param trackType
 * @param needPublish whether the tracks should also be published to the joined room
 */
export const setTrack = async (tracks: RTC.RCLocalTrack[], trackType: TrackType, needPublish = true) => {
  for (const track of tracks) {
    // @ts-ignore
    // await runtime.mediaStreams?.addTrack(track._msTrack)
    // if (trackType === 'microphone') {
    //   console.log('添加麦克风')
    //   track?.play()
    // }
    runtime.activeTracks[trackType] = track
  }
  console.log(needPublish)
  if (needPublish) {
    // console.log('publish', runtime.joinedRoom)
    try {
      const res = await runtime.joinedRoom?.publish(tracks.filter(track => !!track))
      console.log(res, 'pub')
      if (res?.code !== RTC.RCRTCCode.SUCCESS && !publishError) {
        publishError = true
        window.onbeforeunload = null
        ElMessageBox.alert('视频流发送失败,请刷新页面重新开启', '提示', {
          confirmButtonText: '确定',
          callback: () => {
            publishError = false
            window.location.reload()
          }
        })
      }
    } catch (err: any) {
      console.log(err, 'err')
    }
  }
}

/**
 * Remove tracks: unpublishes them when required, then destroys them and clears the active track entry
 * @param tracks
 * @param trackType
 * @param needPublish whether the tracks also need to be unpublished from the joined room
 */
export const removeTrack = async (tracks: RTC.RCLocalTrack[], trackType: TrackType, needPublish = true) => {
  if (needPublish) {
    await runtime.joinedRoom?.unpublish(tracks.filter(track => !!track))
  }
  for (const track of tracks) {
    // @ts-ignore
    // await runtime.mediaStreams?.removeTrack(track._msTrack)
    // runtime.activeTracks[trackType].destroy()
    // console.log(runtime.activeTracks[trackType])
    track?.destroy()
    runtime.activeTracks[trackType] = null
  }
}

export const joinIMRoom = async (roomId: string, type: RTC.RCLivingType, listenEvents: RTC.IRoomEventListener | null) => {
  await RongIMLib.joinChatRoom(roomId, { count: -1 })
  const join = await runtime.rtcClient?.joinLivingRoom(roomId, type)
  if (join?.code !== RTC.RCRTCCode.SUCCESS) throw Error('加入房间失败')
  join.room?.registerRoomEventListener(listenEvents)
  return join
}

export const joinRoom = async (roomId: string, type: RTC.RCLivingType, listenEvents: RTC.IRoomEventListener | null) => {
  // try {
  //   await request.get('/api-web/imLiveBroadcastRoom/joinRoom', {
  //     params: {
  //       roomUid: runtime.roomUid,
  //       userId: state.user?.speakerId,
  //     }
  //   })
  // } catch (error) {}
  return await joinIMRoom(roomId, type, listenEvents)
}

/**
 * Start the live broadcast
 */
export const startLive = async (resetTime = true) => {
  if (runtime.videoStatus !== 'stream') {
    const errorMessage = '请确定摄像头已经开启'
    ElMessage.error(errorMessage)
    throw Error(errorMessage)
  }
  const room = runtime.joinedRoom
  if (room) {
    // const microphoneAudioTrack = await getTrack('microphone')
    // const cameraVideoTrack = await getTrack('camera')
    await setTrack([runtime.activeTracks.camera as RTC.RCLocalVideoTrack], 'camera')
    await setTrack([runtime.activeTracks.microphone as RTC.RCLocalAudioTrack], 'microphone')
    // const builder = await runtime.joinedRoom?.getMCUConfigBuilder()
    // // @ts-ignore
    // await builder.setOutputVideoRenderMode?.(RTC.MixVideoRenderMode.WHOLE)
    // // @ts-ignore
    // await builder.flush()
    // console.log(runtime.activeTracks)
    await request.get('/api-web/imLiveBroadcastRoom/opsLiveVideo', {
      params: {
        type: '1',
        roomUid: runtime.roomUid,
        userId: state.user?.speakerId,
      },
    })
    runtime.videoStatus = 'liveing'
  }
  if (resetTime) {
    sessionStorage.setItem(START_LIVE_TIME, dayjs().valueOf().toString())
  }
  sessionStorage.setItem(START_LIVE_STATUS, 'liveing')
}
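// Usage sketch (illustrative): startLive() requires runtime.videoStatus to already be
// 'stream' and the camera/microphone tracks to be present in runtime.activeTracks;
// the preview step that puts the module in that state lives outside this file.
// await openDevice('camera', false)
// await openDevice('microphone', false)
// await startLive()   // publishes the active tracks and notifies the backend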
/**
 * Stop the live broadcast
 */
export const closeLive = async (remove = false, source: 'IM' | 'Logout' = 'Logout') => {
  // removeMedia(runtime.mediaStreams, runtime.mediaStreamTrack)
  try {
    if (source === 'Logout') {
      await request.get('/api-web/imLiveBroadcastRoom/opsLiveVideo', {
        params: {
          type: '2',
          roomUid: runtime.roomUid,
          userId: state.user?.speakerId,
        }
      })
    }
  } catch {}
  sessionStorage.removeItem(START_LIVE_TIME)
  sessionStorage.removeItem(START_LIVE_STATUS)
  // Closing the room only requires unpublishing the streams
  for (const key in runtime.activeTracks) {
    if (Object.prototype.hasOwnProperty.call(runtime.activeTracks, key)) {
      const track = runtime.activeTracks[key as TrackType] as RTC.RCLocalTrack
      if (track) {
        await runtime.joinedRoom?.unpublish([track])
        if (remove) {
          await removeTrack([track], key as TrackType)
        }
      }
    }
  }
  runtime.videoStatus = 'stream'
}

/**
 * Periodically sync the like count
 */
export const loopSyncLike = async () => {
  // (runtime.likeCount !== runtime.lastLikeCount || runtime.likeCount === 0) &&
  if (state.user) {
    try {
      await request.get('/api-web/imLiveBroadcastRoom/syncLike', {
        hideLoading: true,
        hideMessage: true,
        params: {
          likeNum: runtime.likeCount,
          roomUid: runtime.roomUid,
        }
      })
      runtime.lastLikeCount = runtime.likeCount
      sendMessage({ count: runtime.likeCount }, 'LikeCount')
    } catch (error) {}
  }
  runtime.syncLikeTimer = setTimeout(() => {
    loopSyncLike()
  }, 1000 * 10)
}

type SendMessageType = 'text' | 'image' | 'audio' | 'video' | 'file' | 'SeatsCtrl' | 'ChatBan' | 'SeatApply' | 'SeatResponse' | 'MemberCount' | 'SeatMember' | 'LikeCount'

export const getSendMessageUser = () => {
  return {
    id: String(state.user?.speakerId),
    name: state.user?.speakerName,
    userId: String(state.user?.speakerId),
    userName: state.user?.speakerName,
  }
}

/**
 * @param msg message content
 * @param type message type
 * @returns the send result, or undefined when there is nothing to send
 */
export const sendMessage = async (msg: any, type: SendMessageType = 'text') => {
  let message: RongIMLib.BaseMessage | null = null
  if (!msg) return
  const conversation = {
    conversationType: RongIMLib.ConversationType.CHATROOM,
    targetId: runtime.joinedRoom?._roomId as string,
  }
  if (type === 'text') {
    message = new RongIMLib.TextMessage({ user: getSendMessageUser(), content: msg })
  } else if (type === 'SeatsCtrl') {
    message = new MessageSeatsCtrl(msg)
  } else if (type === 'ChatBan') {
    message = new MessageChatBan(msg)
  } else if (type === 'SeatApply') {
    message = new MessageSeatApply(msg)
  } else if (type === 'SeatResponse') {
    message = new MessageSeatResponse(msg)
  } else if (type === 'MemberCount') {
    message = new MessageMemberCount(msg)
  } else if (type === 'SeatMember') {
    message = new MessageSeatMember(msg)
  } else if (type === 'LikeCount') {
    message = new MessageLikeCount(msg)
  }
  if (!message) return
  console.log(message)
  return await RongIMLib.sendMessage(conversation, message)
}
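// Usage sketch (illustrative): sending a plain chat message and a custom control message.
// The SeatsCtrl payload shape shown here is only an example, not a documented schema.
// await sendMessage('hello', 'text')
// await sendMessage({ seatsCtrl: runtime.allowSeatsCtrl }, 'SeatsCtrl')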
export const openDevice = async (trackType: TrackType, needPublish = true) => {
  if (trackType === 'microphone' && runtime.activeTracks[trackType]) {
    runtime.activeTracks[trackType]?.unmute()
  } else {
    const track = await getTrack(trackType)
    await setTrack([track], trackType, needPublish)
    if (runtime.videoRef) {
      track?.play(runtime.videoRef)
    }
  }
}

export const closeDevice = async (trackType: TrackType, needPublish = true) => {
  const track = runtime.activeTracks[trackType]
  if (trackType !== 'microphone') {
    // console.log('closeDevice', track)
    // track?.destroy()
    await removeTrack([track] as RTC.RCLocalTrack[], trackType, needPublish)
  } else {
    track?.mute()
  }
}

export const toggleDevice = async (trackType: TrackType) => {
  if (runtime.screenShareStatus) {
    await toggleShareScreenVideo()
    return
  }
  const track = runtime.activeTracks[trackType]
  const needPublish = runtime.videoStatus === 'liveing'
  if (track) {
    if (trackType === 'camera') {
      runtime.deviceStatus.camera = 'closed'
    }
    closeDevice(trackType, needPublish)
  } else {
    if (trackType === 'camera') {
      runtime.deviceStatus.camera = 'granted'
    }
    openDevice(trackType, needPublish)
  }
}

export const leaveIMRoom = async (source: 'IM' | 'Logout' = 'Logout') => {
  await closeLive(true, source)
  if (runtime.joinedRoom) {
    // @ts-ignore
    await runtime.rtcClient?.leaveRoom(runtime.joinedRoom)
    runtime.joinedRoom = null
    await RongIMLib.disconnect()
    runtime.imConnectStatus = 'disconnect'
  }
}
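// Usage sketch (illustrative): a logout or page-unload handler would normally tear everything down with
// await leaveIMRoom('Logout')          // stops the broadcast, leaves the RTC room and disconnects IM
// clearTimeout(runtime.syncLikeTimer)  // optionally stop the like-count sync loop as well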