From 3836ddde90512711aadcc2dbf590afb84e263719 Mon Sep 17 00:00:00 2001 From: wenlele Date: Tue, 19 Jul 2022 17:30:09 +0800 Subject: [PATCH] =?UTF-8?q?=E6=92=AD=E6=94=BE=E7=BB=84=E4=BB=B6=E7=9A=84?= =?UTF-8?q?=E4=BF=AE=E6=94=B9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../components/videoPlayer/videoOperation.jsx | 248 +++++++++--------- .../videoOperationCloudControl.jsx | 158 +++++------ .../videoPlayer/videoOperationTalk.jsx | 152 +++++------ .../src/components/videoPlayer/videoPlay.jsx | 148 +++++++---- .../src/components/videoPlayer/videoPlay.less | 7 + .../components/videoPlayer/videoPlayModal.jsx | 47 ++-- .../components/videoPlayer/voiceHeader.jsx | 155 +++++------ .../web/client/src/index.less | 5 +- .../components/sideSheet.jsx | 4 +- .../equipmentWarehouse/containers/nvr.jsx | 1 - .../monitor/containers/videoPlayCross.jsx | 104 +++++--- 11 files changed, 565 insertions(+), 464 deletions(-) diff --git a/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperation.jsx b/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperation.jsx index 63baf16..4d79544 100644 --- a/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperation.jsx +++ b/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperation.jsx @@ -13,140 +13,142 @@ import './videoPlay.less'; const timeFormat = 'YYYY-MM-DD HH:mm:ss' const VideoOperation = ({ - ToastInCustom, - operationState, operation, - voiceDisY, setVoiceDisY, - processDisX, setProcessDisX, - isAdjustProcess, setIsAdjustProcess, - resolution, setResolution, - histroyTime, setHistroyTime, - histroyBegain, - play, pause, closeSound, openSound, - isPlaying, - videoObj, - setYingshiPrepare, yingshiPrepareRef + ToastInCustom, + operationState, operation, + voiceDisY, setVoiceDisY, + processDisX, setProcessDisX, + isAdjustProcess, setIsAdjustProcess, + resolution, setResolution, + histroyTime, setHistroyTime, + histroyBegain, + play, pause, closeSound, openSound, + isPlaying, + videoObj, + setYingshiPrepare, yingshiPrepareRef, + slideDown, }) => { - const [showTimeSelect, setShowTimeSelect] = useState(false) + const [showTimeSelect, setShowTimeSelect] = useState(false) - const butStyle = { - border: '1px solid #fff', color: '#fff', padding: '0 10px', - display: 'flex', alignItems: 'center', height: '64%', marginLeft: 12, cursor: 'pointer', - position: 'relative' - } + const butStyle = { + border: '1px solid #fff', color: '#fff', padding: '0 10px', + display: 'flex', alignItems: 'center', height: '64%', marginLeft: 12, cursor: 'pointer', + position: 'relative' + } - const changeResolution = () => { - if (yingshiPrepareRef.current) { - return - } - if (videoObj.type == 'yingshi') { - yingshiPrepareRef.current = 'yingshi' - setYingshiPrepare('play') - } - setResolution(resolution == 'sd' ? 'hd' : 'sd') - } + const changeResolution = () => { + if (yingshiPrepareRef.current) { + return + } + if (videoObj.type == 'yingshi') { + yingshiPrepareRef.current = 'yingshi' + setYingshiPrepare('play') + } + setResolution(resolution == 'sd' ? 'hd' : 'sd') + } - const histroySelected = operationState && operationState.histroy.select - return ( - <> - { - operationState ? - operationState.control.select ? - : - operationState.talk.select ? - : - '' : '' - } - { - showTimeSelect ? - { setShowTimeSelect(false) }} histroyTime={histroyTime} setHistroyTime={setHistroyTime} setProcessDisX={setProcessDisX} - /> - : '' - } - {/* 下方操作 */} -
+ const histroySelected = operationState && operationState.histroy.select + + return ( + <> + { + operationState ? + operationState.control.select ? + : + operationState.talk.select ? + : + '' : '' + } { - histroySelected && histroyTime.length ? - : '' + showTimeSelect ? + { setShowTimeSelect(false) }} histroyTime={histroyTime} setHistroyTime={setHistroyTime} setProcessDisX={setProcessDisX} + /> + : '' } -
- { - operationState ? - histroySelected ? - <> -
- { - videoObj.type != 'yingshi' ? - isPlaying ? - - : - : '' - } - - {histroyTime.length ? `${moment(histroyTime[0]).format(timeFormat)} / ${moment(histroyTime[1]).format(timeFormat)}` : ''} - -
-
- - { - videoObj.type != 'yingshi' ? - - : '' - } -
{ - setShowTimeSelect(!showTimeSelect) - }}>时间设置
-
- - : - <> -
- { - operationState ? - operation.map(p => { - if (p.hide) return null; - return - }) : '' - } -
-
- { - videoObj.playUrlSd && videoObj.playUrlHd ? - resolution == 'sd' ? -
标清
: -
高清
- : '' - } -
- : '' - } + { + histroySelected && histroyTime.length ? + : '' + } +
+ { + operationState ? + histroySelected ? + <> +
+ { + videoObj.type != 'yingshi' ? + isPlaying ? + + : + : '' + } + + {histroyTime.length ? `${moment(histroyTime[0]).format(timeFormat)} / ${moment(histroyTime[1]).format(timeFormat)}` : ''} + +
+
+ + { + videoObj.type != 'yingshi' ? + + : '' + } +
{ + setShowTimeSelect(!showTimeSelect) + }}>时间设置
+
+ + : + <> +
+ { + operationState ? + operation.map(p => { + if (p.hide) return null; + return { p.click(e) }} + /> + }) : '' + } +
+
+ { + videoObj.playUrlSd && videoObj.playUrlHd ? + resolution == 'sd' ? +
标清
: +
高清
+ : '' + } +
+ : '' + } +
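A condensed sketch of the quality switch rendered above (illustration only, not part of the patch), assuming the same `resolution`, `changeResolution`, `butStyle` and `videoObj` used in this hunk; the two branches only differ in the label shown for the current stream:

    // Sketch: single-button equivalent of the SD/HD markup above.
    const QualityButton = () => {
      // The switch is only offered when both an SD and an HD url are configured.
      if (!(videoObj.playUrlSd && videoObj.playUrlHd)) return null
      return (
        <div style={butStyle} onClick={changeResolution}>
          {resolution == 'sd' ? '标清' : '高清'}
        </div>
      )
    }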
-
- - ) + + ) } function mapStateToProps (state) { - const { auth } = state; - return { - user: auth.user, - }; + const { auth } = state; + return { + user: auth.user, + }; } export default connect(mapStateToProps)(VideoOperation); \ No newline at end of file diff --git a/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperationCloudControl.jsx b/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperationCloudControl.jsx index 0f83549..32f6573 100644 --- a/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperationCloudControl.jsx +++ b/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperationCloudControl.jsx @@ -4,91 +4,93 @@ import { ysptz, gbptz } from '$utils' import './videoPlay.less'; const VideoOperationCloudControl = ({ - videoObj, iotVideoServer + videoObj, iotVideoServer }) => { - const cloudControl = (ac) => { - if (videoObj.type == 'yingshi') { - ysptz(ac, videoObj) - } else { - gbptz(ac, videoObj, iotVideoServer) - } - } - - return ( -
-
- { - [{ - style: { top: 12, left: (148 - 24) / 2, }, - click: () => { cloudControl('up') } - }, { - style: { right: 12, top: (148 - 24) / 2, }, - click: () => { cloudControl('right') }, - }, { - style: { bottom: 12, left: (148 - 24) / 2, }, - click: () => { cloudControl('down') }, - }, { - style: { left: 12, top: (148 - 24) / 2 }, - click: () => { cloudControl('left') }, - }].map((s, i) => { - return ( - - ) - }) - } + const cloudControl = (ac) => { + if (videoObj.type == 'yingshi') { + ysptz(ac, videoObj) + } else { + gbptz(ac, videoObj, iotVideoServer) + } + } + const vcmpVideo = document.getElementById('vcmp_videoplay')?.offsetHeight; + console.log(vcmpVideo); + return (
-
- { - [ - [{ n: '+', click: () => { cloudControl('focus_in') }, }, { n: '焦距' }, { n: '-', click: () => { cloudControl('focus_out') }, }], - [{ n: '+', click: () => { cloudControl('zoom_in') }, }, { n: '缩放' }, { n: '-', click: () => { cloudControl('zoom_out') }, }], - ].map(s => { - return ( -
- { - s.map((m, mi) => { - return ( -
{ m.click ? m.click() : null }} - >{m.n}
- ) + position: 'absolute', top: 'calc(50% - 105px)', left: 'calc(50% - 125px)', + width: 210, height: 250, backgroundColor: '#00000014', borderRadius: 68, transform: `scale(${vcmpVideo / 400 > 2.5 ? 2.5 : vcmpVideo / 400})`, + }}> +
+ { + [{ + style: { top: 12, left: (148 - 24) / 2, }, + click: () => { cloudControl('up') } + }, { + style: { right: 12, top: (148 - 24) / 2, }, + click: () => { cloudControl('right') }, + }, { + style: { bottom: 12, left: (148 - 24) / 2, }, + click: () => { cloudControl('down') }, + }, { + style: { left: 12, top: (148 - 24) / 2 }, + click: () => { cloudControl('left') }, + }].map((s, i) => { + return ( + + ) }) - } -
- ) - }) - } -
- ) + } +
+
+ { + [ + [{ n: '+', click: () => { cloudControl('focus_in') }, }, { n: '焦距' }, { n: '-', click: () => { cloudControl('focus_out') }, }], + [{ n: '+', click: () => { cloudControl('zoom_in') }, }, { n: '缩放' }, { n: '-', click: () => { cloudControl('zoom_out') }, }], + ].map(s => { + return ( +
+ { + s.map((m, mi) => { + return ( +
{ m.click ? m.click() : null }} + >{m.n}
+ ) + }) + } +
+ ) + }) + } +
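The control pad above sizes itself from the player height and routes every arrow, focus and zoom button through `cloudControl`; a minimal sketch of that behaviour (illustration only), assuming `ysptz`/`gbptz` keep the call shapes used in this file:

    // Sketch: PTZ commands go to the Yingshi (EZVIZ) open platform for
    // 'yingshi' devices, and through the IoT video server for other devices.
    const cloudControl = (ac) => {
      if (videoObj.type == 'yingshi') {
        ysptz(ac, videoObj)
      } else {
        gbptz(ac, videoObj, iotVideoServer)
      }
    }

    // The pad is laid out against a roughly 400px-tall player; scale with the
    // measured #vcmp_videoplay height and clamp at 2.5x, as in the style above.
    const panelScale = (playerHeight) => Math.min(playerHeight / 400, 2.5)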
+ ) + } function mapStateToProps (state) { - const { auth, global } = state; - return { - user: auth.user, - iotVideoServer: global.iotVideoServer, - }; + const { auth, global } = state; + return { + user: auth.user, + iotVideoServer: global.iotVideoServer, + }; } export default connect(mapStateToProps)(VideoOperationCloudControl); \ No newline at end of file diff --git a/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperationTalk.jsx b/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperationTalk.jsx index 1b153d9..ac80ba3 100644 --- a/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperationTalk.jsx +++ b/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoOperationTalk.jsx @@ -10,93 +10,95 @@ import { checkAudioVideo, uploadVoice2Yingshi, sendVoice2YingshiCamera } from '$ import AudioRecoder, { RecordState } from "./audioRecoder" const VideoOperationTalk = ({ - videoObj, + videoObj, }) => { - const [recordState, setRecordState] = useState(RecordState.NONE) + const [recordState, setRecordState] = useState(RecordState.NONE) - const ToastInCustomContainer = ToastFactory.create({ - getPopupContainer: () => document.getElementById('vcmp_videoplay'), - }); + const ToastInCustomContainer = ToastFactory.create({ + getPopupContainer: () => document.getElementById('vcmp_videoplay'), + }); - useEffect(() => { + useEffect(() => { - }, []) + }, []) - const startTalk = () => { - setRecordState(RecordState.START) - } + const startTalk = () => { + setRecordState(RecordState.START) + } - const stopTalk = () => { - setRecordState(RecordState.STOP) - } + const stopTalk = () => { + setRecordState(RecordState.STOP) + } - const onStopTalk = async (data) => { - setRecordState(RecordState.STOP) - const { blob: audioData } = data; - if (!audioData) return; - let buffer = await audioData.arrayBuffer(); - let file = new File([buffer], Date.now() + "", { - type: "audio/mpeg" - }); + const onStopTalk = async (data) => { + setRecordState(RecordState.STOP) + const { blob: audioData } = data; + if (!audioData) return; + let buffer = await audioData.arrayBuffer(); + let file = new File([buffer], Date.now() + "", { + type: "audio/mpeg" + }); - try { - let uploadRes = await uploadVoice2Yingshi({ voiceFile: file, accessToken: videoObj.yingshiToken, }) - const { url } = uploadRes - let sendRes = await sendVoice2YingshiCamera({ - accessToken: videoObj.yingshiToken, - deviceSerial: videoObj.serialNo, - channelNo: videoObj.channelNo, - fileUrl: url - }); - ToastInCustomContainer.success('已发送'); - } catch (error) { - if (error.msg) { - ToastInCustomContainer.error(error.msg); - } else { - ToastInCustomContainer.error('发送失败'); - } - } - } + try { + let uploadRes = await uploadVoice2Yingshi({ voiceFile: file, accessToken: videoObj.yingshiToken, }) + const { url } = uploadRes + let sendRes = await sendVoice2YingshiCamera({ + accessToken: videoObj.yingshiToken, + deviceSerial: videoObj.serialNo, + channelNo: videoObj.channelNo, + fileUrl: url + }); + ToastInCustomContainer.success('已发送'); + } catch (error) { + if (error.msg) { + ToastInCustomContainer.error(error.msg); + } else { + ToastInCustomContainer.error('发送失败'); + } + } + } + const vcmpVideo = document.getElementById('vcmp_videoplay')?.offsetHeight; + console.log(vcmpVideo); - return ( -
- -
{ - checkAudioVideo({ audio: true }).then(res => { - // console.log('已点击允许,开启成功'); - if (recordState === RecordState.START) { - stopTalk() - } else { - startTalk() - } - }).catch(err => { - ToastInCustomContainer.destroyAll() - if (err.code && err.code == 404) { - ToastInCustomContainer.error("浏览器不支持") - } else { - ToastInCustomContainer.error("请检查是否存在麦克风,或是否禁用麦克风") - } - }) - }} - >{recordState == RecordState.START ? '结束' : '开始'}讲话
- -
- ) + return ( +
1.5 ? 1.5 : vcmpVideo / 400})`, + }}> + +
{ + checkAudioVideo({ audio: true }).then(res => { + // console.log('已点击允许,开启成功'); + if (recordState === RecordState.START) { + stopTalk() + } else { + startTalk() + } + }).catch(err => { + ToastInCustomContainer.destroyAll() + if (err.code && err.code == 404) { + ToastInCustomContainer.error("浏览器不支持") + } else { + ToastInCustomContainer.error("请检查是否存在麦克风,或是否禁用麦克风") + } + }) + }} + >{recordState == RecordState.START ? '结束' : '开始'}讲话
+ +
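The talk button above records a clip and pushes it to the camera through the Yingshi open platform; a minimal sketch of that round trip (illustration only, Toast error handling omitted), assuming `uploadVoice2Yingshi` and `sendVoice2YingshiCamera` keep the signatures imported at the top of this file:

    // Sketch: record -> upload the voice file -> ask the camera to play it.
    const sendRecording = async (audioBlob) => {
      const buffer = await audioBlob.arrayBuffer()
      const file = new File([buffer], Date.now() + '', { type: 'audio/mpeg' })
      // 1. upload the clip to the Yingshi platform and get a file url back
      const { url } = await uploadVoice2Yingshi({ voiceFile: file, accessToken: videoObj.yingshiToken })
      // 2. tell the camera to fetch and play that url
      await sendVoice2YingshiCamera({
        accessToken: videoObj.yingshiToken,
        deviceSerial: videoObj.serialNo,
        channelNo: videoObj.channelNo,
        fileUrl: url,
      })
    }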
+ ) } function mapStateToProps (state) { - const { auth } = state; - return { - user: auth.user, - }; + const { auth } = state; + return { + user: auth.user, + }; } export default connect(mapStateToProps)(VideoOperationTalk); \ No newline at end of file diff --git a/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoPlay.jsx b/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoPlay.jsx index 206629b..7fe6048 100644 --- a/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoPlay.jsx +++ b/code/VideoAccess-VCMP/web/client/src/components/videoPlayer/videoPlay.jsx @@ -18,9 +18,13 @@ const VideoPlay = ({ height, width, containerId = 'myPlayer', // playUrl, name, - + global, + videoStyle, + changeData, videoObj, - + sizeWh, + slideDown, //视频下方操作是否滑动 + IsSize, //是否按照16:9的比例播放 // videoObj = { // type: 'yingshi', // audio: false, @@ -44,7 +48,8 @@ const VideoPlay = ({ // }, // - iotVideoServer + iotVideoServer, + local, //是否本地调用视频 }) => { const [jessibuca, setjessibuca] = useState(null) const [isPlaying, setIsPlaying] = useState(false) @@ -55,7 +60,10 @@ const VideoPlay = ({ const [histroyTime, setHistroyTime] = useState([]) const [histroyBegain, setHistroyBegain] = useState() const [roll, setRoll] = useState()//滚动备注 + const [size, setSize] = useState(sizeWh) //视频本身和父级尺寸 const [resolution, setResolution] = useState('sd') // 标清 sd 高清 hd + // const [frequency, setFrequency] = useState(true) + // 标记萤石操作中,等待ifream返回信息后清空 const [yingshiPrepare, setYingshiPrepare] = useState('') @@ -69,6 +77,11 @@ const VideoPlay = ({ setRoll(false) }, [resolution]); + + useEffect(() => { + setResolution(changeData?.hdChecked ? 'hd' : 'sd') + }, [changeData?.hdChecked]) + const changeSelectState = (key) => { if (videoObj.type == 'yingshi' && yingshiPrepareRef.current) { return @@ -104,7 +117,8 @@ const VideoPlay = ({ }, { key: 'talk', hide: !(videoObj.type == 'yingshi'), - click: () => { + click: (e) => { + console.log(e); changeSelectState('talk') } }, { @@ -115,6 +129,7 @@ const VideoPlay = ({ } changeSelectState('fullScreen') let player = document.getElementById('vcmp_videoplay') + console.log(player); if (screenfull.isEnabled) { screenfull.toggle(player); } @@ -158,7 +173,6 @@ const VideoPlay = ({ // 萤石 ifream 信息监听 const listenYingshiMessage = async (e) => { const { data, origin } = e - console.log(e); if (origin !== 'https://open.ys7.com') return if (data.type == "handleSuccess") { setRoll(true) @@ -306,13 +320,48 @@ const VideoPlay = ({ } } - + useEffect(() => { + if (local) { + //弹框视频自适应 + const VideoLocal = document.getElementById('video_local') + setSize({ parentWidth: VideoLocal?.clientWidth, parentHeight: '100%', width: width, height: height }) + } else { + setSize(sizeWh) + } + }, [sizeWh]) + useEffect(() => { + if (size?.width || size?.height) { + // if (size?.parentWidth >= size?.width && size?.parentHeight >= size?.height) { + if (size?.width && size?.height) { + if (size?.height / size?.width < 0.55 || size?.height / size?.width > 0.57) { + if (size?.width / 16 > size?.height / 9) { + setSize({ ...size, width: (size?.height / 9) * 16, }) + } else { + setSize({ ...size, height: (size?.width / 16) * 9 }) + } + } + } else if (size?.width) { + setSize({ ...size, height: (size?.width / 16) * 9, }) + } else if (size?.height) { + setSize({ ...size, width: (size?.height / 9) * 16, }) + } + // } else { + // setSize({ ...size, width: size?.parentWidth, height: size?.parentHeight }) + // } + } else { + if (size && size?.width / 16 > size?.height / 9) { + setSize({ ...size, 
height: size?.parentHeight }) + } else { + setSize({ ...size, width: size?.parentWidth, }) + } + } + }, [size]) return ( <> -
-
+
+
{/* 顶部信息 */} - + videoStyle={videoStyle} + />} {/* 视频内容 */} - { - videoObj.type == 'yingshi' ? -
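The sizing effect added to videoPlay.jsx above letterboxes the player into a 16:9 area inside the measured box; the 0.55-0.57 check is a tolerance band around 9/16 (0.5625), so the correction only fires when the box is meaningfully off-ratio. A minimal sketch of the same fit (illustration only, assuming a plain `{ width, height }` box):

    // Sketch: fit a 16:9 video area into the measured box, mirroring the
    // useEffect on `size` above.
    const fitTo16x9 = ({ width, height }) => {
      if (width / 16 > height / 9) {
        // box is wider than 16:9: derive the width from the height
        return { width: (height / 9) * 16, height }
      }
      // box is taller than 16:9: derive the height from the width
      return { width, height: (width / 16) * 9 }
    }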