<template>
  <!-- <div class="titlename">4. Reference example [audio is sent to the server via WebSocket]</div> -->
  <div class="record-page">
    <img v-if="!data.showModal" src="@/assets/images/voice/jqr.png" class="img" @click="STARTVOICE()" />
    <img v-else src="@/assets/images/voice/iflytek.gif" class="img" @click="STARTVOICE()" />
    <div class="duihuak" :class="{ show: data.showModal }">
      <div class="tip">剩余录音时间:{{ showSecond }}秒</div>
      <div class="msg">
        <div class="item"><span>机器人:</span>{{ nowword }}</div>
      </div>
      <div class="close" @click="closedia">
        <el-icon :size="20"><Close /></el-icon>
      </div>
      <div class="bottombtn">
        <el-button class="chongzhi" type="primary" @click="chongzhi">重置</el-button>
        <el-button class="queding" type="primary" @click="queding">确定</el-button>
      </div>
      <div class="searchResult" v-if="isGuess && data.showModal">
        <div class="searchtext">猜您想找:</div>
        <div class="Resultlist" v-if="guessArr.dataList?.length > 0">
          <div class="eachMay" v-for="item in guessArr.dataList" :key="item.id" @click="resultClick(item)">
            {{ item.hotWordName }}
          </div>
        </div>
      </div>
    </div>
  </div>
</template>
<script setup>
import { getCurrentInstance, onBeforeUnmount, onMounted, reactive, ref } from 'vue';
import lamejs from 'lamejs';
import Recorder from 'js-audio-recorder';
import useUserStore from '@/store/modules/user';
import bus from '@/bus';
import { findDictObj2 } from '@/utils/ruoyi';

const { proxy } = getCurrentInstance();
const userStore = useUserStore();

const lockReconnect = ref(null); // guards against parallel reconnect attempts
const timeoutnum = ref(null); // delayed-reconnect timer

const recorder = new Recorder({
  sampleBits: 16, // sample size: 8 or 16 bits, default 16
  sampleRate: 48000, // sample rate: 11025/16000/22050/24000/44100/48000; Chrome defaults to 48000
  numChannels: 1, // channels: 1 or 2, default 1
  // compiling: false, // convert while recording (0.x only, being added in 1.x); default false
});

const nowword = ref('你好,请点击【开始录制】,进行语音录制!'); // current prompt / progress message shown in the dialog
const uploadbusinessSourceCode = ref('dpyysb'); // business-source code sent with the upload, used to match the command when the result comes back
const process = ref('1'); // recording state: 1 = idle/stopped, 2 = recording

const showSecond = ref(8); // seconds of recording time remaining
const timEnd = ref(null); // countdown interval (1s tick)
const settimEnd = ref(null); // auto-stop timeout

const judgmentClose = ref(null); // watchdog timer armed after each heartbeat
const jiluTime = ref(null); // timestamp of the last heartbeat message

const data = reactive({
  showModal: false, // whether the dialog is visible
  recordStatus: null, // current step of the recording flow
  miao: 0, // elapsed recording time in seconds
  recognizeWs: null, // recognition WebSocket instance
});
const isGuess = ref(false); // show the "guess what you want" suggestion list
const guessArr = ref({}); // suggestion payload returned when recognition fails

onMounted(() => {
  // Open the recognition WebSocket and track the current recording duration
  initRecognizeWs();
  recorder.onprogress = function (params) {
    data.miao = params.duration.toFixed(3);
  };
});

onBeforeUnmount(() => {
  data.recognizeWs && data.recognizeWs.close();
});

// Initialize the voice-dispatch WebSocket service
function initRecognizeWs() {
  // Detach the close handler and close any existing connection before reconnecting
  if (data.recognizeWs) {
    data.recognizeWs.onclose = null;
    data.recognizeWs.close();
  }

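  // Over HTTPS, use wss on the current host; otherwise (local dev) connect
  // directly to the backend voice service.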
  let wsuri;
  if (window.location.protocol.includes('https')) {
    wsuri = `wss://${window.location.host}/voiceWebsocket`;
  } else {
    wsuri = `ws://192.168.20.43:8102/voiceWebsocket`;
    // wsuri = `ws://139.155.49.237:8201/voiceWebsocket`;
    // wsuri = `ws://${window.location.host}/voiceWebsocket`;
    // wsuri = `ws://server1.wh-nf.cn:8201/voiceWebsocket`;
  }
  data.recognizeWs = new WebSocket(wsuri);

  // Connection established
  data.recognizeWs.onopen = function (e) {
    console.log('连接成功', e);
  };

  // Connection error
  data.recognizeWs.onerror = function (evt) {
    console.log('连接失败', evt);
    reconnect();
  };

  data.recognizeWs.onmessage = function (e) {
    if (e.data == 'notice') {
      // Heartbeat: remember when it arrived and arm an 11-second watchdog;
      // if nothing else arrives before it fires, the connection is re-initialized
      jiluTime.value = new Date().getTime();
      GetTimeClose(e);
    } else if (e.data != '客户端连接成功') {
      // Recognition result pushed by the server
      let msg = JSON.parse(e.data);
      let params = msg.data;
      console.log('Websocket接收值', msg);
      console.log('接收的data内部的data', params);

      if (uploadbusinessSourceCode.value == msg.type) {
        // Only handle results whose type matches the business code this page uploads with
        nowword.value = params.recognitionResult;
        if (params.recognitionResult == '') {
          nowword.value = `指令未识别,请重试`;
          return;
        }
        if (params.recognitionActionCode == 'error' || params.recognitionDataSourceCode == 'error') {
          // The command could not be mapped to an action: show the "guess" suggestions
          isGuess.value = true;
          guessArr.value = params;
          console.log('guessArr.value', guessArr.value);
        } else {
          // Dispatch by the recognized action code
          switchRecognitionActionCode(params);
        }
      }
    }
  };
  // Connection closed
  data.recognizeWs.onclose = function (e) {
    console.log('断开连接');
  };
}

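// Heartbeat watchdog: each 'notice' resets an 11-second timer; if the next heartbeat
// does not arrive in time, the WebSocket connection is re-initialized.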
function GetTimeClose(e) {
  console.log('进入定时器', e);
  judgmentClose.value && clearTimeout(judgmentClose.value);
  judgmentClose.value = setTimeout(() => {
    initRecognizeWs();
  }, 11000);
}

// Dispatch on the recognized action code
function switchRecognitionActionCode(params) {
  console.log('params', params);
  switch (params.recognitionActionCode) {
    case 'open':
      // "Open" action: navigate the dashboard header when the target is a path
      if (params.recognitionDataSourceCode == 'path' && params.recognitionDataId) {
        // Voice navigation between the header tabs (water assets / flood safety /
        // flood drainage / monitoring analysis / river ecology / public-opinion analysis)
        bus.emit('changeHeadePath', params.recognitionDataId);
      }
      break;
    case 'detail':
      // "View details" action: open the dynamic popup for the matched feature
      dynamicPopUp(params);
      break;

    default:
      proxy.$modal.msgWarning('该指令操作有误,请重新录音');
      nowword.value = `成功识别语音,返回的指令为:${params.recognitionResult}`;
      break;
  }
}
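// A "guess what you want" suggestion was clicked: rebuild a recognition payload
// from the hot word and dispatch it like a normal recognition result.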
function resultClick(item) {
  console.log('item', item);
  let at = guessArr.value;
  let data = {
    recognitionDataId: item.businessDataId,
    recognitionActionCode: item.businessSourceCode == 'path' ? 'open' : 'detail',
    // recognitionActionCode: at.recognitionActionCode == 'error'?at.actionList[0].businessSourceCode:at.recognitionActionCode,
    recognitionDataSourceCode: item.businessSourceCode,
    recognitionResult: at.recognitionResult,
  };
  switchRecognitionActionCode(data);
}
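// Find the map feature whose id matches recognitionDataId in the cached map data,
// ease the map to it, and emit the events that draw it and open its detail popup.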
async function dynamicPopUp(params) {
  console.log('params111', params);
  // Flatten every layer's point list into one array, then find the record by id
  let allData = [];
  userStore.MapData.forEach(item => {
    allData = allData.concat(item.data);
  });
  let item = await findDictObj2(allData, 'id', params.recognitionDataId);
  console.log('item匹配到', item);
  if (item && item.id) {
    item.stType = item.pointType;
    let geometry = item.geometry;
    let feature = turf.feature(Terraformer.WKT.parse(geometry), _.cloneDeep(item));
    newfiberMap.map.easeTo({ center: turf.getCoords(turf.center(feature)), zoom: 15 });
    bus.emit('setGeoJSON', { json: turf.featureCollection([feature]), key: 'temporary' });
    if (feature) {
      feature.properties.type = feature.properties.pointType;
      bus.emit('xiaofeifei', feature);
    }
  }
}

// Reconnect
function reconnect() {
  if (lockReconnect.value) {
    return;
  }

  lockReconnect.value = true;
  // Keep retrying until connected; delay each attempt to avoid flooding the server
  timeoutnum.value && clearTimeout(timeoutnum.value);
  timeoutnum.value = setTimeout(() => {
    initRecognizeWs();
    lockReconnect.value = false;
  }, 8000);
}
/**
 * Recording operations
 */
// Start recording: request mic permission, begin capture, and arm an 8-second auto-stop
function startRecorder(val) {
  // if (new Date().getTime() - jiluTime.value > 10000) {
  //   initRecognizeWs();
  // }

  data.recordStatus = val;
  // Request microphone permission
  Recorder.getPermission().then(
    () => {
      // proxy.$modal.msgSuccess("获取权限成功,开始录音");
      recorder.start().then(() => {
        data.showModal = true;
        nowword.value = '开始录音,正在录音...';
        process.value = 2;
        showSecond.value = 8;

        settimEnd.value = setTimeout(() => {
          stopRecorderAndupload('stop');
        }, showSecond.value * 1000);

        timEnd.value = setInterval(setTime, 1000);
      });
    },
    error => {
      proxy.$modal.msgError('请先允许该网页使用麦克风');
      // console.log(`${error.name} : ${error.message}`);
    }
  );
}

// Stop recording
function stopRecorder(val) {
  process.value = 1;
  nowword.value = '录音结束';
  data.recordStatus = val;
  recorder.stop();
}

// Stop recording and automatically upload the audio for recognition
function stopRecorderAndupload(val) {
  process.value = 1;
  nowword.value = '录音结束,正在识别...';
  data.recordStatus = val;
  recorder.stop();
  uploadaudioformwebSocket();
}

// Reset: stop the current recording, clear both timers, then start a new recording
function chongzhi() {
  // Stop the current recording first
  stopRecorder();
  // Clear the auto-stop timeout and the countdown interval
  clearTimeout(settimEnd.value);
  clearInterval(timEnd.value);
  process.value = 1;
  STARTVOICE();
}

// Confirm
function queding() {
  STARTVOICE();
}

// Handler for the robot icon: starts a recording when idle, otherwise stops and uploads immediately
function STARTVOICE() {
  console.log('process.value', process.value);
  isGuess.value = false;
  clearInterval(timEnd.value);
  if (process.value == 1) {
    startRecorder('begin');
  } else {
    showSecond.value = 0;
    clearTimeout(settimEnd.value);
    clearInterval(timEnd.value);
    stopRecorderAndupload('stop');
  }
}

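// Tick the remaining-seconds counter down once per second.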
function setTime() {
  if (showSecond.value) {
    showSecond.value = showSecond.value - 1;
  } else {
    clearInterval(timEnd.value);
  }
}

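// Close the dialog: reset the countdown, hide the panel, and stop the recorder.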
function closedia() {
  showSecond.value = 8;
  data.showModal = false;
  // nowword.value = '你好,请点击【开始录制】,进行语音录制!';
  nowword.value = '语音录制中...';
  stopRecorder();
}

// Upload the recorded audio to the server over the WebSocket
function uploadaudioformwebSocket(type) {
  // Convert the WAV recording to MP3, then to a base64 data URL, and send it as JSON
  const mp3Blob = convertToMp3(recorder.getWAV());
  // recorder.download(mp3Blob, 'recorder', 'mp3');
  mp3ToBase64(mp3Blob).then(stream => {
    let parms = {
      createBy: userStore.userInfo.userName,
      voiceType: 'mp3',
      data: stream,
      businessSourceCode: type ? type : uploadbusinessSourceCode.value, // varies by business scenario, agreed with the backend
    };
    data.recognizeWs.send(JSON.stringify(parms));
  });
}

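// Encode the recorded PCM data as MP3 with lamejs, feeding the encoder in
// 1152-sample frames and collecting the output chunks into a Blob.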
function convertToMp3(wavDataView) {
  // Read the WAV header for channel count and sample rate
  const wav = lamejs.WavHeader.readHeader(wavDataView); // the recorder config could be used instead of reading the header
  const { channels, sampleRate } = wav;
  const mp3enc = new lamejs.Mp3Encoder(channels, sampleRate, 128);
  // Get the left/right channel PCM data as 16-bit samples
  const result = recorder.getChannelData();
  const buffer = [];
  const leftData = result.left && new Int16Array(result.left.buffer, 0, result.left.byteLength / 2);
  const rightData = result.right && new Int16Array(result.right.buffer, 0, result.right.byteLength / 2);
  const remaining = leftData.length + (rightData ? rightData.length : 0);
  const maxSamples = 1152;
  for (let i = 0; i < remaining; i += maxSamples) {
    const left = leftData.subarray(i, i + maxSamples);
    let right = null;
    let mp3buf = null;
    if (channels === 2) {
      right = rightData.subarray(i, i + maxSamples);
      mp3buf = mp3enc.encodeBuffer(left, right);
    } else {
      mp3buf = mp3enc.encodeBuffer(left);
    }
    if (mp3buf.length > 0) {
      buffer.push(mp3buf);
    }
  }
  const enc = mp3enc.flush();
  if (enc.length > 0) {
    buffer.push(enc);
  }
  return new Blob(buffer, { type: 'audio/mp3' });
}

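// Read the MP3 Blob as a base64 data URL so it can be embedded in the JSON message.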
function mp3ToBase64(blob) {
  return new Promise((resolve, reject) => {
    const fileReader = new FileReader();
    fileReader.onload = e => {
      resolve(e.target.result);
    };
    fileReader.readAsDataURL(blob);
    fileReader.onerror = () => {
      reject(new Error('blobToBase64 error'));
    };
  });
}
</script>
<style lang="scss" scoped>
$text-color: #fff;
$border-color-base: rgba($text-color, 0.5);
$primary-color: #1890ff;
$font-size-base: 15px;
$error-color: #f5222d;
$highlight-color: #f5222d;
$form-item-margin-bottom: 18px;
$popover-bg: #1d1f4a;
$select-item-selected-bg: $primary-color;

$theme-color-1: #9ec3de;
$theme-color-2: #36e2f7;
$theme-color-3: #072e7a;

$zindex-modal: 1009;
$zindex-modal-mask: 1009;

$tooltip-max-width: 300px;
$tooltip-bg: #1d1f4a;

.titlename {
  position: relative;
  margin: 10px 0;
  font-size: 16px;
  font-weight: bold;
}

.record-page {
  position: fixed;
  z-index: 100;
  top: 13px;
  right: 110px;
  animation: hideLeftMenu 0.75s ease-in-out;
  animation-fill-mode: forwards;

  &.show {
    animation-fill-mode: forwards;
    animation: showLeftMenu 0.75s ease-in-out;
  }
  .img {
    width: 24px;
    cursor: pointer;
  }
  .duihuak {
    position: absolute;
    width: 320px;
    height: 166px;
    background: url('@/assets/images/voice/duihuak.png') no-repeat;
    top: 50px;
    right: -37px;
    padding: 50px 0 20px 0;
    font-size: 14px;
    transform: scale(0);
    transform-origin: left bottom;
    transition: all 0.25s ease-in-out;
    &.show {
      transform: scale(1);
    }
    .close {
      position: absolute;
      top: 0;
      right: 0;
      cursor: pointer;
    }
    .tip {
      position: absolute;
      top: 25px;
      left: 20px;
      font-size: 12px;
      color: #ccc;
    }
    .msg {
      overflow: hidden;
      overflow-y: auto;
      max-height: 60px;
      position: absolute;
      top: 50px;
      left: 20px;
      font-size: 14px;
      color: #ffffff;
      span {
        color: $primary-color;
        font-weight: 600;
      }
      .message {
        margin-top: 5px;
        padding-right: 10px;
        padding-left: 20px;
        font-size: 15px;
        font-weight: 600;
        overflow: hidden;
        // text-align: right;
        text-overflow: ellipsis;
        cursor: pointer;
        display: -webkit-box;
        -webkit-box-orient: vertical;
        color: $primary-color;
        span {
          color: #ccc;
        }
      }
    }
    .searchResult {
      width: 100%;
      height: 200px;
      position: absolute;
      left: 0;
      bottom: -200px;
      background: url('@/assets/images/pictureOnMap/contentBgc.png') no-repeat center;
      background-size: 100% 100%;
      .searchtext {
        padding: 5px;
      }
      .Resultlist {
        height: 170px;
        overflow-y: auto;
        padding: 0 10px;

        .eachMay {
          margin-bottom: 5px;
          cursor: pointer;
          line-height: 22px;
          padding-left: 5px;
        }
        .eachMay:hover {
          background: rgba(255, 255, 255, 0.2);
        }
      }
    }
  }
}

.bottombtn {
  z-index: 99;
  position: absolute;
  bottom: 20px;
  left: 50%;
  transform: translate(-50%);

  .chongzhi {
    background: #09bcd7;
    height: 26px;
  }

  .queding {
    background: #18a7f2;
    height: 26px;
  }
}
</style>