
Golang: Developing WebHTK, a Chrome-based speech recognition web demo system (UI)

Original · JavaScript · Author: 油窃 · 2020-08-15 16:15:08

  Not much to say about this one: here is the HTML and JavaScript code as it currently stands. I'll update this post as it gets revised later.

  record.html:

  <!DOCTYPE html>

  <html>

  <head>

  <meta charset="utf-8"/>

  <meta http-equiv="X-UA-Compatible" content="IE=edge"/>

  <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no"/>

  <meta name="keywords" content="PONPON,HTK,Go语言"/>

  <meta name="description" content="基于Beego开发语音识别演示系统"/>

  <meta name="generator" content="PONPON" />

  <link href="/static/css/bootstrap.min.css" rel="stylesheet">

  <link href="/static/css/docs.css" rel="stylesheet">

  <link href="" rel="stylesheet">

  <link rel="shortcut icon" href="/static/img/Logoicon.jpg">

  <link rel="stylesheet" href="">

  <link rel="alternate" type="application/rss+xml" href="/rss.xml"/>

  <script type="text/javascript" src="/static/lib/recorder.js"> </script>

  <script type="text/javascript" src="/static/lib/jquery-1.10.1.min.js"> </script>

  <script type="text/javascript" src="/static/lib/recController.js"> </script>

  <title>WebHTK 演示系统</title>

  </head>

  <body>

  <nav id="header">

  <div class="container960 text-center" >

  <h3 id="header-h" class="center" >闽南语 - 语音识别演示系统</h3>

  <ul id="resultbox" class="center" style="padding-top:425px;font-size:20px;text-align: center;color:#FFC8B6;font-family:'微软雅黑'">

  <li >识别结果</li>

  </ul>

  <form style="padding-top:20px;">

  <a id="img" href="javascript://" onclick="test()">

  <img  src="/static/img/aa.png" style="width:85px;height:85px;" alt=""/>

  </a>

  </form>

  <div id="message" style="padding-top:10px;font-size:16px;color:#F5FFFA;text-align: center;font-family:'微软雅黑'">点击麦克风,开始录音!</div>

  <script type="text/javascript">

  var recording = false;

  function test() {

  if (!recording) {

  document.getElementById("img").innerHTML = "<img src='/static/img/a1.png' style='width:85px;height:85px;' alt=''/>";

  toRecord();

  recording=true;

  }else{

  document.getElementById("img").innerHTML = "<img src='/static/img/aa.png' style='width:85px;height:85px;' alt=''/>";

  toSend();

  recording = false;

  }

  };

  function toRecord(){

  rec.record();

  ws.send("start");

  $("#message").text("再次点击,结束录音!");

  intervalKey = setInterval(function() {

  rec.exportWAV(function(blob) {

  rec.clear();

  ws.send(blob);

  });

  }, 300);

  }

  function toSend(){

  rec.stop();

  if (intervalKey == null) {

  $("#message").text("请先录音再发送!");

  return

  };

  ws.send(sampleRate);

  ws.send(channels);

  ws.send("stop");

  rec.clear();

  clearInterval(intervalKey);

  intervalKey = null;

  }

  </script>

  </div>

  </nav>

  <audio class="hide" controls autoplay></audio>

  </body>

  </html>

  recorder.js:

  (function(window) {

  var WORKER_PATH = '/static/lib/recorderWorker.js';

  var Recorder = function(source, chan, cfg) {

  var config = cfg || {};

  var channels = chan || 1;

  var bufferLen = config.bufferLen || 8192;

  this.context = source.context;

  this.node = (this.context.createScriptProcessor || this.context.createJavaScriptNode).call(this.context, bufferLen, channels, channels); // createJavaScriptNode is the legacy name; current Chrome only exposes createScriptProcessor

  var worker = new Worker(config.workerPath || WORKER_PATH);

  worker.postMessage({

  command: 'init',

  config: {

  sampleRate: this.context.sampleRate

  }

  });

  var recording = false,

  currCallback;

  this.node.onaudioprocess = function(e) {

  if (!recording) return;

  worker.postMessage({

  command: 'record',

  buffer: [

  e.inputBuffer.getChannelData(0)

  ]

  });

  }

  this.configure = function(cfg) {

  for (var prop in cfg) {

  if (cfg.hasOwnProperty(prop)) {

  config[prop] = cfg[prop];

  }

  }

  }

  this.record = function() {

  recording = true;

  }

  this.stop = function() {

  recording = false;

  }

  this.clear = function() {

  worker.postMessage({

  command: 'clear'

  });

  }

  this.getBuffer = function(cb) {

  currCallback = cb || config.callback;

  worker.postMessage({

  command: 'getBuffer'

  })

  }

  this.exportWAV = function(cb, type) {

  currCallback = cb || config.callback;

  type = type || config.type || 'audio/wav';

  if (!currCallback) throw new Error('Callback not set');

  worker.postMessage({

  command: 'exportWAV',

  type: type

  });

  }

  worker.onmessage = function(e) {

  var blob = e.data;

  currCallback(blob);

  }

  source.connect(this.node);

  this.node.connect(this.context.destination);

  };

  window.Recorder = Recorder;

  })(window);

  recorderWorker.js:

  var recLength = 0,

  recBuffersL = [],

  sampleRate;

  this.onmessage = function(e) {

  switch (e.data.command) {

  case 'init':

  init(e.data.config);

  break;

  case 'record':

  record(e.data.buffer);

  break;

  case 'exportWAV':

  exportWAV(e.data.type);

  break;

  case 'getBuffer':

  getBuffer();

  break;

  case 'clear':

  clear();

  break;

  }

  };

  function init(config) {

  sampleRate = config.sampleRate;

  }

  function record(inputBuffer) {

  recBuffersL.push(inputBuffer[0]);

  recLength += inputBuffer[0].length;

  }

  function exportWAV(type) {

  var bufferL = mergeBuffers(recBuffersL, recLength);

  var interleaved = interleave(bufferL);

  var dataview = encodeWAV(interleaved);

  var audioBlob = new Blob([dataview], {

  type: type

  });

  this.postMessage(audioBlob);

  }

  function getBuffer() {

  var buffers = [];

  buffers.push(mergeBuffers(recBuffersL, recLength));

  this.postMessage(buffers);

  }

  function clear(inputBuffer) {

  recLength = 0;

  recBuffersL = [];

  }

  function mergeBuffers(recBuffers, recLength) {

  var result = new Float32Array(recLength);

  var offset = 0;

  for (var i = 0; i < recBuffers.length; i++) {

  result.set(recBuffers[i], offset);

  offset += recBuffers[i].length;

  }

  return result;

  }

  function interleave(inputL) {

  var length;

  var result;

  var index = 0,

  inputIndex = 0;

  if (sampleRate == 48000) {

  length = inputL.length / 6;

  result = new Float32Array(length);

  while (index < length) {

  result[index++] = (inputL[inputIndex++] + inputL[inputIndex++] +

  inputL[inputIndex++] + inputL[inputIndex++] +

  inputL[inputIndex++] + inputL[inputIndex++]) / 6;

  }

  } else if (sampleRate == 44100) {

  length = inputL.length / 6;

  result = new Float32Array(length);

  while (index < length) {

  if (inputIndex % 12 == 0) {

  result[index++] = (inputL[inputIndex] + inputL[inputIndex++] +

  inputL[inputIndex++] + inputL[inputIndex++] +

  inputL[inputIndex++] + inputL[inputIndex++] +

  inputL[inputIndex++]) / 7;

  } else {

  result[index++] = (inputL[inputIndex++] + inputL[inputIndex++] +

  inputL[inputIndex++] + inputL[inputIndex++] +

  inputL[inputIndex++] + inputL[inputIndex++]) / 6;

  };

  }

  } else {

  length = inputL.length;

  result = new Float32Array(length);

  while (index < length) {

  result[index++] = inputL[inputIndex++];

  }

  };

  return result;

  }

  function floatTo16BitPCM(output, offset, input) {

  for (var i = 0; i < input.length; i++, offset += 2) {

  var s = Math.max(-1, Math.min(1, input[i]));

  output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);

  }

  }

  function writeString(view, offset, string) {

  for (var i = 0; i < string.length; i++) {

  view.setUint8(offset + i, string.charCodeAt(i));

  }

  }

  function encodeWAV(samples) {

  var buffer = new ArrayBuffer(samples.length * 2);

  var view = new DataView(buffer);

  floatTo16BitPCM(view, 0, samples);

  return view;

  }
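
  Two things are worth noting about this worker. First, interleave() downsamples 48 kHz input by averaging every 6 samples (48000 / 6 = 8000 Hz, which matches the sampleRate value that recController.js reports to the server). Second, encodeWAV(), despite its name, writes only raw 16-bit little-endian PCM with no RIFF/WAVE header, so the server side has to add that header itself if it wants a playable .wav file. The helper below is not part of the project's code, just a minimal Go sketch of the standard 44-byte PCM WAV header:

  // Hypothetical Go helper (not from the WebHTK source): wraps the raw
  // 16-bit PCM streamed by recorderWorker.js in a standard 44-byte
  // RIFF/WAVE header so it can be saved or passed on to other tools.
  package wav

  import (
      "bytes"
      "encoding/binary"
  )

  func WrapPCM(pcm []byte, sampleRate uint32, channels uint16) []byte {
      const bitsPerSample = 16
      byteRate := sampleRate * uint32(channels) * bitsPerSample / 8
      blockAlign := channels * bitsPerSample / 8

      var buf bytes.Buffer
      buf.WriteString("RIFF")
      binary.Write(&buf, binary.LittleEndian, uint32(36+len(pcm))) // RIFF chunk size
      buf.WriteString("WAVE")
      buf.WriteString("fmt ")
      binary.Write(&buf, binary.LittleEndian, uint32(16))            // fmt chunk size
      binary.Write(&buf, binary.LittleEndian, uint16(1))             // PCM format
      binary.Write(&buf, binary.LittleEndian, channels)
      binary.Write(&buf, binary.LittleEndian, sampleRate)
      binary.Write(&buf, binary.LittleEndian, byteRate)
      binary.Write(&buf, binary.LittleEndian, blockAlign)
      binary.Write(&buf, binary.LittleEndian, uint16(bitsPerSample))
      buf.WriteString("data")
      binary.Write(&buf, binary.LittleEndian, uint32(len(pcm))) // data chunk size
      buf.Write(pcm)
      return buf.Bytes()
  }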

  recController.js:

  var onFail = function(e) {

  console.log('Rejected!', e);

  };

  var onSuccess = function(s) {

  var context = new (window.AudioContext || window.webkitAudioContext)(); // webkitAudioContext is the old prefixed name

  var mediaStreamSource = context.createMediaStreamSource(s);

  rec = new Recorder(mediaStreamSource, channels);

  sampleRate = 8000;

  }

  navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;

  var rec;

  var intervalKey = null;

  var audio = document.querySelector('audio'); // the <audio> element in record.html has no id, so select by tag

  var sampleRate;

  var channels = 1;

  function startRecording() {

  if (navigator.getUserMedia) {

  navigator.getUserMedia({

  audio: true

  }, onSuccess, onFail);

  } else {

  console.log('navigator.getUserMedia not present');

  }

  }

  startRecording();

  //--------------------

  var ws = new WebSocket('ws://' + window.location.host + '/join');

  ws.onopen = function() {

  console.log("Opened connection to websocket");

  };

  ws.onclose = function() {

  console.log("Closed connection to websocket");

  }

  ws.onerror = function(e) {

  console.log("Cannot connect to websocket");

  }

  ws.onmessage = function(result) {

  var data = JSON.parse(result.data);

  console.log('识别结果:' + data.Pinyin);

  var box = document.getElementById("resultbox");

  box.getElementsByTagName("li")[0].innerHTML = data.Hanzi;

  document.getElementById("message").innerHTML = "点击麦克风,开始录音!";

  }
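
  Taken together, the client speaks a simple protocol over the /join WebSocket: it sends the text message "start", then a headerless WAV blob every 300 ms while recording, then the sample rate (8000), the channel count (1) and the text message "stop"; the server is expected to answer with JSON carrying Pinyin and Hanzi fields. The Beego-side handler is not part of this post, but a rough sketch of what it has to do could look like the following (plain net/http plus gorilla/websocket is used here purely for illustration, and recognize() is a placeholder for the actual HTK decoding step):

  // Hypothetical sketch of the /join endpoint the page connects to.
  // The real project uses Beego; this version only illustrates the
  // message sequence sent by record.html and recController.js.
  package main

  import (
      "bytes"
      "log"
      "net/http"

      "github.com/gorilla/websocket"
  )

  var upgrader = websocket.Upgrader{CheckOrigin: func(r *http.Request) bool { return true }}

  // Result mirrors the JSON fields the front end reads (data.Pinyin, data.Hanzi).
  type Result struct {
      Pinyin string
      Hanzi  string
  }

  func joinHandler(w http.ResponseWriter, r *http.Request) {
      conn, err := upgrader.Upgrade(w, r, nil)
      if err != nil {
          log.Println("upgrade:", err)
          return
      }
      defer conn.Close()

      var pcm bytes.Buffer // accumulates the 300 ms PCM chunks
      for {
          msgType, data, err := conn.ReadMessage()
          if err != nil {
              return
          }
          switch {
          case msgType == websocket.BinaryMessage:
              pcm.Write(data) // audio chunk exported by recorder.js
          case string(data) == "start":
              pcm.Reset()
          case string(data) == "stop":
              conn.WriteJSON(recognize(pcm.Bytes())) // placeholder for the HTK decoding step
          default:
              log.Println("param:", string(data)) // sampleRate / channels arrive as text before "stop"
          }
      }
  }

  func recognize(pcm []byte) Result { return Result{Pinyin: "...", Hanzi: "..."} }

  func main() {
      http.HandleFunc("/join", joinHandler)
      log.Fatal(http.ListenAndServe(":8080", nil))
  }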

