1、前言
2、后端代码
@RequestMapping(value = "/coalAudio")
@ResponseBody
public void getAudio(HttpServletResponse response) throws Exception {
    // Resolve the configured audio file location from the parameter table.
    String path = paramSetMapper.getParamByCode("coalAudioPath").getParamValue();
    File file = new File(path);
    if (!file.exists()) {
        // FIX: report a proper 404 to the client. The original threw a
        // RuntimeException, which surfaces as a 500 Internal Server Error.
        response.sendError(HttpServletResponse.SC_NOT_FOUND, "音频文件不存在 --> 404");
        return;
    }
    long length = file.length();
    // Advertise byte-range support and the payload size. The original also
    // parsed a hard-coded range of "0" (a no-op) and computed an int "percent"
    // cast from the long length — for files > 2 GB that cast goes negative and
    // the copy loop aborted after the first chunk. Both removed as dead/buggy.
    response.addHeader("Accept-Ranges", "bytes");
    response.addHeader("Content-Length", String.valueOf(length));
    response.addHeader("Content-Range", "bytes 0-" + length + "/" + length);
    response.addHeader("Content-Type", "audio/mpeg;charset=UTF-8");
    // FIX: try-with-resources guarantees both streams are closed even when the
    // client disconnects mid-transfer; the original leaked them on any IOException.
    try (FileInputStream fis = new FileInputStream(file);
         OutputStream os = response.getOutputStream()) {
        byte[] buf = new byte[8192]; // larger buffer than the original 1 KB; fewer syscalls
        int len;
        while ((len = fis.read(buf)) != -1) {
            os.write(buf, 0, len);
        }
    }
}
3、前端代码
// Sets up the canvas + AudioContext for the coal A1 audio visualization and
// refreshes the waveform once per second based on the current pickup state.
function draw_coalA1_audio_canvas() {
    var coalUrl = "/tcc/front/coalAudio";
    var gangueUrl = "/tcc/front/gangueAudio";
    var canvas = document.getElementById("coalA1_audio_canvas");
    var canvasCtx = canvas.getContext("2d");
    var audioContext = new AudioContext();
    var filter = audioContext.createBiquadFilter();
    // Pick the stream URL for the current state; null means "nothing to play".
    // (pickup_isBreak / audio_type are globals defined elsewhere on the page.)
    function pickUrl() {
        if (pickup_isBreak == 0 && audio_type == 0) return coalUrl;
        if (pickup_isBreak == 0 && audio_type == 1) return gangueUrl;
        return null;
    }
    // Single refresh step, shared by the initial draw and the interval
    // (the original duplicated the if/else chain in both places).
    function tick() {
        var url = pickUrl();
        // FIX: the original fell through with url == "" when neither condition
        // matched, issuing a pointless POST to the current page. Skip instead.
        if (url) {
            drawAudio(url, canvas, canvasCtx, audioContext, filter);
        }
    }
    tick();
    setInterval(tick, 1000);
}
// Fetches an audio clip from `url`, plays it through the given AudioContext,
// and renders its time-domain waveform onto the supplied canvas.
// Called once per second, so it must clean up the listener and the animation
// loop created by the previous invocation.
function drawAudio(url, canvas, canvasCtx, audioContext, filter) {
    var request = new XMLHttpRequest();
    request.open('POST', url, true);            // backend streams the file on POST
    request.responseType = 'arraybuffer';       // raw bytes for decodeAudioData
    request.onload = function () {
        var audioData = request.response;
        // Payloads under ~100 KB are treated as "no meaningful audio" and skipped.
        if (audioData.byteLength < (1000 * 100)) {
            return;
        }
        audioContext.decodeAudioData(audioData, function (buffer) {
            var sourceNode = audioContext.createBufferSource();
            var analyser = audioContext.createAnalyser();
            analyser.fftSize = 2048;            // controls frequency-bin resolution
            // source -> analyser -> destination (speakers)
            sourceNode.connect(analyser);
            analyser.connect(audioContext.destination);
            sourceNode.buffer = buffer;
            sourceNode.start(0);                // older browsers: noteOn()
            // FIX: removeEventListener(..., null) is a no-op, so the original
            // stacked a fresh mouseenter listener on every refresh. Keep the
            // current handler on the function object and remove it explicitly.
            if (drawAudio._resumeHandler) {
                document.documentElement.removeEventListener('mouseenter', drawAudio._resumeHandler, false);
            }
            drawAudio._resumeHandler = function () {
                // Autoplay policies suspend the context until a user gesture.
                if (sourceNode.context.state !== 'running') {
                    sourceNode.context.resume();
                }
            };
            document.documentElement.addEventListener('mouseenter', drawAudio._resumeHandler, false);
            // High-pass shaping (note: filter is configured but not in the
            // node chain here — preserved from the original wiring).
            filter.type = 'highpass';
            filter.frequency.value = 600;
            filter.Q.value = 800;
            var bufferLength = analyser.frequencyBinCount;
            var dataArray = new Uint8Array(bufferLength);
            canvasCtx.clearRect(0, 0, 300, 300);
            function draw() {
                drawVisual = requestAnimationFrame(draw); // global handle, shared with playCoalAudio1
                analyser.getByteTimeDomainData(dataArray);
                canvasCtx.fillStyle = '#0e1a3b';
                canvasCtx.fillRect(0, 0, 300, 400);
                canvasCtx.lineWidth = 2;
                canvasCtx.strokeStyle = '#ffffff';
                canvasCtx.beginPath();
                var sliceWidth = 300 * 1.0 / bufferLength;
                var x = 0;
                for (var i = 0; i < bufferLength; i++) {
                    var v = dataArray[i] / 128.0;   // normalize byte sample around 1.0
                    var y = v * 200 / 3;            // vertical placement of the trace
                    if (i === 0) {
                        canvasCtx.moveTo(x, y);
                    } else {
                        canvasCtx.lineTo(x, y);
                    }
                    x += sliceWidth;
                }
                canvasCtx.lineTo(canvas.width, canvas.height / 2);
                canvasCtx.stroke();
            }
            // FIX: cancel the previous animation loop before starting a new one;
            // the original accumulated one rAF loop per refresh (flicker + CPU).
            if (typeof drawVisual !== 'undefined') {
                cancelAnimationFrame(drawVisual);
            }
            draw();
        }, function (err) {
            console.log("!Fail to decode the file!" + url, err)
        });
    };
    request.send();
}
// Standalone player/visualizer for the coal audio stream: creates its own
// AudioContext, polls the backend once per second while the coal state is
// active, plays the clip and draws its waveform on the A1 canvas.
function playCoalAudio1() {
    var url = "/tcc/front/coalAudio";
    var canvas = document.getElementById("coalA1_audio_canvas");
    var canvasCtx = canvas.getContext("2d");
    // Vendor-prefixed fallback for older browsers. AudioContext is the entry
    // point of the Web Audio graph: nodes are created from it and chained
    // together, with audioContext.destination as the final output ("the sound card").
    var AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext;
    if (!AudioContext) {
        alert("您的浏览器不支持audio API,请更换浏览器(chrome、firefox)再尝试,另外本人强烈建议使用谷歌浏览器!")
        // FIX: the original fell through and called `new AudioContext()` anyway,
        // throwing a TypeError right after the alert.
        return;
    }
    var audioContext = new AudioContext();
    var filter = audioContext.createBiquadFilter();
    // Pipeline: fetch the clip as an ArrayBuffer, decode it, then play + visualize.
    function getData() {
        var request = new XMLHttpRequest();
        request.open('POST', url, true);        // backend streams the file on POST
        request.responseType = 'arraybuffer';   // raw bytes for decodeAudioData
        request.onload = function () {
            var audioData = request.response;
            audioContext.decodeAudioData(audioData, function (buffer) {
                // AudioBufferSourceNode plays the decoded buffer;
                // AnalyserNode exposes the sample data for drawing.
                var sourceNode = audioContext.createBufferSource();
                var analyser = audioContext.createAnalyser();
                // fftSize (power of two, typically 2048) sets the FFT window:
                // larger => sparser spectrum, smaller => denser.
                analyser.fftSize = 2048;
                // source -> analyser -> destination (speakers).
                sourceNode.connect(analyser);
                analyser.connect(audioContext.destination);
                sourceNode.buffer = buffer;
                sourceNode.start(0);            // older browsers: noteOn()
                // FIX: removeEventListener(..., null) is a no-op, so the original
                // added a new mouseenter listener on every poll. Track the live
                // handler on the function object and detach it first.
                if (playCoalAudio1._resumeHandler) {
                    document.documentElement.removeEventListener('mouseenter', playCoalAudio1._resumeHandler, false);
                }
                playCoalAudio1._resumeHandler = function () {
                    // Autoplay policies suspend the context until a user gesture.
                    if (sourceNode.context.state !== 'running') {
                        sourceNode.context.resume();
                    }
                };
                document.documentElement.addEventListener('mouseenter', playCoalAudio1._resumeHandler, false);
                // High-pass shaping (filter is configured but not wired into the
                // node chain here — preserved from the original wiring).
                filter.type = 'highpass';
                filter.frequency.value = 600;
                filter.Q.value = 800;
                // Visualization data buffers.
                var bufferLength = analyser.frequencyBinCount;
                var dataArray = new Uint8Array(bufferLength);
                canvasCtx.clearRect(0, 0, 300, 300);
                function draw() {
                    drawVisual = requestAnimationFrame(draw); // global handle, shared with drawAudio
                    analyser.getByteTimeDomainData(dataArray);
                    canvasCtx.fillStyle = '#0e1a3b';
                    canvasCtx.fillRect(0, 0, 300, 400);
                    canvasCtx.lineWidth = 2;
                    canvasCtx.strokeStyle = '#ffffff';
                    canvasCtx.beginPath();
                    var sliceWidth = 300 * 1.0 / bufferLength;
                    var x = 0;
                    for (var i = 0; i < bufferLength; i++) {
                        var v = dataArray[i] / 128.0; // normalize byte sample around 1.0
                        var y = v * 200 / 3;          // vertical placement of the trace
                        if (i === 0) {
                            canvasCtx.moveTo(x, y);
                        } else {
                            canvasCtx.lineTo(x, y);
                        }
                        x += sliceWidth;
                    }
                    canvasCtx.lineTo(canvas.width, canvas.height / 2);
                    canvasCtx.stroke();
                }
                // FIX: cancel the previous animation loop before starting a new
                // one; the original accumulated one rAF loop per poll.
                if (typeof drawVisual !== 'undefined') {
                    cancelAnimationFrame(drawVisual);
                }
                draw();
            }, function (err) {
                console.log("!Fail to decode the file!", err)
            });
        };
        request.send();
    }
    // Poll immediately, then once a second, only while coal audio is active.
    if (pickup_isBreak == 0 && audio_type == 0) {
        getData();
    }
    setInterval(function () {
        if (pickup_isBreak == 0 && audio_type == 0) {
            getData();
        }
    }, 1000);
}