大话Web-Audio-Api
转:https://www.jianshu.com/p/0079d1fe7496
简单的例子:
<script>
// Shared AudioContext instance, created once in init().
var context;
// Decoded audio data (AudioBuffer), filled in by loadSound's decode callback.
var musicBuffer;
// Defer all setup until the page has finished loading.
window.addEventListener('load',init,false);
// Create the AudioContext (falling back to vendor-prefixed constructors on
// older browsers) and kick off loading of the demo track. Any failure —
// e.g. the browser has no Web Audio support at all — is surfaced via alert().
function init() {
  try {
    // Normalize the constructor under the standard name so the rest of
    // the page can always use window.AudioContext.
    var Ctor = window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.msAudioContext;
    window.AudioContext = Ctor;
    context = new window.AudioContext();
    // Fetch the audio file; playback starts once it has been decoded.
    loadSound("m.mp3");
  } catch (e) {
    alert(e);
  }
}
// Fetch the audio file at `url` as an ArrayBuffer via XHR, decode it with
// the Web Audio API, and start playback once decoding succeeds.
// Note: the request is subject to the browser's same-origin policy.
function loadSound(url){
  var request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';
  // Attach handlers BEFORE send() so no readiness event can be missed.
  request.onload = function(){
    // status 0 covers file:// URLs, where no HTTP status is reported.
    if (request.status !== 200 && request.status !== 0) {
      alert('Failed to load ' + url + ': HTTP ' + request.status);
      return;
    }
    // Asynchronously decode the compressed audio into an AudioBuffer.
    context.decodeAudioData(request.response, function(buffer){
      musicBuffer = buffer;
      playSound(context, musicBuffer); // play as soon as decoding succeeds
    }, function(err){
      // Without this error callback a decode failure would be silent.
      alert('decodeAudioData failed: ' + err);
    });
  };
  request.onerror = function(){
    alert('Network error while loading ' + url);
  };
  request.send();
}
// Wire up buffer -> source -> analyser -> destination and start playback.
function playSound(audioContext, buffer) {
  var sourceNode = audioContext.createBufferSource();
  var analyser = audioContext.createAnalyser();
  // Route the source through the analyser node...
  sourceNode.connect(analyser);
  // ...and on to the destination, so the audio actually reaches the speakers.
  analyser.connect(audioContext.destination);
  // Hand the decoded data (from the previous decode step) to the source node.
  sourceNode.buffer = buffer;
  // Begin playback immediately.
  sourceNode.start(0);
  // Once audio is playing, `analyser` could be passed to a drawing routine,
  // since all the data needed to render a spectrum comes from it:
  // this._drawSpectrum(analyser); // would draw the waveform/spectrum
}
</script>
注意:上面例子中的 XMLHttpRequest 请求同样受浏览器同源策略的限制——音频文件必须与页面同源,否则服务端需要开启 CORS(跨域资源共享)才能加载。