zoukankan      html  css  js  c++  java
  • 《搬砖日记》Unity原生MicroPhone的使用

    我们不生产代码,我们只是代码的搬运工。

    以此博文记录下MicroPhone的使用,方便下次搬砖。

    本博文参考:

    https://blog.csdn.net/dyc333236081818/article/details/73200608

    http://www.mamicode.com/info-detail-1821115.html


    一、MicroPhone的使用及用户自定义录音时长

    用户自定义录音时长的思路就是定义了一个 float recordTimer 来记录录音时长,录音最大时间 RECORD_TIME 尽量大,当录音结束后再用 AudioClip.Create 方法生成一个新的AudioClip。

    using System;
    using System.Collections;
    using System.Collections.Generic;
    using UnityEngine;
    using UnityEngine.Video;
    
    /// <summary>
    /// Singleton wrapper around Unity's Microphone API: records on the default
    /// device, trims the clip to the captured length on stop, and plays/saves/loads
    /// recordings via WavUtility.
    /// </summary>
    public class MicrophoneManager : MonoBehaviour
    {
        private static MicrophoneManager m_instance;

        private static string[] micArray = null; // attached recording devices, cached in GetInstance()
        private AudioClip audioClip;             // the current (or last trimmed) recording
        const int RECORD_TIME = 1200;            // max recording length in seconds; clip is trimmed afterwards
        const int RECORD_RATE = 44100;           // recording sample rate in Hz
        public bool isRecording = false;
        private float recordTimer = 0.0f;        // elapsed recording time, advanced in Update()

        /// <summary>
        /// Lazily creates the singleton on a dedicated "MicManager" GameObject and
        /// caches the device list. Logs an error (but still returns an instance)
        /// when no microphone is attached.
        /// </summary>
        public static MicrophoneManager GetInstance()
        {
            if (m_instance == null)
            {
                micArray = Microphone.devices;
                if (micArray.Length == 0)
                {
                    Debug.LogError("no mic device");
                }
                foreach (string deviceStr in Microphone.devices)
                {
                    Debug.Log("device name = " + deviceStr);
                }
                GameObject micManager = new GameObject("MicManager");
                m_instance = micManager.AddComponent<MicrophoneManager>();
            }
            return m_instance;
        }

        private void Update()
        {
            if (isRecording)
            {
                recordTimer += Time.deltaTime;
            }
        }

        /// <summary>
        /// Starts recording on the default device (a null device name selects the
        /// OS default). Any recording already in progress is stopped first.
        /// </summary>
        public void StartRecord()
        {
            if (micArray.Length == 0)
            {
                Debug.Log("No Record Device!");
                return;
            }
            // Stop any recording in progress; null selects the default device.
            Microphone.End(null);
            recordTimer = 0;
            audioClip = Microphone.Start(null, false, RECORD_TIME, RECORD_RATE);
            isRecording = true;
            // Block until the device has delivered its first samples. The original
            // spin loop never terminated when the device failed to start, hanging
            // the main thread forever; bail out after a short timeout instead.
            float deadline = Time.realtimeSinceStartup + 2.0f;
            while (Microphone.GetPosition(null) <= 0)
            {
                if (Time.realtimeSinceStartup > deadline)
                {
                    Debug.LogError("Microphone failed to start recording in time.");
                    Microphone.End(null);
                    isRecording = false;
                    return;
                }
            }
            Debug.Log("StartRecord");
        }

        /// <summary>
        /// Stops recording and replaces <c>audioClip</c> with a new clip trimmed
        /// to the samples actually captured. No-op when not recording.
        /// </summary>
        public void StopRecord()
        {
            if (micArray.Length == 0)
            {
                Debug.Log("No Record Device!");
                return;
            }
            if (!Microphone.IsRecording(null))
            {
                return;
            }
            Debug.Log("StopRecord");

            isRecording = false;
            int position = Microphone.GetPosition(null);
            if (position <= 0)
            {
                // Nothing was captured; AudioClip.Create would throw on a
                // zero-length clip, so just shut the device down.
                Microphone.End(null);
                return;
            }
            var soundData = new float[audioClip.samples * audioClip.channels];
            audioClip.GetData(soundData, 0);
            // Copy only the recorded portion into a right-sized buffer.
            var newData = new float[position * audioClip.channels];
            Array.Copy(soundData, newData, newData.Length);
            audioClip = AudioClip.Create(audioClip.name,
                                            position,
                                            audioClip.channels,
                                            audioClip.frequency,
                                            false);
            audioClip.SetData(newData, 0);
            Microphone.End(null);
        }

        /// <summary>
        /// Plays the last recording at the world origin.
        /// </summary>
        public void PlayRecord()
        {
            PlayRecord(audioClip);
        }

        /// <summary>
        /// Plays the given clip at the world origin.
        /// </summary>
        public void PlayRecord(AudioClip clip)
        {
            PlayRecord(clip, Vector3.zero);
        }

        /// <summary>
        /// Plays the given clip at the given world position via a temporary AudioSource.
        /// </summary>
        public void PlayRecord(AudioClip clip, Vector3 pos)
        {
            if (clip == null)
            {
                Debug.Log("audioClip is null");
                return;
            }
            AudioSource.PlayClipAtPoint(clip, pos);
            Debug.Log("PlayRecord");
        }

        /// <summary>
        /// Saves the last recording as a 16-bit .wav under persistentDataPath.
        /// </summary>
        /// <returns>The path of the written file.</returns>
        public string Save()
        {
            string recordedAudioPath;
            byte[] data = WavUtility.FromAudioClip(audioClip, out recordedAudioPath, true);
            string filePath = recordedAudioPath;
            return filePath;
        }

        /// <summary>
        /// Loads a previously saved .wav file back into an AudioClip.
        /// </summary>
        public AudioClip Read(string path)
        {
            return WavUtility.ToAudioClip(path);
        }
    }

    二、录音文件的本地保存

    使用 WavUtility.cs 实现对AudioClip的管理

    出处来源以及使用方法:https://github.com/deadlyfingers/UnityWav

    using UnityEngine;
    using System.Text;
    using System.IO;
    using System;
    
    /// <summary>
    /// WAV utility for recording and audio playback functions in Unity.
    /// Version: 1.0 alpha 1
    ///
    /// - Use "ToAudioClip" method for loading wav file / bytes.
    /// Loads .wav (PCM uncompressed) files at 8,16,24 and 32 bits and converts data to Unity's AudioClip.
    ///
    /// - Use "FromAudioClip" method for saving wav file / bytes.
    /// Converts an AudioClip's float data into wav byte array at 16 bit.
    /// </summary>
    /// <remarks>
    /// For documentation and usage examples: https://github.com/deadlyfingers/UnityWav
    /// </remarks>
    
    public class WavUtility
    {
        // Force save as 16-bit .wav
        const int BlockSize_16Bit = 2;

        /// <summary>
        /// Load PCM format *.wav audio file (using Unity's Application data path) and convert to AudioClip.
        /// </summary>
        /// <returns>The AudioClip, or null when the path is outside Unity's data folders.</returns>
        /// <param name="filePath">Local file path to .wav file</param>
        public static AudioClip ToAudioClip(string filePath)
        {
            if (!filePath.StartsWith(Application.persistentDataPath) && !filePath.StartsWith(Application.dataPath))
            {
                // NOTE: the original listing split this literal across lines with
                // unescaped inner quotes, which does not compile; restored as a
                // single escaped string.
                Debug.LogWarning("This only supports files that are stored using Unity's Application data path. \nTo load bundled resources use 'Resources.Load(\"filename\") typeof(AudioClip)' method. \nhttps://docs.unity3d.com/ScriptReference/Resources.Load.html");
                return null;
            }
            byte[] fileBytes = File.ReadAllBytes(filePath);
            return ToAudioClip(fileBytes, 0);
        }

        /// <summary>
        /// Convert the bytes of an uncompressed PCM .wav file into an AudioClip.
        /// Supports 8/16/24/32-bit sample depths. Assumes the "data" chunk
        /// directly follows the "fmt " chunk (true for files this class writes).
        /// </summary>
        public static AudioClip ToAudioClip(byte[] fileBytes, int offsetSamples = 0, string name = "wav")
        {
            //string riff = Encoding.ASCII.GetString (fileBytes, 0, 4);
            //string wave = Encoding.ASCII.GetString (fileBytes, 8, 4);
            int subchunk1 = BitConverter.ToInt32(fileBytes, 16);
            UInt16 audioFormat = BitConverter.ToUInt16(fileBytes, 20);

            // NB: Only uncompressed PCM wav files are supported.
            string formatCode = FormatCode(audioFormat);
            Debug.AssertFormat(audioFormat == 1 || audioFormat == 65534, "Detected format code '{0}' {1}, but only PCM and WaveFormatExtensable uncompressed formats are currently supported.", audioFormat, formatCode);

            UInt16 channels = BitConverter.ToUInt16(fileBytes, 22);
            int sampleRate = BitConverter.ToInt32(fileBytes, 24);
            //int byteRate = BitConverter.ToInt32 (fileBytes, 28);
            //UInt16 blockAlign = BitConverter.ToUInt16 (fileBytes, 32);
            UInt16 bitDepth = BitConverter.ToUInt16(fileBytes, 34);

            // Offset of the "data" chunk size field: riff(12) + "fmt "+size(8) + fmt body.
            int headerOffset = 16 + 4 + subchunk1 + 4;
            int subchunk2 = BitConverter.ToInt32(fileBytes, headerOffset);

            float[] data;
            switch (bitDepth)
            {
                case 8:
                    data = Convert8BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                    break;
                case 16:
                    data = Convert16BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                    break;
                case 24:
                    data = Convert24BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                    break;
                case 32:
                    data = Convert32BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                    break;
                default:
                    throw new Exception(bitDepth + " bit depth is not supported.");
            }

            AudioClip audioClip = AudioClip.Create(name, data.Length, (int)channels, sampleRate, false);
            audioClip.SetData(data, 0);
            return audioClip;
        }

        #region wav file bytes to Unity AudioClip conversion methods

        private static float[] Convert8BitByteArrayToAudioClipData(byte[] source, int headerOffset, int dataSize)
        {
            int wavSize = BitConverter.ToInt32(source, headerOffset);
            headerOffset += sizeof(int);
            Debug.AssertFormat(wavSize > 0 && wavSize == dataSize, "Failed to get valid 8-bit wav size: {0} from data bytes: {1} at offset: {2}", wavSize, dataSize, headerOffset);

            float[] data = new float[wavSize];

            int i = 0;
            while (i < wavSize)
            {
                // BUGFIX: the original read source[i] from byte 0 (the RIFF header)
                // instead of the data chunk. 8-bit WAV samples are unsigned PCM
                // (0..255 with 128 = silence), so recentre and scale to [-1, 1].
                data[i] = (source[i + headerOffset] - 128f) / 128f;
                ++i;
            }

            return data;
        }

        private static float[] Convert16BitByteArrayToAudioClipData(byte[] source, int headerOffset, int dataSize)
        {
            int wavSize = BitConverter.ToInt32(source, headerOffset);
            headerOffset += sizeof(int);
            Debug.AssertFormat(wavSize > 0 && wavSize == dataSize, "Failed to get valid 16-bit wav size: {0} from data bytes: {1} at offset: {2}", wavSize, dataSize, headerOffset);

            int x = sizeof(Int16); // block size = 2
            int convertedSize = wavSize / x;

            float[] data = new float[convertedSize];

            Int16 maxValue = Int16.MaxValue;

            int offset = 0;
            int i = 0;
            while (i < convertedSize)
            {
                offset = i * x + headerOffset;
                data[i] = (float)BitConverter.ToInt16(source, offset) / maxValue;
                ++i;
            }

            Debug.AssertFormat(data.Length == convertedSize, "AudioClip .wav data is wrong size: {0} == {1}", data.Length, convertedSize);

            return data;
        }

        private static float[] Convert24BitByteArrayToAudioClipData(byte[] source, int headerOffset, int dataSize)
        {
            int wavSize = BitConverter.ToInt32(source, headerOffset);
            headerOffset += sizeof(int);
            Debug.AssertFormat(wavSize > 0 && wavSize == dataSize, "Failed to get valid 24-bit wav size: {0} from data bytes: {1} at offset: {2}", wavSize, dataSize, headerOffset);

            int x = 3; // block size = 3
            int convertedSize = wavSize / x;

            int maxValue = Int32.MaxValue;

            float[] data = new float[convertedSize];

            // Copy each 3-byte sample into the top 3 bytes of a 4-byte block so it
            // reads as a left-shifted Int32; dividing by Int32.MaxValue then
            // normalizes it, sign included.
            byte[] block = new byte[sizeof(int)];

            int offset = 0;
            int i = 0;
            while (i < convertedSize)
            {
                offset = i * x + headerOffset;
                Buffer.BlockCopy(source, offset, block, 1, x);
                data[i] = (float)BitConverter.ToInt32(block, 0) / maxValue;
                ++i;
            }

            Debug.AssertFormat(data.Length == convertedSize, "AudioClip .wav data is wrong size: {0} == {1}", data.Length, convertedSize);

            return data;
        }

        private static float[] Convert32BitByteArrayToAudioClipData(byte[] source, int headerOffset, int dataSize)
        {
            int wavSize = BitConverter.ToInt32(source, headerOffset);
            headerOffset += sizeof(int);
            Debug.AssertFormat(wavSize > 0 && wavSize == dataSize, "Failed to get valid 32-bit wav size: {0} from data bytes: {1} at offset: {2}", wavSize, dataSize, headerOffset);

            int x = sizeof(float); //  block size = 4
            int convertedSize = wavSize / x;

            Int32 maxValue = Int32.MaxValue;

            float[] data = new float[convertedSize];

            int offset = 0;
            int i = 0;
            while (i < convertedSize)
            {
                offset = i * x + headerOffset;
                // NOTE(review): samples are decoded as Int32 PCM; a 32-bit IEEE
                // float wav (format code 3) would need BitConverter.ToSingle here.
                data[i] = (float)BitConverter.ToInt32(source, offset) / maxValue;
                ++i;
            }

            Debug.AssertFormat(data.Length == convertedSize, "AudioClip .wav data is wrong size: {0} == {1}", data.Length, convertedSize);

            return data;
        }

        #endregion

        /// <summary>
        /// Convert an AudioClip to 16-bit .wav bytes without writing a file.
        /// </summary>
        public static byte[] FromAudioClip(AudioClip audioClip)
        {
            string file;
            return FromAudioClip(audioClip, out file, false);
        }

        /// <summary>
        /// Convert an AudioClip to 16-bit .wav bytes, optionally saving them under
        /// persistentDataPath/<paramref name="dirname"/> with a UTC timestamp name.
        /// </summary>
        /// <param name="filepath">Receives the saved path, or null when saveAsFile is false.</param>
        public static byte[] FromAudioClip(AudioClip audioClip, out string filepath, bool saveAsFile = true, string dirname = "recordings")
        {
            MemoryStream stream = new MemoryStream();

            const int headerSize = 44;

            // NB: Only 16-bit output is supported.
            UInt16 bitDepth = 16; //BitDepth (audioClip);

            // BUGFIX: total size must count every channel's samples. The written
            // data is samples * channels * 2 bytes, so omitting channels (as the
            // original did) understates the RIFF size for stereo clips.
            int fileSize = audioClip.samples * audioClip.channels * BlockSize_16Bit + headerSize;

            // chunk descriptor (riff)
            WriteFileHeader(ref stream, fileSize);
            // file header (fmt)
            WriteFileFormat(ref stream, audioClip.channels, audioClip.frequency, bitDepth);
            // data chunks (data)
            WriteFileData(ref stream, audioClip, bitDepth);

            byte[] bytes = stream.ToArray();

            // Validate total bytes
            Debug.AssertFormat(bytes.Length == fileSize, "Unexpected AudioClip to wav format byte count: {0} == {1}", bytes.Length, fileSize);

            // Save file to persistant storage location
            if (saveAsFile)
            {
                filepath = string.Format("{0}/{1}/{2}.{3}", Application.persistentDataPath, dirname, DateTime.UtcNow.ToString("yyMMdd-HHmmss-fff"), "wav");
                Directory.CreateDirectory(Path.GetDirectoryName(filepath));
                File.WriteAllBytes(filepath, bytes);
            }
            else
            {
                filepath = null;
            }

            stream.Dispose();

            return bytes;
        }

        #region write .wav file functions

        // Writes the 12-byte RIFF chunk descriptor; returns the bytes written.
        private static int WriteFileHeader(ref MemoryStream stream, int fileSize)
        {
            int count = 0;
            int total = 12;

            // riff chunk id
            byte[] riff = Encoding.ASCII.GetBytes("RIFF");
            count += WriteBytesToMemoryStream(ref stream, riff, "ID");

            // riff chunk size
            int chunkSize = fileSize - 8; // total size - 8 for the other two fields in the header
            count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(chunkSize), "CHUNK_SIZE");

            byte[] wave = Encoding.ASCII.GetBytes("WAVE");
            count += WriteBytesToMemoryStream(ref stream, wave, "FORMAT");

            // Validate header
            Debug.AssertFormat(count == total, "Unexpected wav descriptor byte count: {0} == {1}", count, total);

            return count;
        }

        // Writes the 24-byte "fmt " chunk for uncompressed PCM; returns the bytes written.
        private static int WriteFileFormat(ref MemoryStream stream, int channels, int sampleRate, UInt16 bitDepth)
        {
            int count = 0;
            int total = 24;

            byte[] id = Encoding.ASCII.GetBytes("fmt ");
            count += WriteBytesToMemoryStream(ref stream, id, "FMT_ID");

            int subchunk1Size = 16; // 24 - 8
            count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(subchunk1Size), "SUBCHUNK_SIZE");

            UInt16 audioFormat = 1; // PCM
            count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(audioFormat), "AUDIO_FORMAT");

            UInt16 numChannels = Convert.ToUInt16(channels);
            count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(numChannels), "CHANNELS");

            count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(sampleRate), "SAMPLE_RATE");

            int byteRate = sampleRate * channels * BytesPerSample(bitDepth);
            count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(byteRate), "BYTE_RATE");

            UInt16 blockAlign = Convert.ToUInt16(channels * BytesPerSample(bitDepth));
            count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(blockAlign), "BLOCK_ALIGN");

            count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(bitDepth), "BITS_PER_SAMPLE");

            // Validate format
            Debug.AssertFormat(count == total, "Unexpected wav fmt byte count: {0} == {1}", count, total);

            return count;
        }

        // Writes the "data" chunk header plus the clip's samples as little-endian Int16.
        private static int WriteFileData(ref MemoryStream stream, AudioClip audioClip, UInt16 bitDepth)
        {
            int count = 0;
            int total = 8;

            // Copy float[] data from AudioClip
            float[] data = new float[audioClip.samples * audioClip.channels];
            audioClip.GetData(data, 0);

            byte[] bytes = ConvertAudioClipDataToInt16ByteArray(data);

            byte[] id = Encoding.ASCII.GetBytes("data");
            count += WriteBytesToMemoryStream(ref stream, id, "DATA_ID");

            // BUGFIX: the data chunk carries samples for every channel; the
            // original omitted channels, producing a wrong subchunk2 size (and a
            // failing assert) for stereo clips.
            int subchunk2Size = Convert.ToInt32(audioClip.samples * audioClip.channels * BlockSize_16Bit);
            count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(subchunk2Size), "SAMPLES");

            // Validate header
            Debug.AssertFormat(count == total, "Unexpected wav data id byte count: {0} == {1}", count, total);

            // Write bytes to stream
            count += WriteBytesToMemoryStream(ref stream, bytes, "DATA");

            // Validate audio data
            Debug.AssertFormat(bytes.Length == subchunk2Size, "Unexpected AudioClip to wav subchunk2 size: {0} == {1}", bytes.Length, subchunk2Size);

            return count;
        }

        // Converts [-1, 1] float samples to little-endian 16-bit PCM bytes.
        private static byte[] ConvertAudioClipDataToInt16ByteArray(float[] data)
        {
            using (MemoryStream dataStream = new MemoryStream())
            {
                int x = sizeof(Int16);

                Int16 maxValue = Int16.MaxValue;

                int i = 0;
                while (i < data.Length)
                {
                    dataStream.Write(BitConverter.GetBytes(Convert.ToInt16(data[i] * maxValue)), 0, x);
                    ++i;
                }
                byte[] bytes = dataStream.ToArray();

                // Validate converted bytes
                Debug.AssertFormat(data.Length * x == bytes.Length, "Unexpected float[] to Int16 to byte[] size: {0} == {1}", data.Length * x, bytes.Length);

                return bytes;
            }
        }

        // Appends raw bytes to the stream; returns the count for header validation.
        private static int WriteBytesToMemoryStream(ref MemoryStream stream, byte[] bytes, string tag = "")
        {
            int count = bytes.Length;
            stream.Write(bytes, 0, count);
            return count;
        }

        #endregion

        /// <summary>
        /// Calculates the bit depth of an AudioClip
        /// </summary>
        /// <returns>The bit depth. Should be 8 or 16 or 32 bit.</returns>
        /// <param name="audioClip">Audio clip.</param>
        public static UInt16 BitDepth(AudioClip audioClip)
        {
            // NOTE(review): this formula (samples * channels * length / frequency)
            // does not actually compute a bit depth, and nothing in this file
            // calls it (FromAudioClip hard-codes 16). Kept unchanged for API
            // compatibility — verify before relying on it.
            UInt16 bitDepth = Convert.ToUInt16(audioClip.samples * audioClip.channels * audioClip.length / audioClip.frequency);
            Debug.AssertFormat(bitDepth == 8 || bitDepth == 16 || bitDepth == 32, "Unexpected AudioClip bit depth: {0}. Expected 8 or 16 or 32 bit.", bitDepth);
            return bitDepth;
        }

        // Bytes occupied by one sample at the given bit depth (e.g. 16 -> 2).
        private static int BytesPerSample(UInt16 bitDepth)
        {
            return bitDepth / 8;
        }

        // Block size in bytes for a supported bit depth; throws on anything else.
        private static int BlockSize(UInt16 bitDepth)
        {
            switch (bitDepth)
            {
                case 32:
                    return sizeof(Int32); // 32-bit -> 4 bytes (Int32)
                case 16:
                    return sizeof(Int16); // 16-bit -> 2 bytes (Int16)
                case 8:
                    return sizeof(sbyte); // 8-bit -> 1 byte (sbyte)
                default:
                    throw new Exception(bitDepth + " bit depth is not supported.");
            }
        }

        // Human-readable name for a wav "fmt " audio format code.
        private static string FormatCode(UInt16 code)
        {
            switch (code)
            {
                case 1:
                    return "PCM";
                case 2:
                    return "ADPCM";
                case 3:
                    return "IEEE";
                case 7:
                    return "μ-law";
                case 65534:
                    return "WaveFormatExtensable";
                default:
                    Debug.LogWarning("Unknown wav code format:" + code);
                    return "";
            }
        }

    }

    三、方法的调用

        // Start recording
        MicrophoneManager.GetInstance().StartRecord();
        // Stop recording
        MicrophoneManager.GetInstance().StopRecord();
        // Play back the recording
        MicrophoneManager.GetInstance().PlayRecord();
        // Save the recording to a .wav file
        string filePath = "";
        filePath = MicrophoneManager.GetInstance().Save();
        // Load the saved recording from disk and play it
        AudioClip audioClip = MicrophoneManager.GetInstance().Read(filePath);
        MicrophoneManager.GetInstance().PlayRecord(audioClip);

    录音保存路径的修改:在 WavUtility.cs 中下面的代码处修改

     filepath = string.Format("{0}/{1}/{2}.{3}", Application.persistentDataPath, dirname, DateTime.UtcNow.ToString("yyMMdd-HHmmss-fff"), "wav");
  • 相关阅读:
    学习之Struts2框架实现原理----个人理解
    The import java.util cannot be resolved The import javax.servlet cannot be resolved
    谈谈HashSet的存储原理
    RabbitMQ的问题
    剑指Offer:合并列表(25)
    剑指Offer:反转列表(24)
    剑指Offer:链表中倒数第 K 个结点(22)
    剑指Offer:调整数组顺序使奇数位于偶数前面(21)
    剑指Offer:机器人的运动范围(13)
    剑指Offer:变态跳台阶(10.4)
  • 原文地址:https://www.cnblogs.com/YorkZhao/p/9837918.html
Copyright © 2011-2022 走看看