一、什么是AIUI
AIUI 是一套人机交互解决方案,整合了语音唤醒、语音识别、语义理解、内容平台、语音合成(比普通的语音合成多一个发音人)等能力。
新用户有20个免费的装机量,每天有500次免费交互

二、创建AIUI

  1. 在控制台→我的应用→创建新应用中来创建自己的测试项目

    三、进入AIUI控制台

点击服务管理进入AIUI的控制台

四、AIUI设置

默认自带语音识别。如果需要AIUI回答你,则需要开启语义理解;如果需要有声音,则需要开启语音合成。否则调用SDK的时候是不会返回语义理解和语音合成的数据的。


可以在上面的位置直接修改发音人,返回的语音合成数据也会改变

五、导入项目

1.下载SDK

2.我们需要将aiui.dll导入到Unity的Plugins文件夹中

3.我们需要将AIUI文件夹放到StreamingAssets文件夹中,这里面保存了一些配置信息,比如APPID,语音合成等等。只需要在程序开始时读取配置文件即可,不需要像讯飞的语音合成,语音识别等模块一样需要登录和登出。

4.这里有一些工具类,是对 aiui.dll 的扩展,使用的是官方的 c# 的 demo

5.初始化配置

// Initializes the AIUI agent from the on-disk config and starts its service.
void Start()
{
    // Give every device a unique SN (ideally generated from hardware info
    // such as the MAC address or device serial number) so install counts are
    // tracked correctly and not double-counted after a reflash or reinstall.
    AIUISetting.setSystemInfo(AIUIConstant.KEY_SERIAL_NUM, GetMac());

    // aiui.cfg holds APPID, TTS voice, etc. — read once at startup; no
    // explicit login/logout is needed, unlike the standalone MSC modules.
    string cfg = File.ReadAllText(Application.streamingAssetsPath + "\\AIUI\\cfg\\aiui.cfg");
    agent = IAIUIAgent.Create(cfg, onEvent);

    // CMD_START boots the agent's internal service loop; the message wrapper
    // must be destroyed after it is handed to the agent.
    IAIUIMessage msg_start = IAIUIMessage.Create(AIUIConstant.CMD_START, 0, 0, "", IBuffer.Zero);
    agent.SendMessage(msg_start);
    msg_start.Destroy();
    msg_start = null;
}

6.发送消息,这里是将麦克风录入的信息转换为byte[],然后发送到SDK中

// Coroutine: wakes the agent, starts a one-shot recording session, then
// writes one microphone buffer (raw PCM bytes) into the SDK.
// (sic: "SendMassage" typo is kept — it is the public name callers use.)
public IEnumerator SendMassage(byte[] data)
{
    // Wake the agent up so it will accept audio.
    IAIUIMessage msg_wakeup = IAIUIMessage.Create(AIUIConstant.CMD_WAKEUP, 0, 0, "", IBuffer.Zero);
    agent.SendMessage(msg_wakeup);
    msg_wakeup.Destroy();
    msg_wakeup = null;
    // Brief delay to let the wake-up take effect before recording starts.
    yield return new WaitForSeconds(0.2f);
    print("wakeup");

    // Start a recording session: raw audio, one-shot interaction mode.
    IAIUIMessage msg_start_r = IAIUIMessage.Create(AIUIConstant.CMD_START_RECORD, 0, 0,
        "data_type=audio,interact_mode=oneshot", IBuffer.Zero);
    agent.SendMessage(msg_start_r);
    msg_start_r.Destroy();
    msg_start_r = null;

    // Hand the PCM bytes to the SDK.
    IBuffer buf_1 = IBuffer.FromData(data, data.Length);
    IAIUIMessage msg_write_audio = IAIUIMessage.Create(AIUIConstant.CMD_WRITE, 0, 0, "data_type=audio", buf_1);
    agent.SendMessage(msg_write_audio);
    msg_write_audio.Destroy();
    msg_write_audio = null;
    buf_1 = null;
    yield return new WaitForSeconds(0.04f);
}

7.返回消息

// SDK event callback. NOTE: the SDK invokes this on a worker thread, so all
// Unity API access (Text, AudioSource) is queued back to the main thread
// via Loom.QueueOnMainThread.
private void onEvent(IAIUIEvent ev)
{
    switch (ev.GetEventType())
    {
        case AIUIConstant.EVENT_STATE:
            // Agent lifecycle state changes.
            switch (ev.GetArg1())
            {
                case AIUIConstant.STATE_IDLE:
                    print("EVENT_STATE: IDLE");
                    break;
                case AIUIConstant.STATE_READY:
                    print("EVENT_STATE: READY");
                    break;
                case AIUIConstant.STATE_WORKING:
                    print("EVENT_STATE: WORKING");
                    break;
            }
            break;
        case AIUIConstant.EVENT_WAKEUP:
            Debug.LogFormat("EVENT_WAKEUP: {0}", ev.GetInfo());
            break;
        case AIUIConstant.EVENT_SLEEP:
            // Fixed: previously logged the wrong label ("EVENT_WAKEUP").
            Debug.LogFormat("EVENT_SLEEP: arg1={0}", ev.GetArg1());
            break;
        case AIUIConstant.EVENT_VAD:
            // Voice activity detection: begin/end of speech.
            switch (ev.GetArg1())
            {
                case AIUIConstant.VAD_BOS:
                    print("EVENT_VAD: BOS");
                    break;
                case AIUIConstant.VAD_EOS:
                    print("EVENT_VAD: EOS");
                    break;
            }
            break;
        case AIUIConstant.EVENT_RESULT: // result for data previously sent
            try
            {
                // The event info is a JSON envelope; the actual payload is a
                // binary blob looked up by the cnt_id found in the envelope.
                var info = JsonConvert.DeserializeObject<Dictionary<object, object>>(ev.GetInfo());
                var datas = info["data"] as JArray;
                var data = datas[0] as JObject;
                var param = data["params"] as JObject;
                var contents = data["content"] as JArray;
                var content = contents[0] as JObject;
                string sub = param["sub"].ToString();
                string cnt_id = content["cnt_id"].ToString();
                int dataLen = 0;
                byte[] buffer = ev.GetData().GetBinary(cnt_id, ref dataLen);
                Debug.LogFormat("sub: {0} ,info: {1}", sub, ev.GetInfo());
                switch (sub)
                {
                    case "iat": // speech recognition result
                        string jsonObject = Encoding.UTF8.GetString(buffer).Replace('\0', ' ');
                        JsonData deJson = JsonMapper.ToObject(jsonObject);
                        // Concatenate the recognized words (ws -> cw[0].w).
                        StringBuilder cont = new StringBuilder();
                        foreach (JsonData item in deJson["text"]["ws"])
                        {
                            cont.Append(item["cw"][0]["w"]);
                        }
                        // Switch to the main thread before touching UI.
                        Loom.QueueOnMainThread(() => { sendText.text = cont.ToString(); });
                        break;
                    case "nlp": // semantic-understanding answer
                        string jsonObject1 = Encoding.UTF8.GetString(buffer).Replace('\0', ' ');
                        JsonData deJson1 = JsonMapper.ToObject(jsonObject1);
                        // Switch to the main thread before touching UI.
                        Loom.QueueOnMainThread(() =>
                        {
                            returnText.text = deJson1["intent"]["answer"]["text"].ToString();
                        });
                        break;
                    case "tts": // synthesized audio, delivered in chunks
                        string jsonObject2 = ev.GetInfo().Replace('\0', ' ');
                        JsonData deJson2 = JsonMapper.ToObject(jsonObject2);
                        // Accumulate this PCM chunk.
                        for (int i = 0; i < buffer.Length; i++)
                        {
                            audioData.Add(buffer[i]);
                        }
                        Debug.Log("音频数据长度:" + buffer.Length + "   总长度:" + audioData.Count);
                        // dts == 2 marks the final chunk of the utterance —
                        // TODO confirm against the AIUI result docs.
                        if (int.Parse(deJson2["data"][0]["content"][0]["dts"].ToString()) == 2)
                        {
                            Debug.Log(audioData.Count);
                            // Switch to the main thread; Save() writes the WAV file.
                            Loom.QueueOnMainThread(() =>
                            {
                                Save();
                                //audioData = new List<byte>();
                                //audioSource.clip = Clip(audioData.ToArray());
                                //audioSource.loop = false;
                                //audioSource.Play();
                            });
                        }
                        break;
                    case "asr":
                        break;
                    default:
                        break;
                }
            }
            catch (Exception e)
            {
                print(e.Message);
            }
            break;
        case AIUIConstant.EVENT_ERROR:
            Debug.LogFormat("EVENT_ERROR: {0} {1}", ev.GetArg1(), ev.GetInfo());
            break;
    }
}

AIUIManager 全部代码

using aiui;
using LitJson;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Net.NetworkInformation;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using UnityEngine;
using UnityEngine.UI;//讯飞AIUI
// iFlytek AIUI manager: wires the AIUI Windows SDK (aiui.dll) into Unity.
// Sends wake-up / audio messages to the native agent and handles its callback
// events: iat = speech recognition, nlp = semantic answer, tts = synthesized
// audio. The SDK callback runs on a background thread, so all Unity API calls
// are marshalled back through Loom.QueueOnMainThread.
public class AIUIManager : MonoBehaviour
{
    public static AIUIManager Instance;

    // Handle to the native AIUI agent created from aiui.cfg.
    private IAIUIAgent agent;

    public AudioSource audioSource; // plays the synthesized reply
    public Text returnText;         // shows the NLP answer text
    public Text sendText;           // shows the recognized user speech

    // Accumulates the multi-chunk TTS PCM stream until the last chunk arrives.
    private List<byte> audioData = new List<byte>();

    private void Awake()
    {
        Instance = this;
    }

    // Initializes the AIUI agent from the on-disk config and starts its service.
    void Start()
    {
        // Give every device a unique SN (ideally generated from hardware info
        // such as the MAC address or device serial) so install counts are
        // tracked correctly and not double-counted after reflash/reinstall.
        AIUISetting.setSystemInfo(AIUIConstant.KEY_SERIAL_NUM, GetMac());
        // aiui.cfg holds APPID, TTS voice, etc. — read once at startup; no
        // explicit login/logout is needed, unlike the standalone MSC modules.
        string cfg = File.ReadAllText(Application.streamingAssetsPath + "\\AIUI\\cfg\\aiui.cfg");
        agent = IAIUIAgent.Create(cfg, onEvent);
        IAIUIMessage msg_start = IAIUIMessage.Create(AIUIConstant.CMD_START, 0, 0, "", IBuffer.Zero);
        agent.SendMessage(msg_start);
        msg_start.Destroy();
        msg_start = null;
    }

    // Coroutine: wakes the agent, starts a one-shot recording session, then
    // writes one microphone buffer (raw PCM bytes) into the SDK.
    // (sic: "SendMassage" typo is kept — it is the public name callers use.)
    public IEnumerator SendMassage(byte[] data)
    {
        // Wake the agent up so it will accept audio.
        IAIUIMessage msg_wakeup = IAIUIMessage.Create(AIUIConstant.CMD_WAKEUP, 0, 0, "", IBuffer.Zero);
        agent.SendMessage(msg_wakeup);
        msg_wakeup.Destroy();
        msg_wakeup = null;
        yield return new WaitForSeconds(0.2f);
        print("wakeup");
        // Start a recording session: raw audio, one-shot interaction mode.
        IAIUIMessage msg_start_r = IAIUIMessage.Create(AIUIConstant.CMD_START_RECORD, 0, 0,
            "data_type=audio,interact_mode=oneshot", IBuffer.Zero);
        agent.SendMessage(msg_start_r);
        msg_start_r.Destroy();
        msg_start_r = null;
        // Hand the PCM bytes to the SDK.
        IBuffer buf_1 = IBuffer.FromData(data, data.Length);
        IAIUIMessage msg_write_audio = IAIUIMessage.Create(AIUIConstant.CMD_WRITE, 0, 0, "data_type=audio", buf_1);
        agent.SendMessage(msg_write_audio);
        msg_write_audio.Destroy();
        msg_write_audio = null;
        buf_1 = null;
        yield return new WaitForSeconds(0.04f);
    }

    // Builds a stable per-machine ID: concatenates the physical addresses of
    // all non-loopback NICs and returns the lowercase-hex MD5 of that string.
    // MD5 is acceptable here — the hash is only an install-count ID, not a secret.
    public string GetMac()
    {
        NetworkInterface[] interfaces = NetworkInterface.GetAllNetworkInterfaces();
        string mac = "";
        foreach (NetworkInterface ni in interfaces)
        {
            if (ni.NetworkInterfaceType != NetworkInterfaceType.Loopback)
            {
                mac += ni.GetPhysicalAddress().ToString();
            }
        }
        byte[] result = Encoding.Default.GetBytes(mac);
        // Fixed: dispose the hash implementation (was an undisposed
        // MD5CryptoServiceProvider); MD5.Create() produces the same digest.
        using (MD5 md5 = MD5.Create())
        {
            result = md5.ComputeHash(result);
        }
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < result.Length; i++)
        {
            builder.Append(result[i].ToString("x2"));
        }
        return builder.ToString();
    }

    private void OnDisable()
    {
        // Fixed: guard against Start() never having run (e.g. missing cfg
        // file) which would leave agent null and throw here.
        if (agent != null)
        {
            agent.Destroy();
        }
    }

    // SDK event callback. NOTE: the SDK invokes this on a worker thread, so
    // all Unity API access is queued back to the main thread via Loom.
    private void onEvent(IAIUIEvent ev)
    {
        switch (ev.GetEventType())
        {
            case AIUIConstant.EVENT_STATE:
                // Agent lifecycle state changes.
                switch (ev.GetArg1())
                {
                    case AIUIConstant.STATE_IDLE:
                        print("EVENT_STATE: IDLE");
                        break;
                    case AIUIConstant.STATE_READY:
                        print("EVENT_STATE: READY");
                        break;
                    case AIUIConstant.STATE_WORKING:
                        print("EVENT_STATE: WORKING");
                        break;
                }
                break;
            case AIUIConstant.EVENT_WAKEUP:
                Debug.LogFormat("EVENT_WAKEUP: {0}", ev.GetInfo());
                break;
            case AIUIConstant.EVENT_SLEEP:
                // Fixed: previously logged the wrong label ("EVENT_WAKEUP").
                Debug.LogFormat("EVENT_SLEEP: arg1={0}", ev.GetArg1());
                break;
            case AIUIConstant.EVENT_VAD:
                // Voice activity detection: begin/end of speech.
                switch (ev.GetArg1())
                {
                    case AIUIConstant.VAD_BOS:
                        print("EVENT_VAD: BOS");
                        break;
                    case AIUIConstant.VAD_EOS:
                        print("EVENT_VAD: EOS");
                        break;
                }
                break;
            case AIUIConstant.EVENT_RESULT:
                try
                {
                    // The event info is a JSON envelope; the actual payload is
                    // a binary blob looked up by the cnt_id in the envelope.
                    var info = JsonConvert.DeserializeObject<Dictionary<object, object>>(ev.GetInfo());
                    var datas = info["data"] as JArray;
                    var data = datas[0] as JObject;
                    var param = data["params"] as JObject;
                    var contents = data["content"] as JArray;
                    var content = contents[0] as JObject;
                    string sub = param["sub"].ToString();
                    string cnt_id = content["cnt_id"].ToString();
                    int dataLen = 0;
                    byte[] buffer = ev.GetData().GetBinary(cnt_id, ref dataLen);
                    Debug.LogFormat("sub: {0} ,info: {1}", sub, ev.GetInfo());
                    switch (sub)
                    {
                        case "iat": // speech recognition result
                            string jsonObject = Encoding.UTF8.GetString(buffer).Replace('\0', ' ');
                            JsonData deJson = JsonMapper.ToObject(jsonObject);
                            // Concatenate the recognized words (ws -> cw[0].w).
                            StringBuilder cont = new StringBuilder();
                            foreach (JsonData item in deJson["text"]["ws"])
                            {
                                cont.Append(item["cw"][0]["w"]);
                            }
                            // Switch to the main thread before touching UI.
                            Loom.QueueOnMainThread(() => { sendText.text = cont.ToString(); });
                            break;
                        case "nlp": // semantic-understanding answer
                            string jsonObject1 = Encoding.UTF8.GetString(buffer).Replace('\0', ' ');
                            JsonData deJson1 = JsonMapper.ToObject(jsonObject1);
                            Loom.QueueOnMainThread(() =>
                            {
                                returnText.text = deJson1["intent"]["answer"]["text"].ToString();
                            });
                            break;
                        case "tts": // synthesized audio, delivered in chunks
                            string jsonObject2 = ev.GetInfo().Replace('\0', ' ');
                            JsonData deJson2 = JsonMapper.ToObject(jsonObject2);
                            // NOTE(review): dts == 2 appears to mark the final
                            // chunk, yet the 44-byte WAV-header placeholder is
                            // appended to the END of the buffer here while the
                            // header is later written at index 0 (clobbering
                            // the first 44 PCM bytes). Looks like the
                            // placeholder should be prepended on the FIRST
                            // chunk instead — confirm against the AIUI dts
                            // semantics before changing. Behavior preserved.
                            if (int.Parse(deJson2["data"][0]["content"][0]["dts"].ToString()) == 2)
                            {
                                for (int i = 0; i < 44; i++)
                                {
                                    audioData.Add(0);
                                }
                            }
                            // Accumulate this PCM chunk.
                            for (int i = 0; i < buffer.Length; i++)
                            {
                                audioData.Add(buffer[i]);
                            }
                            Debug.Log("音频数据长度:" + buffer.Length + "   总长度:" + audioData.Count);
                            // Final chunk: stamp a WAV header over the first
                            // 44 bytes so Clip() can parse the stream, then
                            // play it on the main thread.
                            if (int.Parse(deJson2["data"][0]["content"][0]["dts"].ToString()) == 2)
                            {
                                Debug.Log(audioData.Count);
                                WAVE_Header header = getWave_Header(audioData.Count - 44);
                                byte[] headerByte = StructToBytes(header);
                                for (int i = 0; i < headerByte.Length; i++)
                                {
                                    audioData[i] = headerByte[i];
                                }
                                Loom.QueueOnMainThread(() =>
                                {
                                    audioSource.clip = Clip(audioData.ToArray());
                                    audioSource.Play();
                                    audioData = new List<byte>();
                                });
                            }
                            break;
                        case "asr":
                            break;
                        default:
                            break;
                    }
                }
                catch (Exception e)
                {
                    print(e.Message);
                }
                break;
            case AIUIConstant.EVENT_ERROR:
                Debug.LogFormat("EVENT_ERROR: {0} {1}", ev.GetArg1(), ev.GetInfo());
                break;
        }
    }

    /// <summary>
    /// Marshals a struct into its raw byte-sequence representation.
    /// </summary>
    /// <param name="structure">The struct instance to convert.</param>
    /// <returns>The struct's bytes in declaration/layout order.</returns>
    public static byte[] StructToBytes(object structure)
    {
        int size = Marshal.SizeOf(structure);
        IntPtr buffer = Marshal.AllocHGlobal(size);
        try
        {
            Marshal.StructureToPtr(structure, buffer, false);
            byte[] bytes = new byte[size];
            Marshal.Copy(buffer, bytes, 0, size);
            return bytes;
        }
        finally
        {
            // Always release the unmanaged scratch buffer.
            Marshal.FreeHGlobal(buffer);
        }
    }

    /// <summary>
    /// Builds a 44-byte WAV header for the given PCM data length
    /// (16 kHz, mono, 16-bit PCM — matching the AIUI TTS output used here).
    /// </summary>
    /// <param name="data_len">Length of the PCM data in bytes.</param>
    /// <returns>A populated WAV header struct.</returns>
    public static WAVE_Header getWave_Header(int data_len)
    {
        WAVE_Header wav_Header = new WAVE_Header();
        wav_Header.RIFF_ID = 0x46464952;        // "RIFF"
        wav_Header.File_Size = data_len + 36;
        wav_Header.RIFF_Type = 0x45564157;      // "WAVE"
        wav_Header.FMT_ID = 0x20746D66;         // "fmt "
        wav_Header.FMT_Size = 16;
        wav_Header.FMT_Tag = 0x0001;            // PCM
        wav_Header.FMT_Channel = 1;             // mono
        wav_Header.FMT_SamplesPerSec = 16000;   // sample rate
        wav_Header.AvgBytesPerSec = 32000;      // 16000 samples * 2 bytes * 1 channel
        wav_Header.BlockAlign = 2;              // 2 bytes per sample frame (16-bit mono)
        wav_Header.BitsPerSample = 16;          // 16 bits per sample
        wav_Header.DATA_ID = 0x61746164;        // "data"
        wav_Header.DATA_Size = data_len;
        return wav_Header;
    }

    /// <summary>
    /// WAV (RIFF) file header layout, marshalled verbatim via StructToBytes.
    /// </summary>
    public struct WAVE_Header
    {
        public int RIFF_ID;           // 4 bytes, "RIFF"
        public int File_Size;         // 4 bytes, file length - 8
        public int RIFF_Type;         // 4 bytes, "WAVE"
        public int FMT_ID;            // 4 bytes, "fmt "
        public int FMT_Size;          // 4 bytes, 16 (or 18 with extra info)
        public short FMT_Tag;         // 2 bytes, format code, 0x0001 = PCM
        public ushort FMT_Channel;    // 2 bytes, channel count: 1 mono, 2 stereo
        public int FMT_SamplesPerSec; // 4 bytes, sample rate
        public int AvgBytesPerSec;    // 4 bytes, bytes per second
        public ushort BlockAlign;     // 2 bytes, bytes per sample frame
        public ushort BitsPerSample;  // 2 bytes, bits per sample
        public int DATA_ID;           // 4 bytes, "data"
        public int DATA_Size;         // 4 bytes, PCM data length
    }

    #region 播放录音
    /// <summary>
    /// Parses an in-memory WAV file into a Unity AudioClip.
    /// Only uncompressed PCM-style payloads are supported (8/16/24/32-bit).
    /// </summary>
    /// <param name="fileBytes">Complete WAV file bytes, header included.</param>
    /// <param name="offsetSamples">Unused; kept for interface compatibility.</param>
    /// <param name="name">Name given to the created AudioClip.</param>
    public static AudioClip Clip(byte[] fileBytes, int offsetSamples = 0, string name = "ifly")
    {
        int subchunk1 = BitConverter.ToInt32(fileBytes, 16);
        ushort audioFormat = BitConverter.ToUInt16(fileBytes, 20);
        // NB: only uncompressed PCM wav files are supported.
        string formatCode = FormatCode(audioFormat);
        ushort channels = BitConverter.ToUInt16(fileBytes, 22);
        int sampleRate = BitConverter.ToInt32(fileBytes, 24);
        ushort bitDepth = BitConverter.ToUInt16(fileBytes, 34);
        // Offset of the "data" subchunk: RIFF header + fmt chunk + chunk id.
        int headerOffset = 16 + 4 + subchunk1 + 4;
        int subchunk2 = BitConverter.ToInt32(fileBytes, headerOffset);
        float[] data;
        switch (bitDepth)
        {
            case 8:
                data = Convert8BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                break;
            case 16:
                data = Convert16BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                break;
            case 24:
                data = Convert24BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                break;
            case 32:
                data = Convert32BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                break;
            default:
                throw new Exception(bitDepth + " bit depth is not supported.");
        }
        AudioClip audioClip = AudioClip.Create(name, data.Length, channels, sampleRate, false);
        audioClip.SetData(data, 0);
        return audioClip;
    }

    // Maps a WAV format code to its human-readable name (for diagnostics).
    private static string FormatCode(UInt16 code)
    {
        switch (code)
        {
            case 1:
                return "PCM";
            case 2:
                return "ADPCM";
            case 3:
                return "IEEE";
            case 7:
                return "μ-law";
            case 65534:
                return "WaveFormatExtensable";
            default:
                Debug.LogWarning("Unknown wav code format:" + code);
                return "";
        }
    }

    #region wav file bytes to Unity AudioClip conversion methods
    // Converts 8-bit PCM bytes to normalized floats.
    // NOTE(review): unlike the other converters, this reads source[i] without
    // adding headerOffset, so it appears to read header bytes as audio —
    // confirm before relying on the 8-bit path (unused by this project,
    // which only receives 16-bit TTS audio). Behavior preserved.
    private static float[] Convert8BitByteArrayToAudioClipData(byte[] source, int headerOffset, int dataSize)
    {
        int wavSize = BitConverter.ToInt32(source, headerOffset);
        headerOffset += sizeof(int);
        Debug.AssertFormat(wavSize > 0 && wavSize == dataSize,
            "Failed to get valid 8-bit wav size: {0} from data bytes: {1} at offset: {2}",
            wavSize, dataSize, headerOffset);
        float[] data = new float[wavSize];
        sbyte maxValue = sbyte.MaxValue;
        int i = 0;
        while (i < wavSize)
        {
            data[i] = (float)source[i] / maxValue;
            ++i;
        }
        return data;
    }

    // Converts 16-bit little-endian PCM samples to normalized floats.
    private static float[] Convert16BitByteArrayToAudioClipData(byte[] source, int headerOffset, int dataSize)
    {
        int wavSize = BitConverter.ToInt32(source, headerOffset);
        headerOffset += sizeof(int);
        Debug.AssertFormat(wavSize > 0 && wavSize == dataSize,
            "Failed to get valid 16-bit wav size: {0} from data bytes: {1} at offset: {2}",
            wavSize, dataSize, headerOffset);
        int x = sizeof(Int16); // block size = 2
        int convertedSize = wavSize / x;
        float[] data = new float[convertedSize];
        Int16 maxValue = Int16.MaxValue;
        int offset = 0;
        int i = 0;
        while (i < convertedSize)
        {
            offset = i * x + headerOffset;
            data[i] = (float)BitConverter.ToInt16(source, offset) / maxValue;
            ++i;
        }
        Debug.AssertFormat(data.Length == convertedSize,
            "AudioClip .wav data is wrong size: {0} == {1}", data.Length, convertedSize);
        return data;
    }

    // Converts 24-bit PCM samples to normalized floats by widening each
    // 3-byte sample into the top 3 bytes of a 4-byte int.
    private static float[] Convert24BitByteArrayToAudioClipData(byte[] source, int headerOffset, int dataSize)
    {
        int wavSize = BitConverter.ToInt32(source, headerOffset);
        headerOffset += sizeof(int);
        Debug.AssertFormat(wavSize > 0 && wavSize == dataSize,
            "Failed to get valid 24-bit wav size: {0} from data bytes: {1} at offset: {2}",
            wavSize, dataSize, headerOffset);
        int x = 3; // block size = 3
        int convertedSize = wavSize / x;
        int maxValue = Int32.MaxValue;
        float[] data = new float[convertedSize];
        // 4-byte block for copying 3 bytes, then copy bytes with 1 offset.
        byte[] block = new byte[sizeof(int)];
        int offset = 0;
        int i = 0;
        while (i < convertedSize)
        {
            offset = i * x + headerOffset;
            Buffer.BlockCopy(source, offset, block, 1, x);
            data[i] = (float)BitConverter.ToInt32(block, 0) / maxValue;
            ++i;
        }
        Debug.AssertFormat(data.Length == convertedSize,
            "AudioClip .wav data is wrong size: {0} == {1}", data.Length, convertedSize);
        return data;
    }

    // Converts 32-bit integer PCM samples to normalized floats.
    private static float[] Convert32BitByteArrayToAudioClipData(byte[] source, int headerOffset, int dataSize)
    {
        int wavSize = BitConverter.ToInt32(source, headerOffset);
        headerOffset += sizeof(int);
        Debug.AssertFormat(wavSize > 0 && wavSize == dataSize,
            "Failed to get valid 32-bit wav size: {0} from data bytes: {1} at offset: {2}",
            wavSize, dataSize, headerOffset);
        int x = sizeof(float); // block size = 4
        int convertedSize = wavSize / x;
        Int32 maxValue = Int32.MaxValue;
        float[] data = new float[convertedSize];
        int offset = 0;
        int i = 0;
        while (i < convertedSize)
        {
            offset = i * x + headerOffset;
            data[i] = (float)BitConverter.ToInt32(source, offset) / maxValue;
            ++i;
        }
        Debug.AssertFormat(data.Length == convertedSize,
            "AudioClip .wav data is wrong size: {0} == {1}", data.Length, convertedSize);
        return data;
    }
    #endregion
    #endregion

    #region 保存语音
    // Writes the accumulated TTS PCM to StreamingAssets/dd.wav with a
    // 16 kHz mono 16-bit WAV header.
    public void Save()
    {
        using (FileStream fs = CreateEmpty(Application.streamingAssetsPath + "/dd.wav"))
        {
            ConvertAndWrite(fs);
            WriteHeader(fs);
            Debug.Log("写入完成");
        }
    }

    // Appends the raw PCM bytes after the 44-byte header placeholder.
    private void ConvertAndWrite(FileStream fileStream)
    {
        var outData = audioData.ToArray();
        fileStream.Write(outData, 0, outData.Length);
    }

    // Creates the target file and reserves 44 zero bytes for the header.
    private FileStream CreateEmpty(string filepath)
    {
        FileStream fileStream = new FileStream(filepath, FileMode.Create);
        byte emptyByte = new byte();
        for (int i = 0; i < 44; i++) // preparing the header
        {
            fileStream.WriteByte(emptyByte);
        }
        return fileStream;
    }

    // Seeks back to the start of the stream and fills in the WAV header
    // (16 kHz, mono, 16-bit PCM) over the reserved 44 bytes.
    private void WriteHeader(FileStream stream)
    {
        int hz = 16000;
        int channels = 1;
        int samples = audioData.Count;
        stream.Seek(0, SeekOrigin.Begin);
        Byte[] riff = System.Text.Encoding.UTF8.GetBytes("RIFF");
        stream.Write(riff, 0, 4);
        Byte[] chunkSize = BitConverter.GetBytes(stream.Length - 8);
        stream.Write(chunkSize, 0, 4);
        Byte[] wave = System.Text.Encoding.UTF8.GetBytes("WAVE");
        stream.Write(wave, 0, 4);
        Byte[] fmt = System.Text.Encoding.UTF8.GetBytes("fmt ");
        stream.Write(fmt, 0, 4);
        Byte[] subChunk1 = BitConverter.GetBytes(16);
        stream.Write(subChunk1, 0, 4);
        UInt16 one = 1;
        Byte[] audioFormat = BitConverter.GetBytes(one); // 1 = PCM
        stream.Write(audioFormat, 0, 2);
        Byte[] numChannels = BitConverter.GetBytes(channels);
        stream.Write(numChannels, 0, 2);
        Byte[] sampleRate = BitConverter.GetBytes(hz);
        stream.Write(sampleRate, 0, 4);
        // byteRate = sampleRate * bytesPerSample * channels
        Byte[] byteRate = BitConverter.GetBytes(hz * channels * 2);
        stream.Write(byteRate, 0, 4);
        UInt16 blockAlign = (ushort)(channels * 2);
        stream.Write(BitConverter.GetBytes(blockAlign), 0, 2);
        UInt16 bps = 16;
        Byte[] bitsPerSample = BitConverter.GetBytes(bps);
        stream.Write(bitsPerSample, 0, 2);
        Byte[] datastring = System.Text.Encoding.UTF8.GetBytes("data");
        stream.Write(datastring, 0, 4);
        Byte[] subChunk2 = BitConverter.GetBytes(samples * channels * 2);
        stream.Write(subChunk2, 0, 4);
    }
    #endregion
}

这里接收到消息时,使用的是其他线程,所以需要切换到主线程才能使用Unity的API,不然就会报错。我这里既可以将传入的数据保存到本地,也可以在内存中直接使用并播放(这里传输过来的数据是pcm,如果要在Unity中直接使用,需要写入WAV头才行)。语音合成的数据是多段传输的,所以需要整合在一起。

六、项目位置
链接:https://pan.baidu.com/s/1X83tzFAaRLFvPeleT2Pm0A
提取码:a1oj

只需要将SDK中的AIUI文件放在StreamingAssets文件夹中和替换aiui.dll文件即可。

unity接入讯飞AIUI(Windows SDK)相关推荐

  1. Unity接入讯飞语音识别___Android版

    Unity版本4.6+Eclipse 1,首先我们先去讯飞开放平台注册一个开发者账号(注册地址:http://www.xfyun.cn),注册完成后进入控制台创建一个新的应用.​ 2,提交成功后我们需 ...

  2. java Web api接入讯飞aiui(1)

    创建应用 目标: 开发一个语音交互技能,可以让用户订票 创建号技能后点击进去创建意图,意图创建好后不可刪除,一个技能可以有多个意图,一个意图有多个语料 aiui技能文档 链接: https://doc ...

  3. Unity 接讯飞离线语音识别

    Unity 接入讯飞离线SDK , 有一些坑记录一下. 在开发者平台注册之后,申请应用,下载SDK包.这个sdk包和你的appid是对应的,不能使用别人的sdk包,然后用自己的appid 这是SDK文 ...

  4. Unity与讯飞的aiui交互

    前言最近项目是做机器人,机器人最大的(普通的)AI功能就是语音交流,所以AIUI就是一个很好的选择.AIUI是封装了,讯飞的语音合成.语音识别等功能,重点是它有个兜底功能选择,还有技能工作室的加持,虽 ...

  5. 讯飞AIUI平台语义理解配置全攻略——以Android版AIUI SDK为例

    讯飞AIUI可以进行语音识别与语义理解, 首先进入讯飞AIUI开放平台,然后点击上方的应用接入,点击进入应用. 然后点击创建应用,选择自己需要创建的应用即可,我这里以Android应用为例. 在左侧选 ...

  6. Unity与讯飞语音交互:使用aiui技能

    语音交互 说到人工智能,离不开语音,大家会认为一个设备可以跟人对话是有智能的体现,在国内语音智能研发,讯飞是公认的做的最好的,在了解语音智能时,客户提的是讯飞,因为他们目标是要做一个好的语音交互.客户 ...

  7. UNITY 接讯飞语音过程总结

    UNITY 接讯飞语音过程总结 11:13 2017/3/14 1,安装问题:JDK与ECLIPSE位数一定要对应,32位对64位会出现 java was returned ....code 13的弹 ...

  8. Android基于讯飞AIUI的聊天Demo

    基于讯飞AIUI实现一个AI聊天Demo,首先在AIUI开放平台创建应用,做下简单配置,勾选想要的语义技能,记得保存修改 然后选择点开发工具,选择下载对应的SDK 应用信息里有appid,回头下载de ...

  9. JAVA接入讯飞离线合成语音

    由于公司接到的项目环境不允许联网,这边通过不停的探索,终于找到通过JNA的方式接入,废话不多说,下面展示! 1.讯飞开放平台SDK下载 2.JNA包依赖引入 <!-- https://mvnre ...

最新文章

  1. 6001.Cacti监控华为S8512核心交换机多块板卡的CPU和内存
  2. 基于Centos7.2搭建Cobbler自动化批量部署操作系统服务
  3. C 语言判断大端小端
  4. vue --- 全局配置过滤函数,使用moment函数来格式化时间
  5. android编译VTS测试组件
  6. java 静态绑定_java的动态绑定和静态绑定
  7. 20200518每日一句
  8. Visual Studio 2015离线版msdn下载和安装
  9. OpenCV与机器视觉
  10. activiti6监听器使用
  11. Win7系统更新错误代码80073712的解决办法
  12. 计算机右键菜单太多,文件右键太多?win10 ghost小方法删除右键多余菜单
  13. css实现图片水平居中对齐
  14. js 中的 let 关键字
  15. 什么是MRAM(不挥发性磁性随机存储器)
  16. 打开图片或者视频显示“文件系统错误-2147416359”
  17. 文件服务器异地容灾,服务器异地容灾
  18. 黑马程序员_工欲善其事必先利其器
  19. 史上最全的 pom.xml 文件详解
  20. 实名报名超5000人!RTE2022即将开幕,声网发布RTE行业首本专业书《实时万象》

热门文章

  1. 微擎系统内置的所有函数大全,一共5435个,可以当作微擎开发函数手册来查看(下篇)
  2. python教程十一 元组
  3. java服务器保存特殊字符和表情入库报错
  4. chdir改变当前目录
  5. 30天自制操作系统——第0天
  6. python统计小说人物出现次数_使用python统计《三国演义》小说里人物出现次数前十名,并实现可视化。...
  7. @所有运维人丨多链路负载均衡那些事儿
  8. 使用STM32输出PWM波形
  9. windows服务创建
  10. 计算机主板i3 i5区别,8代i3、i5、i7处理器的用途有哪些区别吗?如何配搭主板?...