A lot of platforms these days just aren't iOS-friendly; this one doesn't even ship an iOS demo...
So I had to work it out myself. The fiddly part is the interface authentication: if authentication keeps failing, check whether the Base64-encoded signature is 44 characters long (HMAC-SHA256 yields a 32-byte digest, and Base64 of 32 bytes is exactly 44 characters).
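To catch this early, a simple assertion right after signing helps; this check is my own addition and uses the hmacSHA256WithSecret:content: helper shown below:
NSString *signature = [self hmacSHA256WithSecret:kXfyunApiSecret content:signature_origin];
NSAssert(signature.length == 44, @"Base64 of a 32-byte HMAC-SHA256 digest should be 44 characters");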
#import <CommonCrypto/CommonHMAC.h>
#import <CommonCrypto/CommonDigest.h>
#import <xlocale.h>
static NSString *const url = @"wss://ws-api.xfyun.cn/v2/igr";//WebSocket address
- (NSString *)host {
NSURL *r = [NSURL URLWithString:url];
NSString *date = [self lg_rfc1123String];
NSString *signature_origin = [NSString stringWithFormat:@"host: %@\ndate: %@\nGET %@ HTTP/1.1",r.host,date,r.path];
NSString *signature = [self hmacSHA256WithSecret:kXfyunApiSecret content:signature_origin];//sign signature_origin with HMAC-SHA256 using apiSecret and Base64-encode the result
NSString *authorization_origin = [NSString stringWithFormat:@"api_key=\"%@\", algorithm=\"hmac-sha256\", headers=\"host date request-line\", signature=\"%@\"",kXfyunApiKey,signature];
NSData *dataAuthorization = [authorization_origin dataUsingEncoding:NSUTF8StringEncoding];
NSString *authorization = [dataAuthorization base64EncodedStringWithOptions:0];//Base64-encode authorization_origin
return [self lg_safeUrlWithFormat:[NSString stringWithFormat:@"%@?authorization=%@&date=%@&host=%@",url,authorization,date,r.host]];
}
- (NSString*)lg_rfc1123String {//current timestamp in RFC1123 format (e.g. Mon, 02 Jan 2006 15:04:05 GMT)
time_t date = (time_t)[[NSDate date] timeIntervalSince1970];
struct tm timeinfo;
gmtime_r(&date, &timeinfo);
char buffer[32];
size_t ret = strftime_l(buffer, sizeof(buffer), "%a, %d %b %Y %H:%M:%S UTC", &timeinfo, NULL);
if (ret) {
return @(buffer);
} else {
return nil;
}
}
/**
* Signing method, MAC algorithm: HmacSHA256
*
* @param secret the secret key
* @param content the text to sign
*
* @return the Base64-encoded signature string
*/
- (NSString *)hmacSHA256WithSecret:(NSString *)secret content:(NSString *)content {
const char *cKey = [secret cStringUsingEncoding:NSUTF8StringEncoding];
const char *cData = [content cStringUsingEncoding:NSUTF8StringEncoding];
//Sha256:
unsigned char cHMAC[CC_SHA256_DIGEST_LENGTH];
CCHmac(kCCHmacAlgSHA256, cKey, strlen(cKey), cData, strlen(cData), cHMAC);
NSData *HMAC = [[NSData alloc] initWithBytes:cHMAC
length:sizeof(cHMAC)];
NSString *hash = [HMAC base64EncodedStringWithOptions:0];//Base64-encode the HMAC digest
return hash;
}
- (NSString *)lg_safeUrlWithFormat:(id)object {
NSString *cover = [NSString stringWithFormat:@"%@",object];
cover = [cover stringByAddingPercentEncodingWithAllowedCharacters:[NSCharacterSet URLQueryAllowedCharacterSet]];
return cover;
}
For the WebSocket itself I use the third-party pod 'SocketRocket'.
SocketRocket integration
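For reference, a minimal connection sketch (my own simplified version; the SocketRocketUtility wrapper used later in this post is not shown, and the socket property here is assumed):
#import <SocketRocket/SRWebSocket.h>
- (void)connect {
NSURL *wsURL = [NSURL URLWithString:[self host]];//signed URL built above
self.socket = [[SRWebSocket alloc] initWithURLRequest:[NSURLRequest requestWithURL:wsURL]];
self.socket.delegate = self;
[self.socket open];
}
#pragma mark - SRWebSocketDelegate
- (void)webSocketDidOpen:(SRWebSocket *)webSocket {
NSLog(@"websocket opened, authentication passed");
}
- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error {
NSLog(@"websocket failed: %@", error);//usually means the signature/authorization is wrong
}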
Capturing audio
#import "LGAudioRecordManager.h"
#import <AVFoundation/AVFoundation.h>
#import "LGRecordFileManager.h"
static Float64 const LGSampleRate = 8000;//sample rate
static UInt32 const LGChannelsPerFrame = 1;//number of channels
static UInt32 const LGBitsPerChannel = 16;//bits per sample
static NSInteger const LGBufferCount = 3;//number of buffers
static CGFloat const LGBufferDurationSeconds = 0.2;//duration of each buffer in seconds
@interface LGAudioRecordManager () {
AudioQueueRef audioRef;//audio queue object
AudioStreamBasicDescription recordConfiguration;//audio stream format description
AudioQueueBufferRef audioBuffers[LGBufferCount];//audio queue buffers
}
@property (nonatomic, copy) NSString *recordFilePath;//recording file path in the sandbox
@property (nonatomic,assign) AudioFileID recordFileID;//audio file ID, used to reference the output file
@property (nonatomic,assign) SInt64 recordPacket;//current packet index within the recording file
@end
@implementation LGAudioRecordManager
+ (instancetype)sharedManager {
static LGAudioRecordManager *manager = nil;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
manager = [[LGAudioRecordManager alloc]init];
});
return manager;
}
- (instancetype)init {
if (self = [super init]) {
//recording file path
self.recordFilePath = [LGRecordFileManager cacheFileWithFolder:@"LGAudioRecord" FileName:@"audioRecord.wav"];
NSLog(@"recordFile:%@",_recordFilePath);
recordConfiguration.mSampleRate = LGSampleRate;
recordConfiguration.mChannelsPerFrame = LGChannelsPerFrame;
//encoding format
recordConfiguration.mFormatID = kAudioFormatLinearPCM;
recordConfiguration.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
//bits per sample
recordConfiguration.mBitsPerChannel = LGBitsPerChannel;
//bytes per frame
recordConfiguration.mBytesPerFrame = (recordConfiguration.mBitsPerChannel / 8) * recordConfiguration.mChannelsPerFrame;
//bytes per packet
recordConfiguration.mBytesPerPacket = recordConfiguration.mBytesPerFrame;
//frames per packet
recordConfiguration.mFramesPerPacket = 1;
}
return self;
}
- (void)initAudio {
//configure the audio input and its callback
OSStatus status = AudioQueueNewInput(&recordConfiguration, inputBufferHandler, (__bridge void *)(self), NULL, NULL, 0, &audioRef);
if( status != kAudioSessionNoError ) {
NSLog(@"初始化出錯");
return ;
}
//estimate the buffer size
int frames = [self computeRecordBufferSize:&recordConfiguration seconds:LGBufferDurationSeconds];
int bufferByteSize = frames * recordConfiguration.mBytesPerFrame;
NSLog(@"緩存區大小%d",bufferByteSize);
//allocate and enqueue the buffers
for (int i = 0; i < LGBufferCount; i++) {
AudioQueueAllocateBuffer(audioRef, bufferByteSize, &audioBuffers[i]);
AudioQueueEnqueueBuffer(audioRef, audioBuffers[i], 0, NULL);
}
}
//AudioQueue input callback
void inputBufferHandler(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer, const AudioTimeStamp *inStartTime,UInt32 inNumPackets, const AudioStreamPacketDescription *inPacketDesc) {
LGAudioRecordManager *audioManager = [LGAudioRecordManager sharedManager];
if (inNumPackets > 0) {
//write the packets to the file
AudioFileWritePackets(audioManager.recordFileID, FALSE, inBuffer->mAudioDataByteSize,inPacketDesc, audioManager.recordPacket, &inNumPackets, inBuffer->mAudioData);
audioManager.recordPacket += inNumPackets;
}
if (audioManager.isRecording) {
//re-enqueue the buffer so it can be reused
AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
}
}
- (int)computeRecordBufferSize:(const AudioStreamBasicDescription*)format seconds:(float)seconds {
int packets, frames, bytes = 0;
frames = (int)ceil(seconds * format->mSampleRate);
if (format->mBytesPerFrame > 0) {
bytes = frames * format->mBytesPerFrame;
}else {
UInt32 maxPacketSize = 0;
if (format->mBytesPerPacket > 0) {
maxPacketSize = format->mBytesPerPacket;
}
if (format->mFramesPerPacket > 0) {
packets = frames / format->mFramesPerPacket;
}else {
packets = frames;
}
if (packets == 0) {
packets = 1;
}
bytes = packets * maxPacketSize;
}
return bytes;
}
- (void)startRecord {
[LGRecordFileManager removeFileAtPath:self.recordFilePath];
[self initAudio];
CFURLRef url = CFURLCreateWithString(kCFAllocatorDefault, (CFStringRef)self.recordFilePath, NULL);
//create the audio file
AudioFileCreateWithURL(url, kAudioFileCAFType, &recordConfiguration, kAudioFileFlags_EraseFile,&_recordFileID);
CFRelease(url);
self.recordPacket = 0;
//configure the audio session (needed when another audio source, e.g. music playback, has changed it)
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
[[AVAudioSession sharedInstance] setActive:YES error:nil];
OSStatus status = AudioQueueStart(audioRef, NULL);
if( status != kAudioSessionNoError ) {
NSLog(@"開始出錯");
return;
}
self.isRecording = true;
NSLog(@"開始錄音");
// enable level metering so the channel power can be read
[self performSelectorOnMainThread:@selector(enableUpdateLevelMetering) withObject:nil waitUntilDone:NO];
}
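One thing startRecord does not handle is microphone permission; in a real app recording should only start after the user grants it (a small sketch I added; remember to declare NSMicrophoneUsageDescription in Info.plist):
[[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
dispatch_async(dispatch_get_main_queue(), ^{
if (granted) {
[[LGAudioRecordManager sharedManager] startRecord];
} else {
NSLog(@"microphone permission denied");
}
});
}];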
- (CGFloat)getCurrentVolume {
UInt32 dataSize = sizeof(AudioQueueLevelMeterState) * recordConfiguration.mChannelsPerFrame;
AudioQueueLevelMeterState *levels = (AudioQueueLevelMeterState*)malloc(dataSize);
OSStatus rc = AudioQueueGetProperty(audioRef, kAudioQueueProperty_CurrentLevelMeter, levels, &dataSize);
if (rc) {
NSLog(@"OSStatus %d", (int)rc);
}
float channelAvg = 0;
for (int i = 0; i < recordConfiguration.mChannelsPerFrame; i++) {
channelAvg += levels[i].mPeakPower;
}
free(levels);
return channelAvg;
}
- (BOOL)enableUpdateLevelMetering {
UInt32 val = 1;
OSStatus status = AudioQueueSetProperty(audioRef, kAudioQueueProperty_EnableLevelMetering, &val, sizeof(UInt32));
if( status == kAudioSessionNoError ) {
return YES;
}
return NO;
}
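With level metering enabled, the peak power can be polled on a timer while recording, e.g. to drive a volume animation (a sketch; the levelTimer property is hypothetical):
self.levelTimer = [NSTimer scheduledTimerWithTimeInterval:0.1 repeats:YES block:^(NSTimer *timer) {
if ([LGAudioRecordManager sharedManager].isRecording) {
NSLog(@"current peak power %f", [[LGAudioRecordManager sharedManager] getCurrentVolume]);
}
}];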
- (void)stopRecord {
if (self.isRecording) {
self.isRecording = NO;
//stop the recording queue, close the file and dispose of the queue; no need to check for success here
OSStatus status = AudioQueueStop(audioRef, true);
OSStatus status1 = AudioFileClose(_recordFileID);
OSStatus status2 = AudioQueueDispose(audioRef, TRUE);
NSLog(@"銷毀錄音%d%d%d",(int)status,(int)status1,(int)status2);
}
}
- (void)dealloc {
AudioQueueDispose(audioRef, TRUE);
AudioFileClose(_recordFileID);
}
@end
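Using the recorder is then just the singleton's start/stop pair; the recorded file ends up at recordFilePath, ready to be streamed (a sketch of the call order, nothing more):
[[LGAudioRecordManager sharedManager] startRecord];
//... user speaks for a few seconds ...
[[LGAudioRecordManager sharedManager] stopRecord];
//then connect the socket with the signed URL from -host and run the upload loop below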
Uploading audio
static char *const record_upload_queue = "com.soma.recordUploadQueue";//serial queue for uploading the recording
static NSInteger const maxLength = 1280;//bytes of audio sent per frame
static NSInteger const minInterval = 40;//40 ms interval between frames
dispatch_queue_t recordQ = dispatch_queue_create(record_upload_queue,
DISPATCH_QUEUE_SERIAL);
dispatch_async(recordQ, ^{
NSString *path = [LGRecordFileManager cacheFileWithFolder:@"LGAudioRecord" FileName:@"audioRecord.wav"];
NSData *data = [[NSData alloc] initWithContentsOfFile:path];
NSLog(@"文件大小%@ %f",data,(CGFloat)data.length/(CGFloat)maxLength);
NSInputStream *inputStream = [[NSInputStream alloc] initWithFileAtPath:path];
[inputStream open];
NSInteger index = 0;
int64_t addBytes = 0;//bytes read so far
int64_t allBytes = data.length;//total bytes
uint8_t readBuffer [maxLength];
BOOL endOfStreamReached = NO;//reached the end of the stream
BOOL isError = NO;//a read error occurred
while (!endOfStreamReached) {
// if ([LGAudioUploadManager sharedManager].uploadStatus == AudioUploadStatusFailure) {
// break;
// }
addBytes += maxLength;
NSInteger bytesRead = [inputStream read:readBuffer maxLength:maxLength];
if (bytesRead == 0) {
//reached the end of the file
endOfStreamReached = YES;
} else if (bytesRead == -1) {
//file read error
endOfStreamReached = YES;
isError = YES;
} else {
//wrap the bytes just read (without copying) and Base64-encode them
NSData *data = [NSData dataWithBytesNoCopy:readBuffer length:bytesRead freeWhenDone:NO];
NSString *audio = [data base64EncodedStringWithOptions:0];//Base64 encode
NSInteger status = 0;//first frame
if (addBytes < allBytes) {
status = 1;//intermediate frame
}else {
status = 2;//last frame
}
if (index == 0) {
status = 0;//first frame
}
NSMutableDictionary *dict = [NSMutableDictionary dictionary];
if (status == 0) {//the first frame carries the common and business parameters
NSDictionary *business = @{@"ent":@"igr",@"aue":@"raw",@"rate":@(8000)};
NSDictionary *common = @{@"app_id":kXfyunAppId};
dict[@"business"] = business;
dict[@"common"] = common;
}
NSDictionary *dataDict = @{@"status":@(status),@"audio":audio};
dict[@"data"] = dataDict;
[[SocketRocketUtility instance] sendData:dict withRequestURI:nil];
index++;
NSLog(@"NSInputStream %ld %ld",status,index);
[NSThread sleepForTimeInterval:minInterval/1000.f];
}
}
if (isError) {//read failed: treat it as the end of the upload
NSLog(@"error reading the recording file, please record again");
}
[inputStream close];
});
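After the last frame (status 2) is sent, the server pushes the recognition result back over the same socket. Below is a minimal sketch of handling it in the SRWebSocket delegate; the {code, message, data} envelope follows the general xfyun v2 WebSocket responses, but the exact result fields should be checked against the igr documentation:
- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message {
NSData *jsonData = [message isKindOfClass:[NSData class]] ? message : [(NSString *)message dataUsingEncoding:NSUTF8StringEncoding];
NSDictionary *response = [NSJSONSerialization JSONObjectWithData:jsonData options:0 error:nil];
NSInteger code = [response[@"code"] integerValue];
if (code != 0) {
NSLog(@"igr error %ld: %@", (long)code, response[@"message"]);
return;
}
NSLog(@"igr result: %@", response[@"data"]);//gender/age result arrives in data once recognition finishes
}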