I'm using AVAudioRecorder. I record the sound in CAF format. After that I convert the CAF file to AAC format using TPAACAudioConverter. It works fine, but the converted file's duration is 00:00. Is there any way to get the duration of the AAC audio file?
可以将文章内容翻译成中文,广告屏蔽插件可能会导致该功能失效(如失效,请关闭广告屏蔽插件后再试):
问题:
回答1:
The AAC format isn't supported on some simulators. You can check it on your device: it will work fine there, and you should get the AAC audio file's duration.
回答2:
Can you play back the CAF file?
If you only want to record a sound from the microphone to an AAC file, you can use Audio Queue Services (I can post some code).
Edit: it's an implementation from an Apple developer tutorial; there might be some errors, since I modified it to fit your question.
// AudioQ.mm
//
// Minimal Audio Queue Services recorder: captures microphone input and
// writes the encoded packets to an AAC (ADTS) file through AudioFile
// write callbacks, so the encoded bytes can also be intercepted (e.g.
// for broadcasting) as they are written.

@implementation AudioQ

// Number of audio queue buffers kept in flight (Apple recommends 3).
static const int nBuffer = 3;

// Aggregate recording state shared between the ObjC methods and the
// C callbacks below.
struct AQRecorderState {
    AudioStreamBasicDescription mDataFormat;        // output (AAC) format
    AudioQueueRef               mQueue;             // recording queue
    AudioQueueBufferRef         mBuffers[nBuffer];  // reusable capture buffers
    AudioFileID                 mAudioFile;         // destination file
    UInt32                      bufferByteSize;     // size of each buffer
    SInt64                      mCurrentPacket;     // next packet index to write
    bool                        mIsRunning;         // recording in progress?
};

AQRecorderState aqData;
CFURLRef url;

/// AudioFile write callback: invoked every time encoded packets are
/// written to the destination file. A hook for handling/streaming the
/// encoded audio; returning noErr (0) accepts the write unchanged.
static OSStatus BufferFilledHandler(void       *inUserData,
                                    SInt64      inPosition,
                                    UInt32      requestCount,
                                    const void *buffer,
                                    UInt32     *actualCount) {
    // callback when you write to the file
    // you can handle audio packets here and send them for broadcasting
    return 0;
}

/// Audio queue input callback: writes the freshly captured packets to
/// the file, advances the packet cursor, and re-enqueues the buffer
/// for further capture while recording is still running.
static void HandleInputBuffer(void                               *aqData,
                              AudioQueueRef                       inAq,
                              AudioQueueBufferRef                 inBuffer,
                              const AudioTimeStamp               *inStartTime,
                              UInt32                              inNumPackets,
                              const AudioStreamPacketDescription *inPacketDesc) {
    AQRecorderState *pAqData = (AQRecorderState *)aqData;

    if (AudioFileWritePackets(pAqData->mAudioFile,
                              false,                        // don't cache the data
                              inBuffer->mAudioDataByteSize,
                              inPacketDesc,
                              pAqData->mCurrentPacket,
                              &inNumPackets,                // in: requested, out: actually written
                              inBuffer->mAudioData) == noErr) {
        pAqData->mCurrentPacket += inNumPackets;
    } else {
        NSLog(@"err writing packet");
    }

    // Once recording has stopped, stop recycling buffers.
    if (pAqData->mIsRunning == 0) return;

    AudioQueueEnqueueBuffer(pAqData->mQueue, inBuffer, 0, NULL);
}

/// Configures the output format (22.05 kHz mono AAC), creates the input
/// queue, opens the callback-backed ADTS file, and primes the buffers.
/// @return noErr (0) on completion. NOTE(review): intermediate OSStatus
/// results are not checked here — mirror of the original tutorial code.
- (OSStatus)initializeAQ {
    //--- set the output format ---//
    aqData.mDataFormat.mSampleRate       = 22050;
    aqData.mDataFormat.mFormatID         = kAudioFormatMPEG4AAC;
    aqData.mDataFormat.mFormatFlags      = kMPEG4Object_AAC_Main;
    // For a compressed format, bytes-per-packet/frame and bits-per-channel
    // are variable and must be 0; the encoder fixes frames-per-packet at 1024.
    aqData.mDataFormat.mBytesPerPacket   = 0;
    aqData.mDataFormat.mFramesPerPacket  = 1024;
    aqData.mDataFormat.mBytesPerFrame    = 0;
    aqData.mDataFormat.mChannelsPerFrame = 1;
    aqData.mDataFormat.mBitsPerChannel   = 0;

    AudioFileTypeID fileType = kAudioFileAAC_ADTSType;
    aqData.bufferByteSize = 0x5000;  // 20 KiB per buffer (tutorial value)

    AudioQueueNewInput(&aqData.mDataFormat,
                       HandleInputBuffer,
                       &aqData,                 // client data for the callback
                       CFRunLoopGetMain(),
                       kCFRunLoopCommonModes,
                       0,
                       &aqData.mQueue);

    aqData.mCurrentPacket = 0;
    aqData.mIsRunning     = true;

    //--- record into a file; get the callback when writing ---//
    // BUG FIX: the original passed (void*)&pAqData, the address of a LOCAL
    // pointer variable (an AQRecorderState** that dangles after this method
    // returns). Pass the address of the long-lived state struct itself.
    AudioFileInitializeWithCallbacks(&aqData,
                                     nil,                  // no read callback (write-only)
                                     BufferFilledHandler,
                                     nil,                  // no get-size callback
                                     nil,                  // no set-size callback
                                     fileType,
                                     &aqData.mDataFormat,
                                     kAudioFileFlags_EraseFile,
                                     &aqData.mAudioFile);

    //--- prepare the set of audio queue buffers ---//
    for (int i = 0; i < nBuffer; i++) {
        AudioQueueAllocateBuffer(aqData.mQueue, aqData.bufferByteSize, &aqData.mBuffers[i]);
        AudioQueueEnqueueBuffer(aqData.mQueue, aqData.mBuffers[i], 0, NULL);
    }

    return 0;
}

/// Starts capturing audio on the prepared queue.
- (void)start {
    AudioQueueStart(aqData.mQueue, NULL);
}

/// Stops the queue synchronously, tears it down, and closes the file.
- (void)stop {
    NSLog(@"stoping");
    AudioQueueStop(aqData.mQueue, true);   // true = stop synchronously
    aqData.mIsRunning = false;             // callback stops re-enqueueing
    AudioQueueDispose(aqData.mQueue, true);
    AudioFileClose(aqData.mAudioFile);     // finalizes the ADTS file
}

@end
AudioQ.h
// AudioQ.h (fragment) — declarations for the Audio Queue recorder.
// NOTE(review): the enclosing @interface/@end lines are not visible in
// this chunk; `static` function declarations in a header are unusual
// (each including translation unit gets its own copy) — confirm intent.

// Audio queue input callback: receives captured buffers from the queue
// and writes them to the destination file (see AudioQ.mm).
static void HandleInputBuffer( void *aqData, AudioQueueRef inAq, AudioQueueBufferRef inBuffer, const AudioTimeStamp *inStartTime, UInt32 inNumPackets, const AudioStringPacketDescription *inPacketDesc );
// AudioFile write callback: invoked whenever encoded packets are written
// to the file; a hook for intercepting/streaming the encoded bytes.
static OSStatus BufferFilledHandler( void * inUserData, SInt64 inPosition, UInt32 requestCount, const void * buffer, UInt32 * actualCount );
// Configures the AAC output format, queue, file, and buffers.
-(OSStatus)initializeAQ;
// Stops recording, disposes the queue, and closes the file.
-(void)stop;
// Starts capturing audio.
-(void)start;