How do I set up SetProperty with AudioQueueStart?

Time: 2012-08-05 15:23:04

Tags: iphone objective-c ios audio-recording audiotoolbox

How do I set the property for AudioQueueStart? I want to record sound from the device, and I was told to use 'kAudioSessionCategory_PlayAndRecord':

UInt32 sessionCategory = kAudioSessionCategory_PlayAndRecord;
AudioSessionSetProperty(kAudioSessionProperty_AudioCategory, sizeof(sessionCategory), &sessionCategory);

But when I apply this in my simple code below, it doesn't work. How do I set the property correctly?
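As far as I can tell from the documentation, the session also has to be initialized and activated before the category change takes effect. This is only a sketch of that setup as I understand it (untested; the NULL run-loop/listener arguments are placeholders and the helper name SetupRecordingSession is just something I made up):

#include <AudioToolbox/AudioToolbox.h>

/* Sketch only: initialize and activate the audio session before creating
   the recording queue. */
static void SetupRecordingSession(void)
{
    /* Must be called once before any AudioSessionSetProperty() call. */
    AudioSessionInitialize(NULL, NULL, NULL, NULL);

    /* Request a category that allows audio input as well as output. */
    UInt32 sessionCategory = kAudioSessionCategory_PlayAndRecord;
    AudioSessionSetProperty(kAudioSessionProperty_AudioCategory,
                            sizeof(sessionCategory), &sessionCategory);

    /* Activate the session so recording can actually start. */
    AudioSessionSetActive(true);
}

SetupRecordingSession() would then be called at the top of main(), before AudioQueueNewInput. Is that the right way to do it?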

#include <AudioToolbox/AudioQueue.h>
#include <AudioToolbox/AudioFile.h>
#include <AudioToolbox/AudioConverter.h>
#include <AudioToolbox/AudioToolbox.h>
#include <stdio.h>
#include <string.h>   /* for strlen() */
#include <stdlib.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/select.h>

#define AUDIO_BUFFERS 3

typedef struct AQCallbackStruct
{
    AudioStreamBasicDescription mDataFormat;
    AudioQueueRef queue;
    AudioQueueBufferRef mBuffers[AUDIO_BUFFERS];
    AudioFileID outputFile;
    unsigned long frameSize;
    long long recPtr;
    int run;
} AQCallbackStruct;

static void AQInputCallback(
    void                                 *aqr,
    AudioQueueRef                        inQ,
    AudioQueueBufferRef                  inQB,
    const AudioTimeStamp                 *timestamp,
    UInt32                               frameSize,
    const AudioStreamPacketDescription   *mDataFormat)
{
    AQCallbackStruct *aqc = (AQCallbackStruct *) aqr;

    /* Write data to file */
    if (AudioFileWritePackets (aqc->outputFile, false, inQB->mAudioDataByteSize,
                               mDataFormat, aqc->recPtr, &frameSize,
                               inQB->mAudioData) == noErr)
    {
        aqc->recPtr += frameSize;
    }

    /* Don't re-queue the sound buffers if we're supposed to stop recording */
    if (!aqc->run)
      return;

    AudioQueueEnqueueBuffer (aqc->queue, inQB, 0, NULL);
}

int main(int argc, char *argv[])
{
    AQCallbackStruct aqc;
    AudioFileTypeID fileFormat;
    CFURLRef filename;
    struct timeval tv;
    int i;

    if (argc < 3)
    {
        fprintf(stderr, "Syntax: %s [filename.aif] [seconds]\n", argv[0]);
        exit(EXIT_FAILURE);
    }

    // How do I set the property here?
    //UInt32 sessionCategory = kAudioSessionCategory_PlayAndRecord;
    //AudioSessionSetProperty(kAudioSessionProperty_AudioCategory, sizeof(sessionCategory), &sessionCategory);
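    /* From what I have read, AudioSessionInitialize() has to be called before
       AudioSessionSetProperty(), and the session has to be activated with
       AudioSessionSetActive(true) -- see the sketch above main(). */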

    aqc.mDataFormat.mFormatID = kAudioFormatLinearPCM;
    aqc.mDataFormat.mSampleRate = 44100.0;
    aqc.mDataFormat.mChannelsPerFrame = 2;
    aqc.mDataFormat.mBitsPerChannel = 16;
    aqc.mDataFormat.mBytesPerPacket =
        aqc.mDataFormat.mBytesPerFrame =
            aqc.mDataFormat.mChannelsPerFrame * sizeof (short int);
    aqc.mDataFormat.mFramesPerPacket = 1;
    aqc.mDataFormat.mFormatFlags =
        kLinearPCMFormatFlagIsBigEndian
      | kLinearPCMFormatFlagIsSignedInteger
      | kLinearPCMFormatFlagIsPacked;
    aqc.frameSize = 735;
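    /* 735 frames is 1/60 of a second at 44.1 kHz. Note that
       AudioQueueAllocateBuffer() below takes its size argument in bytes, so
       this value also ends up being used as the buffer size in bytes. */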

    AudioQueueNewInput (&aqc.mDataFormat, AQInputCallback, &aqc, NULL,
                        kCFRunLoopCommonModes, 0, &aqc.queue);

    /* Create output file */

    fileFormat = kAudioFileAIFFType;
    filename = CFURLCreateFromFileSystemRepresentation (NULL, (const UInt8*)argv[1], strlen (argv[1]), false);

    AudioFileCreateWithURL (
        filename,
        fileFormat,
        &aqc.mDataFormat,
        kAudioFileFlags_EraseFile,
        &aqc.outputFile
    );

    /* Initialize the recording buffers */

    for (i = 0; i < AUDIO_BUFFERS; i++)
    {
        AudioQueueAllocateBuffer (aqc.queue, aqc.frameSize, &aqc.mBuffers[i]);
        AudioQueueEnqueueBuffer (aqc.queue, aqc.mBuffers[i], 0, NULL);
    }

    aqc.recPtr = 0;
    aqc.run = 1;

    AudioQueueStart (aqc.queue, NULL);

    /* Hang around for a while while the recording takes place */

    tv.tv_sec = atof(argv[2]);
    tv.tv_usec = 0;
    select(0, NULL, NULL, NULL, &tv);

    /* Shut down recording */

    AudioQueueStop (aqc.queue, true);
    aqc.run = 0;

    AudioQueueDispose (aqc.queue, true);
    AudioFileClose (aqc.outputFile);

    exit(EXIT_SUCCESS);
}

0 Answers:

No answers