I have an AVPlayer playing audio from a mono AVAsset, with some processing done along the way via an audio processing tap (MTAudioProcessingTap). How can I convert this asset to stereo before it reaches the tap? The second channel can be empty or a copy of the first (it will be filled in manually inside the tap).
I have already tried converting mono to stereo inside the tap, but once we are inside the tap we apparently have no control over the ASBD or the AudioBufferList structure. I have also done the conversion offline, but that carries a serious drawback: it can be slow, which makes it a poor fit for network streaming.
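For reference, here is a minimal sketch of the kind of offline pass I mean, using an AVAssetReader to decode the mono track as two-channel PCM (asset stands for the loaded AVURLAsset; this assumes AVAssetReaderTrackOutput performs the channel-count conversion when AVNumberOfChannelsKey is set, and it omits the AVAssetWriter side and error handling):
// Sketch only: decode the mono track to stereo PCM ahead of playback.
// Assumes the track output honors AVNumberOfChannelsKey for the upmix.
NSError *error = nil;
AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:asset error:&error];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
NSDictionary *stereoSettings = @{
    AVFormatIDKey               : @(kAudioFormatLinearPCM),
    AVNumberOfChannelsKey       : @2,   // request two channels from the mono source
    AVLinearPCMBitDepthKey      : @32,
    AVLinearPCMIsFloatKey       : @YES,
    AVLinearPCMIsNonInterleaved : @YES
};
AVAssetReaderTrackOutput *output =
    [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack
                                               outputSettings:stereoSettings];
[reader addOutput:output];
[reader startReading];
// ... then pull CMSampleBuffers with [output copyNextSampleBuffer] and feed
// them to an AVAssetWriter. The whole pass has to finish before playback can
// start, which is the "slow, unsuitable for streaming" drawback above.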
Below is bare-bones (but complete) code that will work with any mono audio file. You will see that by the time it reaches the processing tap, only one channel is available instead of the desired two. To try the code, just add the MediaPlayer and TapProcessor classes below to a blank Single View Application, use the following ViewController code in place of the default, and add your own mono audio file to the project. Thanks for reading.
MediaPlayer.h
#import <Foundation/Foundation.h>
@interface MediaPlayer : NSObject
@end
MediaPlayer.m
#import "MediaPlayer.h"
#import "TapProcessor.h"
#import <AVFoundation/AVFoundation.h>
@interface MediaPlayer()
@property (nonatomic, strong) AVAsset *asset;
@property (nonatomic, strong) AVPlayer *player;
@property (nonatomic, strong) TapProcessor *audioTapProcessor;
@end
@implementation MediaPlayer
- (id)init {
    if (self = [super init]) {
        NSString *path = [[NSBundle mainBundle] pathForResource:@"MonoSource"
                                                         ofType:@"mp3"];
        [self loadFileWithPath:path];
    }
    return self;
}
- (void)loadFileWithPath:(NSString *)path {
    NSURL *fileURL = [NSURL fileURLWithPath:path];
    NSDictionary *options = @{AVURLAssetPreferPreciseDurationAndTimingKey : @YES};
    self.asset = [AVURLAsset URLAssetWithURL:fileURL options:options];
    [self.asset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            AVKeyValueStatus status = [self.asset statusOfValueForKey:@"tracks" error:nil];
            switch (status) {
                case AVKeyValueStatusLoaded:
                    [self setupPlayer];
                    break;
                default:
                    break;
            }
        });
    }];
}
- (void)setupPlayer {
    AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:self.asset];
    AVAssetTrack *audioTrack = [[self.asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [self printInfoForTrack:audioTrack];

    TapProcessor *newProcessor = [[TapProcessor alloc] initWithTrack:audioTrack];
    // Hold a strong reference to the processor; the tap's clientInfo pointer
    // does not retain it.
    self.audioTapProcessor = newProcessor;
    item.audioMix = newProcessor.audioMix;

    self.player = [AVPlayer playerWithPlayerItem:item];
    [self.player play];
}
- (void)printInfoForTrack:(AVAssetTrack *)track {
    CMAudioFormatDescriptionRef item = (__bridge CMAudioFormatDescriptionRef)[track.formatDescriptions objectAtIndex:0];
    const AudioStreamBasicDescription *desc = CMAudioFormatDescriptionGetStreamBasicDescription(item);
    // mChannelsPerFrame is a UInt32, so log it with %u.
    NSLog(@"Number of track channels: %u", (unsigned int)desc->mChannelsPerFrame);
}
@end
TapProcessor.h
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
@interface TapProcessor : NSObject
- (id)initWithTrack:(AVAssetTrack *)track;
@property (readonly, nonatomic) AVAssetTrack *track;
@property (readonly, nonatomic) AVAudioMix *audioMix;
@end
TapProcessor.m
#import "TapProcessor.h"
// TAP CALLBACKS
static void tap_InitCallback(MTAudioProcessingTapRef tap,
                             void *clientInfo,
                             void **tapStorageOut) {
}

static void tap_FinalizeCallback(MTAudioProcessingTapRef tap) {
}

static void tap_PrepareCallback(MTAudioProcessingTapRef tap,
                                CMItemCount maxFrames,
                                const AudioStreamBasicDescription *processingFormat) {
    // For a mono asset this logs 1: the tap is handed a mono processing format.
    NSLog(@"Number of tap channels: %u", (unsigned int)processingFormat->mChannelsPerFrame);
}

static void tap_UnprepareCallback(MTAudioProcessingTapRef tap) {
}

static void tap_ProcessCallback(MTAudioProcessingTapRef tap,
                                CMItemCount numberFrames,
                                MTAudioProcessingTapFlags flags,
                                AudioBufferList *bufferListInOut,
                                CMItemCount *numberFramesOut,
                                MTAudioProcessingTapFlags *flagsOut) {
    // Pass the source audio straight through, forwarding the flags and
    // frame count that MTAudioProcessingTapGetSourceAudio reports.
    MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut,
                                       flagsOut, NULL, numberFramesOut);
}
@implementation TapProcessor

- (id)initWithTrack:(AVAssetTrack *)track {
    self = [super init];
    if (self) {
        _track = track;
    }
    return self;
}

@synthesize audioMix = _audioMix;

- (AVAudioMix *)audioMix {
    if (!_audioMix) {
        AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
        if (audioMix) {
            AVMutableAudioMixInputParameters *audioMixInputParameters =
                [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:self.track];
            if (audioMixInputParameters) {
                MTAudioProcessingTapCallbacks callbacks;
                callbacks.version = kMTAudioProcessingTapCallbacksVersion_0;
                callbacks.clientInfo = (__bridge void *)self;
                callbacks.init = tap_InitCallback;
                callbacks.finalize = tap_FinalizeCallback;
                callbacks.prepare = tap_PrepareCallback;
                callbacks.unprepare = tap_UnprepareCallback;
                callbacks.process = tap_ProcessCallback;

                MTAudioProcessingTapRef audioProcessingTap;
                if (noErr == MTAudioProcessingTapCreate(kCFAllocatorDefault,
                                                        &callbacks,
                                                        kMTAudioProcessingTapCreationFlag_PreEffects,
                                                        &audioProcessingTap)) {
                    audioMixInputParameters.audioTapProcessor = audioProcessingTap;
                    CFRelease(audioProcessingTap);
                    audioMix.inputParameters = @[audioMixInputParameters];
                    _audioMix = audioMix;
                }
            }
        }
    }
    return _audioMix;
}

@end
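And for completeness, this is the shape of the in-tap duplication I tried. It is a sketch only: the copy branch never executes for this mono asset, because the tap always receives a one-buffer AudioBufferList and (as far as I can tell) nothing inside the tap can change that layout:
// Sketch of the failed in-tap approach: duplicate channel 0 into channel 1.
// For a mono source, mNumberBuffers is 1, so the copy below is never reached.
static void tap_ProcessCallback_Stereo(MTAudioProcessingTapRef tap,
                                       CMItemCount numberFrames,
                                       MTAudioProcessingTapFlags flags,
                                       AudioBufferList *bufferListInOut,
                                       CMItemCount *numberFramesOut,
                                       MTAudioProcessingTapFlags *flagsOut) {
    MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut,
                                       flagsOut, NULL, numberFramesOut);
    if (bufferListInOut->mNumberBuffers >= 2) {
        // Non-interleaved float: copy the mono samples into the second channel.
        memcpy(bufferListInOut->mBuffers[1].mData,
               bufferListInOut->mBuffers[0].mData,
               bufferListInOut->mBuffers[0].mDataByteSize);
    }
}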
ViewController.h
#import <UIKit/UIKit.h>
@interface ViewController : UIViewController
@end
ViewController.m
#import "ViewController.h"
#import "MediaPlayer.h"
@interface ViewController ()
@property (nonatomic,strong) MediaPlayer *mediaPlayer;
@end
@implementation ViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    self.mediaPlayer = [[MediaPlayer alloc] init];
}
@end