ARTICLE AD BOX
When I write the code in Swift with `engine.mainMixerNode.outputVolume = 0`, it runs normally and the `renderCallback` function is invoked. However, when I rewrite it in Objective-C with `engine.mainMixerNode.outputVolume = 0`, the `renderCallback` function is never invoked — it only works when I set a volume greater than 0. Why is this?
Here are the Swift and Objective-C code snippets, respectively. Could someone who understands this behavior explain it?
// Swift reference implementation, quoted verbatim from the question.
// NOTE(review): the page scraper collapsed this class onto a single line, so
// the `//这是输入设备device` line comment now comments out everything after it,
// and the class's closing brace is missing from the quote — this snippet is
// incomplete as shown and is kept byte-identical rather than reconstructed.
// What the visible code does: mutes the main mixer, sizes a circular buffer
// from the device's output frame size, sets the input device, connects
// inputNode -> mainMixerNode at Float32/2ch, builds a channel map placing
// `index` in slot 0 (even) or slot 1 (odd), and registers `renderCallback`
// via AudioUnitAddRenderNotify with an unretained `self` pointer.
class Engine { let engine: AVAudioEngine let format: AVAudioFormat var lastSampleTime: Double = -1 var inputBuffer:UnsafeMutableAudioBufferListPointer? var buffer: CircularBuffer<Float> var device: AudioDevice//这是输入设备device var volumeIntensity = 0.0 init (device: AudioDevice,index:Int) { engine = AVAudioEngine() self.device = device // Sink audio into void engine.mainMixerNode.outputVolume = 0 let framesPerSample = device.bufferFrameSize(scope: .output) buffer = CircularBuffer<Float>(channelCount: 2, capacity: Int(framesPerSample!) * 512) engine.setInputDevice(device) format = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: device.nominalSampleRate!, channels: 2, interleaved: false)! engine.connect(engine.inputNode, to: engine.mainMixerNode, format: format) let channels = device.channels(scope: .input) var channelArray:Array<Int32> = Array(repeating: -1, count: channels.intValue) if index % 2 == 0{ channelArray[0] = index.int32Value }else{ channelArray[1] = index.int32Value } guard let audioUnit = engine.inputNode.audioUnit else { return } audioUnit.setPropertyChannelMap(&channelArray, .input, 1) // Render callback checkErr(AudioUnitAddRenderNotify(engine.inputNode.audioUnit!, renderCallback, Unmanaged.passUnretained(self).toOpaque()))}
/// Objective-C counterpart of the Swift `Engine.init`.
/// Builds an AVAudioEngine, routes the input node into the (nearly muted)
/// main mixer, maps a single hardware channel selected by `index`, and
/// installs a render-notify callback on the input node's AudioUnit.
///
/// @param index Hardware channel to capture; even indices map to slot 0
///              (left), odd indices to slot 1 (right) of the channel map.
/// @return The initialized instance, or nil if [super init] fails.
///
/// NOTE(review): unlike the Swift version, which passes an *input* device to
/// `setInputDevice`, this queries the system default *output* device and sets
/// it as the input node's current device — confirm that is intentional, as it
/// is the most likely source of the behavioral difference in the question.
- (instancetype)initWith:(int)index {
    self = [super init];
    if (self) {
        _lastSampleTime = -1;
        _volumeIntensity = 0.0;
        self.engine = [[AVAudioEngine alloc] init];
        // Sink audio into the void. 0.0001 rather than 0: per the question,
        // an output volume of exactly 0 stops the render callback firing.
        _engine.mainMixerNode.outputVolume = 0.0001;

        AudioDeviceID outputDeviceID = kAudioObjectUnknown;
        UInt32 propertySize = sizeof(AudioDeviceID);
        // Property address: the system default output device.
        AudioObjectPropertyAddress propertyAddress = {
            kAudioHardwarePropertyDefaultOutputDevice, // selector: default output device
            kAudioObjectPropertyScopeGlobal,           // scope: global
            kAudioObjectPropertyElementMain            // element: main
        };
        OSStatus status = AudioObjectGetPropertyData(kAudioObjectSystemObject,
                                                     &propertyAddress,
                                                     0, NULL,
                                                     &propertySize,
                                                     &outputDeviceID);
        if (status != noErr) {
            NSLog(@"获取ID失败: %d", status);
        }

        // Bind that device to the input node's underlying HAL audio unit.
        status = AudioUnitSetProperty(_engine.inputNode.audioUnit,
                                      kAudioOutputUnitProperty_CurrentDevice,
                                      kAudioUnitScope_Global,
                                      0,
                                      &outputDeviceID,
                                      sizeof(AudioDeviceID));
        if (status != noErr) {
            NSLog(@"设置为当前设备失败: %d", status);
        }

        Float64 sampleRate = 0.0;
        UInt32 dataSize = sizeof(Float64);
        // FIX: re-assigning a struct requires a compound literal; a bare
        // `propertyAddress = { ... }` does not compile in (Objective-)C.
        propertyAddress = (AudioObjectPropertyAddress){
            kAudioDevicePropertyNominalSampleRate, // property: nominal sample rate
            kAudioDevicePropertyScopeOutput,       // scope: output
            kAudioObjectPropertyElementMain        // element: main
        };
        status = AudioObjectGetPropertyData(outputDeviceID,
                                            &propertyAddress,
                                            0, NULL,
                                            &dataSize,
                                            &sampleRate);
        if (status != noErr) {
            // FIX: log-message typo corrected (采样路 -> 采样率, "sample rate").
            NSLog(@"获取采样率失败: %d", status);
        }

        _format = [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatFloat32
                                                   sampleRate:sampleRate
                                                     channels:2
                                                  interleaved:NO];
        [_engine connect:_engine.inputNode to:_engine.mainMixerNode format:_format];

        // Channel map: route the selected hardware channel — even `index`
        // into slot 0, odd into slot 1; all other slots stay unmapped (-1).
        SInt32 channelMap[12];
        for (UInt32 i = 0; i < 12; ++i) {
            channelMap[i] = -1;
        }
        if (index % 2 == 0) {
            channelMap[0] = index;
        } else {
            channelMap[1] = index;
        }
        status = AudioUnitSetProperty(_engine.inputNode.audioUnit,
                                      kAudioOutputUnitProperty_ChannelMap,
                                      kAudioUnitScope_Input,
                                      1, // element ID
                                      channelMap,
                                      sizeof(channelMap));
        if (status != noErr) {
            NSLog(@"映射失败: %d", status);
        }

        // Render-notify callback. `self` is passed unretained (__bridge):
        // the callback must not outlive this object.
        status = AudioUnitAddRenderNotify(_engine.inputNode.audioUnit,
                                          renderCallback,
                                          (__bridge void *)self);
        if (status != noErr) {
            NSLog(@"设置回调函数失败: %d", status);
        }
    }
    return self;
}
