// Set up an iOS audio-capture pipeline via Native.js (HTML5+ plus.ios bridge):
// create an AVCaptureSession, attach the default microphone as input and an
// AVCaptureAudioDataOutput as output, then start the session.
// NOTE(review): m_capture, delegate and audioConnection are implicit globals —
// presumably so the delegate callback below can reach them; confirm intent.
var AVCaptureSession = plus.ios.importClass("AVCaptureSession");
m_capture = new AVCaptureSession();
// NOTE(review): plus.ios.import returns a class reference, so `error` here is
// the NSError *class*, not an out-parameter instance — verify this is what
// deviceInputWithDeviceerror actually expects on the Native.js bridge.
var error = plus.ios.import("NSError");
// NOTE(review): AVMediaTypeAudio is an Objective-C NSString constant (value
// "soun"), not a class; importing it with plus.ios.import looks suspect —
// TODO confirm against the Native.js documentation.
var AVMediaTypeAudio = plus.ios.import("AVMediaTypeAudio");
var AVCaptureDevice = plus.ios.import("AVCaptureDevice");
// Default microphone device for the audio media type.
var audioDev = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio);
// if(audioDev==null){
// mui.toast("Couldn't create audio capture device");
// return;
// }
var AVCaptureDeviceInput = plus.ios.import("AVCaptureDeviceInput");
// Wrap the device in a capture input; maps to the Objective-C class method
// +[AVCaptureDeviceInput deviceInputWithDevice:error:].
var audioIn = AVCaptureDeviceInput.deviceInputWithDeviceerror(audioDev,error);
// if(error){
// mui.toast("Error getting video input device:"+error.description);
// }
// Only attach the input if the session accepts it (mirrors the ObjC idiom).
if(m_capture.canAddInput(audioIn)){
m_capture.addInput(audioIn);
}
// Configure the capture output — the interface through which we receive audio.
var AVCaptureAudioDataOutput = plus.ios.importClass("AVCaptureAudioDataOutput");
var audioOutput = new AVCaptureAudioDataOutput();
//// audioOutput.setSampleBufferDelegate
// NOTE(review): this builds an NSObject subclass whose only "method" is named
// dispatch_get_main_queue with a string as its body — that does not look like
// a valid AVCaptureAudioDataOutputSampleBufferDelegate (it should implement
// captureOutput:didOutputSampleBuffer:fromConnection:, as the function below
// suggests); verify against the Native.js delegate documentation.
delegate = plus.ios.implements("NSObject",{"dispatch_get_main_queue":'dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL)'});
audioOutput.setSampleBufferDelegatequeue(delegate,"dispatch_get_main_queue");
if(m_capture.canAddOutput(audioOutput)){
m_capture.addOutput(audioOutput);
}
// Keep the audio connection so the delegate callback can identify its buffers.
audioConnection = audioOutput.connectionWithMediaType(AVMediaTypeAudio);
// Start capturing; the delegate is expected to be invoked per sample buffer.
m_capture.startRunning();
// Delegate callback whose name maps (Native.js style) to the Objective-C
// selector captureOutput:didOutputSampleBuffer:fromConnection:.
// Invoked for every captured sample buffer; only audio buffers are handled.
function captureOutputdidOutputSampleBufferfromConnection(captureOutput, sampleBuffer, connection) {
    // Ignore buffers arriving on any connection other than the audio one.
    if (connection != audioConnection) {
        return;
    }
    alert("获取到音频信息"); // "Audio data received"
}

a***@lechuangnet.com
- 发布:2016-07-14 17:45
- 更新:2016-07-15 14:53
- 阅读:1607
DCloud_heavensoft
这个需求主要为了解决什么问题?为何不用HTML5的audio和plus的audio
2016-07-15 22:23
a***@lechuangnet.com (作者)
回复 DCloud_heavensoft:我要获取到麦克风的实时流来做语音通话的功能
2016-07-21 10:00
DCloud_heavensoft
这种实时要求很强的,建议使用webrtc或者原生sdk的方式实现,Native.js效率怕不够
2016-07-21 16:31