A. (Part 4) Android multi-client voice communication over UDP
Building on the previous three posts, this time I explored multicast. Many thanks to https://blog.csdn.net/jspping/article/details/64438515 for the groundwork!
Multicast is implemented with MulticastSocket, which works much like DatagramSocket. As before, two threads do the work: a sending thread, MultiSendThread, and a receiving thread, MultiReceiveThread. Without further ado, the code:
(I) MultiSendThread:
(1) Initialize the MulticastSocket
// Local port for the sending socket
try {
    multicastSocket = new MulticastSocket(8082);
    // Class D (multicast) group address; audio is sent to this group on port 10001
    address = InetAddress.getByName("239.0.0.1");
} catch (IOException e) {
    e.printStackTrace();
}
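One optional tweak here, assuming a device will usually both send and receive: multicast packets are looped back to the sending host by default, so a phone would play its own microphone audio back to itself. Loopback can be turned off and the TTL pinned to the local segment (note the inverted meaning of setLoopbackMode: true asks for loopback to be disabled):
try {
    multicastSocket.setLoopbackMode(true); // true = request no loopback of our own packets
    multicastSocket.setTimeToLive(1);      // keep the audio on the local network segment
} catch (IOException e) {
    e.printStackTrace();
}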
(2) Initialize the AudioRecord
protected LinkedList<byte[]> mRecordQueue;
int minBufferSize;
private static AcousticEchoCanceler aec;
private static AutomaticGainControl agc;
private static NoiseSuppressor nc;
AudioRecord audioRec;
byte[] buffer;

@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
private void initAudio() {
    // Playback sample rate; must match the recording sample rate
    int sampleRate = 44100;
    // Same encoding as used for playback
    int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    // Mono input for recording (mono output is used for playback)
    int channelConfig = AudioFormat.CHANNEL_IN_MONO;
    minBufferSize = AudioRecord.getMinBufferSize(
            sampleRate,
            channelConfig,
            audioFormat);
    System.out.println("****RecordMinBufferSize = " + minBufferSize);
    audioRec = new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            sampleRate,
            channelConfig,
            audioFormat,
            minBufferSize);
    buffer = new byte[minBufferSize];
    // The constructor never returns null; check the recorder's state instead
    if (audioRec.getState() != AudioRecord.STATE_INITIALIZED) {
        return;
    }
    // AcousticEchoCanceler: removes the far-end signal that leaks from the
    // speaker back into the captured audio
    if (AcousticEchoCanceler.isAvailable()) {
        aec = AcousticEchoCanceler.create(audioRec.getAudioSessionId());
        if (aec != null) {
            aec.setEnabled(true);
        }
    }
    // AutomaticGainControl: normalizes the level of the captured signal
    if (AutomaticGainControl.isAvailable()) {
        agc = AutomaticGainControl.create(audioRec.getAudioSessionId());
        if (agc != null) {
            agc.setEnabled(true);
        }
    }
    // NoiseSuppressor: removes background noise from the captured signal
    if (NoiseSuppressor.isAvailable()) {
        nc = NoiseSuppressor.create(audioRec.getAudioSessionId());
        if (nc != null) {
            nc.setEnabled(true);
        }
    }
    mRecordQueue = new LinkedList<byte[]>();
}
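For a sense of the network load, the raw PCM rate follows directly from this configuration: 44,100 samples/s × 1 channel × 2 bytes per 16-bit sample, about 86 KiB/s per sender before UDP/IP overhead. A quick sanity check:
// Raw PCM byte rate for 44.1 kHz mono 16-bit audio
int bytesPerSecond = 44100 * 1 * 2;                                    // = 88,200 B/s
System.out.println("PCM rate ~" + (bytesPerSecond / 1024) + " KiB/s"); // ~86 KiB/s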
(3) Start recording and stream it out in real time
@Override
public void run() {
    if (multicastSocket == null)
        return;
    try {
        audioRec.startRecording();
        while (true) {
            try {
                // Read one frame of PCM from the microphone
                int length = audioRec.read(buffer, 0, minBufferSize);
                if (length <= 0) {
                    continue;
                }
                // Queue a copy of exactly the bytes that were read (java.util.Arrays)
                mRecordQueue.add(Arrays.copyOf(buffer, length));
                // Keep a small backlog (two frames) before sending, then drain the
                // oldest frame so the queue cannot grow without bound
                if (mRecordQueue.size() >= 2) {
                    byte[] bytes_pkg = mRecordQueue.poll();
                    // Build the datagram, addressed to the multicast group 239.0.0.1, port 10001
                    DatagramPacket datagramPacket = new DatagramPacket(bytes_pkg, bytes_pkg.length);
                    datagramPacket.setAddress(address);
                    datagramPacket.setPort(10001);
                    multicastSocket.send(datagramPacket);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
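One caveat: the loop above never terminates. A common pattern, sketched here as a hypothetical addition to MultiSendThread rather than anything from the original post, is a cooperative stop flag plus an orderly release:
// Hypothetical additions to MultiSendThread
private volatile boolean running = true;

public void stopSending() {
    running = false;
}
// In run(), use while (running) instead of while (true); after the loop:
// audioRec.stop();
// audioRec.release();
// multicastSocket.close();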
(II) MultiReceiveThread
(1) Initialize the MulticastSocket
// The receiver must bind the port it listens on
try {
    multicastSocket = new MulticastSocket(10001);
    // Resolve the multicast group address
    InetAddress address = InetAddress.getByName("239.0.0.1");
    // Join the group
    multicastSocket.joinGroup(address);
} catch (IOException e) {
    e.printStackTrace();
}
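A practical note: many Android Wi-Fi drivers filter incoming multicast frames unless the app holds a WifiManager.MulticastLock, so without one the receiver may never see a packet. A minimal sketch; the `context` here is assumed to be handed to the thread, and CHANGE_WIFI_MULTICAST_STATE must be declared in the manifest:
WifiManager wifi = (WifiManager) context.getSystemService(Context.WIFI_SERVICE);
WifiManager.MulticastLock multicastLock = wifi.createMulticastLock("voice-chat");
multicastLock.acquire();   // hold while receiving
// ... receive loop runs here ...
// multicastLock.release(); // once done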
(2) Initialize the AudioTrack
byte[] buffer;
AudioTrack audioTrk;

private void initAudioTracker() {
    // Play through the speaker (music stream)
    int streamType = AudioManager.STREAM_MUSIC;
    // Playback sample rate; must match the recording sample rate
    int sampleRate = 44100;
    // Same encoding as used for recording
    int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    // Streaming mode
    int mode = AudioTrack.MODE_STREAM;
    // Mono output for playback (mono input was used for recording)
    int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
    int recBufSize = AudioTrack.getMinBufferSize(
            sampleRate,
            channelConfig,
            audioFormat);
    System.out.println("****playRecBufSize = " + recBufSize);
    audioTrk = new AudioTrack(
            streamType,
            sampleRate,
            channelConfig,
            audioFormat,
            recBufSize,
            mode);
    audioTrk.setStereoVolume(AudioTrack.getMaxVolume(),
            AudioTrack.getMaxVolume());
    buffer = new byte[recBufSize];
}
(3) Start receiving and play back in real time
@Override
public void run() {
    if (multicastSocket == null)
        return;
    audioTrk.play();
    while (true) {
        try {
            // One datagram per audio frame; a packet larger than the buffer is truncated
            DatagramPacket datagramPacket = new DatagramPacket(buffer, buffer.length);
            // receive() blocks until a packet arrives
            multicastSocket.receive(datagramPacket);
            audioTrk.write(datagramPacket.getData(), 0, datagramPacket.getLength());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
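As on the sending side, this loop runs forever; assuming a stop flag like the one sketched for MultiSendThread, an orderly shutdown would leave the group and release the track:
// Hypothetical cleanup once the receive loop stops
try {
    multicastSocket.leaveGroup(InetAddress.getByName("239.0.0.1"));
} catch (IOException e) {
    e.printStackTrace();
}
multicastSocket.close();
audioTrk.stop();
audioTrk.release();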
(III) Testing
MultiSendThread multiSendThread;
MultiReceiveThread multiReceiveThread;

@OnClick({R.id.btnSend, R.id.btnReceive})
public void onViewClicked(View view) {
    switch (view.getId()) {
        case R.id.btnSend:
            if (multiSendThread == null) {
                multiSendThread = new MultiSendThread();
            }
            new Thread(multiSendThread).start();
            break;
        case R.id.btnReceive:
            if (multiReceiveThread == null) {
                multiReceiveThread = new MultiReceiveThread();
            }
            new Thread(multiReceiveThread).start();
            break;
    }
}
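Before testing, the manifest needs at least the following permissions (CHANGE_WIFI_MULTICAST_STATE only if you take the MulticastLock suggested above; on Android 6.0+ RECORD_AUDIO must additionally be requested at runtime):
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.CHANGE_WIFI_MULTICAST_STATE" />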
B. Has anyone used the AcousticEchoCanceler echo cancellation added in the Android 4.1 API?
A quick memo on how to use AcousticEchoCanceler:
1) Check whether the current device supports AEC. Note that this check is not necessarily accurate.
public static boolean isDeviceSupport()
{
    return AcousticEchoCanceler.isAvailable();
}
2) Create and enable the AEC.
private AcousticEchoCanceler canceler;

public boolean initAEC(int audioSession)
{
    if (canceler != null)
    {
        return false;
    }
    canceler = AcousticEchoCanceler.create(audioSession);
    if (canceler == null)
    {
        // create() returns null when AEC cannot be attached to this session
        return false;
    }
    canceler.setEnabled(true);
    return canceler.getEnabled();
}
3) Enable/disable the AEC.
public boolean setAECEnabled(boolean enable)
{
    if (null == canceler)
    {
        return false;
    }
    canceler.setEnabled(enable);
    return canceler.getEnabled();
}
4) Release the AEC.
public boolean release()
{
    if (null == canceler)
    {
        return false;
    }
    canceler.setEnabled(false);
    canceler.release();
    canceler = null; // allow initAEC to be called again later
    return true;
}
AcousticEchoCanceler needs an audio session id at creation time. A quick memo on how the calling layer obtains and passes it:
1) When constructing the AudioRecord, the first parameter (the audio source) needs special handling.
if (chkNewDev())
{
    audioRecord = new AudioRecord(MediaRecorder.AudioSource.VOICE_COMMUNICATION, frequency, channelIN, audioEncoding, tmpSize);
}
else
{
    audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, frequency, channelIN, audioEncoding, tmpSize);
}
2) Once the audioRecord has been created, call
audioRecord.getAudioSessionId()
to obtain the corresponding session id.
3) When constructing the AudioTrack, the session id also needs extra handling.
if (chkNewDev() && audioRecord != null)
{
    audioTrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL, frequency, channelOUT, audioEncoding, tmpSize, AudioTrack.MODE_STREAM, audioRecord.getAudioSessionId());
}
else
{
    audioTrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL, frequency, channelOUT, audioEncoding, tmpSize, AudioTrack.MODE_STREAM);
}
Also, because the API requires a minimum platform version, devices that do not support it must be handled:
public static boolean chkNewDev()
{
    return android.os.Build.VERSION.SDK_INT >= 16;
}
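Putting the pieces together, a minimal calling sequence might look like this sketch; frequency, channelIN, and audioEncoding are the same hypothetical locals assumed in the snippets above:
// Sketch: attach the AEC to the recording session
int tmpSize = AudioRecord.getMinBufferSize(frequency, channelIN, audioEncoding);
audioRecord = chkNewDev()
        ? new AudioRecord(MediaRecorder.AudioSource.VOICE_COMMUNICATION,
                frequency, channelIN, audioEncoding, tmpSize)
        : new AudioRecord(MediaRecorder.AudioSource.MIC,
                frequency, channelIN, audioEncoding, tmpSize);

if (isDeviceSupport()) {
    initAEC(audioRecord.getAudioSessionId()); // enable AEC on this session
}
// ... record / play ...
release(); // free the AEC when the call ends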
Permission:
<uses-permission android:name="android.permission.RECORD_AUDIO" />
Summary:
1) The AcousticEchoCanceler API added in newer Android versions makes it very quick to build VoIP-style echo cancellation. Considering the variation across device models, though, a third-party echo canceler is still needed as a fallback. Two main recommendations: the AEC/AECM inside webrtc, and speex.
The author has successfully used webrtc's echo cancellation in a project, and the results felt acceptable.
2) The code above was written purely from the official documentation; since references are scarce, its correctness cannot be guaranteed.
(Reposted)
C. Is AudioTrack suitable for webrtc echo cancellation on Android?
Yes, AudioTrack works fine with webrtc echo cancellation on Android. Recommended: ZEGO, with products covering real-time audio/video, real-time voice, interactive live streaming, and instant messaging.
WebRTC's code layout is clear: under the "webrtc\modules\audio_processing\aec" directory you can find the GIPS AEC source files used for echo processing. By tracing the WebRTC code each AEC source file depends on, you can identify all the WebRTC sources and headers the echo-processing module needs, and thus extract the AEC from WebRTC for standalone use. For convenience, the extracted code is split into two modules: a general audio-processing module, WebRTC_AUDIO, and a GIPS-AEC module. The WebRTC_AUDIO module contains the WebRTC audio-processing sources and headers that the AEC files depend on, while the GIPS-AEC module holds the WebRTC AEC sources dedicated to echo processing. The GIPS-AEC module performs the echo cancellation on top of the WebRTC_AUDIO module.
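To make the far-end/near-end plumbing concrete, here is a minimal sketch of how the two modules could be driven from Java. Everything named WebRtcAecBridge below is a hypothetical JNI wrapper you would have to write around the extracted GIPS-AEC sources; only AudioRecord and AudioTrack are real Android APIs.
// Hypothetical JNI bridge compiled from the WebRTC_AUDIO + GIPS-AEC modules;
// these native method names do not exist in WebRTC itself.
public class WebRtcAecBridge {
    static { System.loadLibrary("webrtc_aec_bridge"); } // hypothetical library name

    public native void create(int sampleRateHz);
    // Far end: the frame we are about to play, i.e. the echo source
    public native void bufferFarend(short[] frame, int numSamples);
    // Near end: the microphone frame; writes the echo-cancelled result to out
    public native void process(short[] nearend, short[] out, int numSamples);
    public native void destroy();
}
In the audio loop, each frame written to the AudioTrack is first passed to bufferFarend, and each frame read from the AudioRecord goes through process before being encoded and sent.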
For more information on this topic, ZEGO is recommended. ZEGO is a global cloud communications provider focused on its self-developed audio/video engine, serving 500 million end users across 212 countries/regions, with millisecond-level interaction latency, roughly 3 billion minutes of calls per day, full industry solutions covering over a hundred business scenarios, and some 4000 customers, including 70% of pan-entertainment/online-education clients.
D. How to implement video calling in Android development
/**
 * Android video chat
 * 1. Initialize the SDK; 2. connect to the server; 3. log the user in;
 * 4. enter a room; 5. open local video; 6. request the peer's video.
 */
public class VideoChatActivity extends Activity implements AnyChatBaseEvent
{
    private AnyChatCoreSDK anychat;            // core SDK
    private SurfaceView remoteSurfaceView;     // remote video
    private SurfaceView localSurfaceView;      // local video
    private ConfigEntity configEntity;
    private boolean bSelfVideoOpened = false;  // whether local video is open
    private boolean bOtherVideoOpened = false; // whether remote video is open
    private TimerTask mTimerTask;              // timer task
    private Timer mTimer = new Timer(true);
    private Handler handler;                   // Handler used to keep refreshing the live video
    private List<String> userlist = new ArrayList<String>(); // online user list
    private int userid;                        // user ID

    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_video_chat);
        remoteSurfaceView = (SurfaceView) findViewById(R.id.surface_remote);
        localSurfaceView = (SurfaceView) findViewById(R.id.surface_local);
        configEntity = ConfigService.LoadConfig(this); // load the video-call settings
        loginSystem(); // initialize the SDK and connect to the server
        mTimerTask = new TimerTask(){
            public void run(){
                Message message = new Message();
                handler.sendMessage(message);
            }
        };
        mTimer.schedule(mTimerTask, 1000, 100);
        handler = new Handler(){
            @Override
            public void handleMessage(Message msg){
                VideoChat(); // keep refreshing the live video frames
                super.handleMessage(msg);
            }
        };
    }
    // Initialize the SDK and connect to the server
    private void loginSystem(){
        if (anychat == null){
            anychat = new AnyChatCoreSDK();
            anychat.SetBaseEvent(this); // register the basic event callbacks
            if (configEntity.useARMv6Lib != 0) // use the ARMv6 instruction-set library
                anychat.SetSDKOptionInt(AnyChatDefine.
                        BRAC_SO_CORESDK_USEARMV6LIB, 1);
            anychat.InitSDK(android.os.Build.VERSION.SDK_INT, 0); // initialize the SDK
        }
        anychat.Connect("demo.anychat.cn", 8906); // connect to the server
    }
    // Display the live video frames
    public void VideoChat(){
        if (!bOtherVideoOpened){
            if (anychat.GetCameraState(userid) == 2
                    && anychat.GetUserVideoWidth(userid) != 0){
                SurfaceHolder holder = remoteSurfaceView.getHolder();
                holder.setFormat(PixelFormat.RGB_565);
                holder.setFixedSize(anychat.GetUserVideoWidth(userid),
                        anychat.GetUserVideoHeight(userid));
                Surface s = holder.getSurface(); // get the video surface
                anychat.SetVideoPos(userid, s, 0, 0, 0, 0); // let the SDK render into it
                bOtherVideoOpened = true;
            }
        }
        if (!bSelfVideoOpened){
            if (anychat.GetCameraState(-1) == 2
                    && anychat.GetUserVideoWidth(-1) != 0){
                SurfaceHolder holder = localSurfaceView.getHolder();
                holder.setFormat(PixelFormat.RGB_565);
                holder.setFixedSize(anychat.GetUserVideoWidth(-1),
                        anychat.GetUserVideoHeight(-1));
                Surface s = holder.getSurface();
                anychat.SetVideoPos(-1, s, 0, 0, 0, 0);
                bSelfVideoOpened = true;
            }
        }
    }
    public void OnAnyChatConnectMessage(boolean bSuccess){
        if (!bSuccess){
            Toast.makeText(VideoChatActivity.this, "Failed to connect to the server; retrying, please wait...", Toast.LENGTH_SHORT).show();
            return; // wait for the next (successful) connect callback
        }
        anychat.Login("android", ""); // connection succeeded; log the user in
    }
    public void OnAnyChatLoginMessage(int dwUserId, int dwErrorCode){
        if (dwErrorCode == 0) {
            Toast.makeText(this, "Logged in!", Toast.LENGTH_SHORT).show();
            anychat.EnterRoom(1, ""); // login succeeded; enter the room
            ApplyVideoConfig();
        } else {
            Toast.makeText(this, "Login failed, error code: " + dwErrorCode, Toast.LENGTH_SHORT).show();
        }
    }

    public void OnAnyChatEnterRoomMessage(int dwRoomId, int dwErrorCode){
        if (dwErrorCode == 0) { // entered the room; open local audio and video
            Toast.makeText(this, "Entered the room", Toast.LENGTH_SHORT).show();
            anychat.UserCameraControl(-1, 1); // open local video
            anychat.UserSpeakControl(-1, 1);  // open local audio
        } else {
            Toast.makeText(this, "Failed to enter the room, error code: " + dwErrorCode, Toast.LENGTH_SHORT).show();
        }
    }
    public void OnAnyChatOnlineUserMessage(int dwUserNum, int dwRoomId){
        if (dwRoomId == 1){
            int user[] = anychat.GetOnlineUser();
            if (user.length != 0){
                for (int i = 0; i < user.length; i++){
                    userlist.add(user[i] + "");
                }
                String temp = userlist.get(0);
                userid = Integer.parseInt(temp);
                anychat.UserCameraControl(userid, 1); // request the user's video
                anychat.UserSpeakControl(userid, 1);  // request the user's audio
            }
            else {
                Toast.makeText(VideoChatActivity.this, "No users online", Toast.LENGTH_SHORT).show();
            }
        }
    }
    public void OnAnyChatUserAtRoomMessage(int dwUserId, boolean bEnter){
        if (bEnter) { // a new user entered the room
            userlist.add(dwUserId + "");
        }
        else { // a user left the room
            if (dwUserId == userid)
            {
                Toast.makeText(VideoChatActivity.this, "The video user has gone offline", Toast.LENGTH_SHORT).show();
                anychat.UserCameraControl(userid, 0); // close that user's video
                anychat.UserSpeakControl(userid, 0);  // close that user's audio
                userlist.remove(userid + ""); // remove the user
                if (userlist.size() != 0)
                {
                    String temp = userlist.get(0);
                    userid = Integer.parseInt(temp);
                    anychat.UserCameraControl(userid, 1); // request another user's video
                    anychat.UserSpeakControl(userid, 1);  // request another user's audio
                }
            }
            else {
                userlist.remove(dwUserId + ""); // remove the user
            }
        }
    }
    public void OnAnyChatLinkCloseMessage(int dwErrorCode){
        Toast.makeText(VideoChatActivity.this, "Connection closed, error: " + dwErrorCode, Toast.LENGTH_SHORT).show();
    }

    @Override
    protected void onDestroy(){ // app exit
        anychat.LeaveRoom(-1); // leave the room
        anychat.Logout();      // log out
        anychat.Release();     // release resources
        mTimer.cancel();
        super.onDestroy();
    }
    // Apply video parameters from the config file
    private void ApplyVideoConfig(){
        if (configEntity.configMode == 1) // custom video parameters
        {
            // Local video bitrate (0 means quality-priority mode)
            anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_BITRATECTRL, configEntity.videoBitrate);
            if (configEntity.videoBitrate == 0)
            {
                // Local video encoding quality
                anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_QUALITYCTRL, configEntity.videoQuality);
            }
            // Local video frame rate
            anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_FPSCTRL, configEntity.videoFps);
            // Key-frame (GOP) interval
            anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_GOPCTRL, configEntity.videoFps * 4);
            // Local capture resolution
            anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_WIDTHCTRL, configEntity.resolution_width);
            anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_HEIGHTCTRL, configEntity.resolution_height);
            // Encoder preset (higher values give better quality but use more CPU)
            anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_PRESETCTRL, configEntity.videoPreset);
        }
        // Apply the video parameters
        anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_APPLYPARAM, configEntity.configMode);
        // P2P policy
        anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_NETWORK_P2PPOLITIC, configEntity.enableP2P);
        // Local video overlay mode
        anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_OVERLAY, configEntity.videoOverlay);
        // Echo cancellation
        anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_AUDIO_ECHOCTRL, configEntity.enableAEC);
        // Platform hardware codec
        anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_CORESDK_USEHWCODEC, configEntity.useHWCodec);
        // Video rotation mode
        anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_ROTATECTRL, configEntity.videorotatemode);
        // Smooth-playback mode
        anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_STREAM_SMOOTHPLAYMODE, configEntity.smoothPlayMode);
        // Video capture driver
        anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_CAPDRIVER, configEntity.videoCapDriver);
        // Color-cast correction for local capture
        anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_LOCALVIDEO_FIXCOLORDEVIA, configEntity.fixcolordeviation);
        // Video display driver
        anychat.SetSDKOptionInt(AnyChatDefine.BRAC_SO_VIDEOSHOW_DRIVERCTRL, configEntity.videoShowDriver);
    }
}
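As with the audio demos above, none of this runs without permissions; a video call needs at least the following in the manifest (exact requirements depend on the AnyChat SDK version, so treat this as a baseline):
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />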