WebRTC Audio & Video: Fixing Audio-Induced Memory Growth and Adding Big/Small (Simulcast) Streams
Posted by hbblzjy
1. Continuously growing memory in multi-party calls
On older iPhones (e.g. iPhone 6, 6s and 7), calls with four or more participants caused memory to grow steadily. Profiling with Instruments showed that audioFrame objects were being created very quickly; once the participant count rose, the CPU could no longer keep up, the release of audioFrame objects fell further and further behind, and memory climbed. After reviewing the thread priorities and where each thread is used, the fix was to raise the AudioEncoder thread from NORMAL to HIGH priority; a sketch of the change is shown below.
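A minimal sketch of the kind of change involved, assuming the thread in question is the "AudioEncoder" task queue that WebRTC creates internally for audio encoding (the exact file and constructor arguments differ between WebRTC revisions, and the function name here is only illustrative):

#include "api/task_queue/default_task_queue_factory.h"
#include "api/task_queue/task_queue_factory.h"

// Sketch only: the real fix raises the priority of the "AudioEncoder" queue
// created inside WebRTC from NORMAL to HIGH. This snippet just shows the
// task-queue API involved in creating such a queue at HIGH priority.
void CreateAudioEncoderQueueAtHighPriority() {
  auto factory = webrtc::CreateDefaultTaskQueueFactory();
  auto encoder_queue = factory->CreateTaskQueue(
      "AudioEncoder", webrtc::TaskQueueFactory::Priority::HIGH);
  // The queue would then be handed to the audio encoding pipeline as usual.
}

With the encoder queue running at HIGH priority, audio frames are consumed and released quickly enough that memory no longer builds up in four-plus-party calls.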
The same effect can also be achieved by raising the priority of the other two threads related to video and stream handling, but that causes visible stutter in the video, so raising the audio thread's priority is the better trade-off.
2. Changing the resolutions of the big and small video streams
To handle multi-party calls, the video is sent as two streams of different quality: a big (high-resolution) stream and a small (low-resolution) stream, i.e. simulcast. A typical scenario: a switch enables dual-stream sending, the small stream of every remote participant is subscribed by default, and when one participant needs to be viewed clearly, that participant is switched to the big stream. Used in the right situations this effectively optimizes multi-party video and reduces the resources a multi-party session consumes.
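On the sending side, the application has to request the two encodings. Below is a minimal sketch using the native WebRTC API; the rid values, the scale factor, the bitrate cap and the helper function name are illustrative assumptions rather than the project's real settings. The scale_resolution_down_by value set here is what later drives the == 2/3/4/5 branches added in the patched CreateEncoderStreams:

#include "api/peer_connection_interface.h"
#include "api/rtp_parameters.h"
#include "api/rtp_transceiver_interface.h"

// Sketch: request one full-resolution ("big") encoding and one down-scaled
// ("small") encoding when adding the video track.
void AddSimulcastVideoTrack(
    rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc,
    rtc::scoped_refptr<webrtc::VideoTrackInterface> track) {
  webrtc::RtpEncodingParameters big_layer;
  big_layer.rid = "h";
  big_layer.active = true;

  webrtc::RtpEncodingParameters small_layer;
  small_layer.rid = "l";
  small_layer.active = true;
  small_layer.scale_resolution_down_by = 2.0;  // small stream, the 1:1 case below
  small_layer.max_bitrate_bps = 180 * 1000;

  webrtc::RtpTransceiverInit init;
  init.send_encodings = {big_layer, small_layer};
  pc->AddTransceiver(track, init);
}

Whether the small stream ends up as simulcast layer index 0 or 1 depends on the WebRTC revision and on how the encodings are ordered; the patch below special-cases index 1 as the small stream.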
media/engine/webrtc_video_engine.cc:
std::vector<webrtc::VideoStream> EncoderStreamFactory::CreateEncoderStreams(
    int width,
    int height,
    const webrtc::VideoEncoderConfig& encoder_config) {
  RTC_DCHECK_GT(encoder_config.number_of_streams, 0);
  RTC_DCHECK_GE(encoder_config.simulcast_layers.size(),
                encoder_config.number_of_streams);
  std::vector<webrtc::VideoStream> layers;
  const absl::optional<webrtc::DataRate> experimental_min_bitrate =
      GetExperimentalMinVideoBitrate(encoder_config.codec_type);
  if (encoder_config.number_of_streams > 1 ||
      ((absl::EqualsIgnoreCase(codec_name_, kVp8CodecName) ||
        absl::EqualsIgnoreCase(codec_name_, kH264CodecName)) &&
       is_screenshare_ && conference_mode_)) {
    const bool temporal_layers_supported =
        absl::EqualsIgnoreCase(codec_name_, kVp8CodecName) ||
        absl::EqualsIgnoreCase(codec_name_, kH264CodecName);
    // Use legacy simulcast screenshare if conference mode is explicitly enabled
    // or use the regular simulcast configuration path which is generic.
    layers = GetSimulcastConfig(encoder_config.number_of_streams, width, height,
                                encoder_config.bitrate_priority, max_qp_,
                                is_screenshare_ && conference_mode_,
                                temporal_layers_supported);
    // Custom change: for screen sharing keep only the big stream.
    if (is_screenshare_ && layers.size() == 2) {
      layers.pop_back();
    }
    // Allow an experiment to override the minimum bitrate for the lowest
    // spatial layer. The experiment's configuration has the lowest priority.
    if (experimental_min_bitrate) {
      layers[0].min_bitrate_bps =
          rtc::saturated_cast<int>(experimental_min_bitrate->bps());
    }
    // The maximum |max_framerate| is currently used for video.
    const int max_framerate = GetMaxFramerate(encoder_config, layers.size());
    RTC_LOG(LS_INFO) << "video encode max_framerate " << max_framerate;
    // Update the active simulcast layers and configured bitrates.
    bool is_highest_layer_max_bitrate_configured = false;
    const bool has_scale_resolution_down_by = absl::c_any_of(
        encoder_config.simulcast_layers, [](const webrtc::VideoStream& layer) {
          return layer.scale_resolution_down_by != -1.;
        });
    const int normalized_width =
        NormalizeSimulcastSize(width, encoder_config.number_of_streams);
    const int normalized_height =
        NormalizeSimulcastSize(height, encoder_config.number_of_streams);
    for (size_t i = 0; i < layers.size(); ++i) {
      layers[i].active = encoder_config.simulcast_layers[i].active;
      if (!is_screenshare_) {
        // Update simulcast framerates with max configured max framerate.
        layers[i].max_framerate = max_framerate;
      }
      // Update with configured num temporal layers if supported by codec.
      if (encoder_config.simulcast_layers[i].num_temporal_layers &&
          IsTemporalLayersSupported(codec_name_)) {
        layers[i].num_temporal_layers =
            *encoder_config.simulcast_layers[i].num_temporal_layers;
      }
      if (has_scale_resolution_down_by) {
        const double scale_resolution_down_by = std::max(
            encoder_config.simulcast_layers[i].scale_resolution_down_by, 1.0);
        layers[i].width = std::max(
            static_cast<int>(normalized_width / scale_resolution_down_by),
            kMinLayerSize);
        layers[i].height = std::max(
            static_cast<int>(normalized_height / scale_resolution_down_by),
            kMinLayerSize);
      }
      // Update simulcast bitrates with configured min and max bitrate.
      if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) {
        layers[i].min_bitrate_bps =
            encoder_config.simulcast_layers[i].min_bitrate_bps;
      }
      if (encoder_config.simulcast_layers[i].max_bitrate_bps > 0) {
        layers[i].max_bitrate_bps =
            encoder_config.simulcast_layers[i].max_bitrate_bps;
      }
      if (encoder_config.simulcast_layers[i].target_bitrate_bps > 0) {
        layers[i].target_bitrate_bps =
            encoder_config.simulcast_layers[i].target_bitrate_bps;
      }
      if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0 &&
          encoder_config.simulcast_layers[i].max_bitrate_bps > 0) {
        // Min and max bitrate are configured.
        // Set target to 3/4 of the max bitrate (or to max if below min).
        if (encoder_config.simulcast_layers[i].target_bitrate_bps <= 0)
          layers[i].target_bitrate_bps = layers[i].max_bitrate_bps * 3 / 4;
        if (layers[i].target_bitrate_bps < layers[i].min_bitrate_bps)
          layers[i].target_bitrate_bps = layers[i].max_bitrate_bps;
      } else if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) {
        // Only min bitrate is configured, make sure target/max are above min.
        layers[i].target_bitrate_bps =
            std::max(layers[i].target_bitrate_bps, layers[i].min_bitrate_bps);
        layers[i].max_bitrate_bps =
            std::max(layers[i].max_bitrate_bps, layers[i].min_bitrate_bps);
      } else if (encoder_config.simulcast_layers[i].max_bitrate_bps > 0) {
        // Only max bitrate is configured, make sure min/target are below max.
        layers[i].min_bitrate_bps =
            std::min(layers[i].min_bitrate_bps, layers[i].max_bitrate_bps);
        layers[i].target_bitrate_bps =
            std::min(layers[i].target_bitrate_bps, layers[i].max_bitrate_bps);
      }
      if (i == layers.size() - 1) {
        is_highest_layer_max_bitrate_configured =
            encoder_config.simulcast_layers[i].max_bitrate_bps > 0;
      }
      // Custom change: pin the small stream (layer 1) to fixed bitrates.
      if (i == 1) {
        layers[i].min_bitrate_bps = 50 * 1000;
        layers[i].target_bitrate_bps = 120 * 1000;
        layers[i].max_bitrate_bps = 180 * 1000;
      }
      // Custom change: small-stream resolutions for the aspect ratios
      // 1:1, 4:3, 16:9 and 7:4, selected by scale_resolution_down_by 2/3/4/5.
      if (encoder_config.simulcast_layers[i].scale_resolution_down_by == 2 &&
          i == 1) {
        if (normalized_width >= 160 || normalized_height >= 160) {
          layers[i].width = 160;
          layers[i].height = 160;
        } else {
          layers[i].width = (normalized_width > normalized_height
                                 ? normalized_width
                                 : normalized_height) / 2;
          layers[i].height = (normalized_width > normalized_height
                                  ? normalized_height
                                  : normalized_width) / 2;
        }
      } else if (encoder_config.simulcast_layers[i].scale_resolution_down_by == 3 &&
                 i == 1) {
        if (normalized_width >= 120 || normalized_height >= 120) {
          layers[i].width = normalized_width > normalized_height ? 160 : 120;
          layers[i].height = normalized_width > normalized_height ? 120 : 160;
        } else {
          layers[i].width = (normalized_width > normalized_height
                                 ? normalized_width
                                 : normalized_height) / 2;
          layers[i].height = (normalized_width > normalized_height
                                  ? normalized_height
                                  : normalized_width) / 2;
        }
      } else if (encoder_config.simulcast_layers[i].scale_resolution_down_by == 4 &&
                 i == 1) {
        if (normalized_width >= 160 || normalized_height >= 160) {
          layers[i].width = normalized_width > normalized_height ? 320 : 180;
          layers[i].height = normalized_width > normalized_height ? 180 : 320;
        } else {
          layers[i].width = (normalized_width > normalized_height
                                 ? normalized_width
                                 : normalized_height) / 2;
          layers[i].height = (normalized_width > normalized_height
                                  ? normalized_height
                                  : normalized_width) / 2;
        }
      } else if (encoder_config.simulcast_layers[i].scale_resolution_down_by == 5 &&
                 i == 1) {
        if (normalized_width >= 120 || normalized_height >= 120) {
          layers[i].width = normalized_width > normalized_height ? 210 : 120;
          layers[i].height = normalized_width > normalized_height ? 120 : 210;
        } else {
          layers[i].width = (normalized_width > normalized_height
                                 ? normalized_width
                                 : normalized_height) / 2;
          layers[i].height = (normalized_width > normalized_height
                                  ? normalized_height
                                  : normalized_width) / 2;
        }
      }
      RTC_LOG(LS_INFO) << "video encode normalized_width " << normalized_width
                       << " normalized_height " << normalized_height
                       << " max_fps " << layers[i].max_framerate
                       << " min_bitrate_bps " << layers[i].min_bitrate_bps
                       << " target_bitrate_bps " << layers[i].target_bitrate_bps
                       << " max_bitrate_bps " << layers[i].max_bitrate_bps
                       << " layer width " << layers[i].width
                       << " layer height " << layers[i].height;
    }
    if (!is_screenshare_ && !is_highest_layer_max_bitrate_configured) {
      // No application-configured maximum for the largest layer.
      // If there is bitrate leftover, give it to the largest layer.
      BoostMaxSimulcastLayer(encoder_config.max_bitrate_bps, &layers);
    }
    return layers;
  }

  // For unset max bitrates set default bitrate for non-simulcast.
  int max_bitrate_bps =
      (encoder_config.max_bitrate_bps > 0)
          ? encoder_config.max_bitrate_bps
          : GetMaxDefaultVideoBitrateKbps(width, height, is_screenshare_) *
                1000;
  int min_bitrate_bps =
      experimental_min_bitrate
          ? rtc::saturated_cast<int>(experimental_min_bitrate->bps())
          : webrtc::kDefaultMinVideoBitrateBps;
  if (encoder_config.simulcast_layers[0].min_bitrate_bps > 0) {
    // Use set min bitrate.
    min_bitrate_bps = encoder_config.simulcast_layers[0].min_bitrate_bps;
    // If only min bitrate is configured, make sure max is above min.
    if (encoder_config.max_bitrate_bps <= 0)
      max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps);
  }
  int max_framerate = (encoder_config.simulcast_layers[0].max_framerate > 0)
                          ? encoder_config.simulcast_layers[0].max_framerate
                          : kDefaultVideoMaxFramerate;
  webrtc::VideoStream layer;
  layer.width = width;
  layer.height = height;
  layer.max_framerate = max_framerate;
  if (encoder_config.simulcast_layers[0].scale_resolution_down_by > 1.) {
    layer.width = std::max<size_t>(
        layer.width /
            encoder_config.simulcast_layers[0].scale_resolution_down_by,
        kMinLayerSize);
    layer.height = std::max<size_t>(
        layer.height /
            encoder_config.simulcast_layers[0].scale_resolution_down_by,
        kMinLayerSize);
  }
  // In the case that the application sets a max bitrate that's lower than the
  // min bitrate, we adjust it down (see bugs.webrtc.org/9141).
  layer.min_bitrate_bps = std::min(min_bitrate_bps, max_bitrate_bps);
  if (encoder_config.simulcast_layers[0].target_bitrate_bps <= 0) {
    layer.target_bitrate_bps = max_bitrate_bps;
  } else {
    layer.target_bitrate_bps =
        encoder_config.simulcast_layers[0].target_bitrate_bps;
  }
  layer.max_bitrate_bps = max_bitrate_bps;
  layer.max_qp = max_qp_;
  layer.bitrate_priority = encoder_config.bitrate_priority;
  if (absl::EqualsIgnoreCase(codec_name_, kVp9CodecName)) {
    RTC_DCHECK(encoder_config.encoder_specific_settings);
    // Use VP9 SVC layering from codec settings which might be initialized
    // through field trial in ConfigureVideoEncoderSettings.
    webrtc::VideoCodecVP9 vp9_settings;
    encoder_config.encoder_specific_settings->FillVideoCodecVp9(&vp9_settings);
    layer.num_temporal_layers = vp9_settings.numberOfTemporalLayers;
  }
  if (IsTemporalLayersSupported(codec_name_)) {
    // Use configured number of temporal layers if set.
    if (encoder_config.simulcast_layers[0].num_temporal_layers) {
      layer.num_temporal_layers =
          *encoder_config.simulcast_layers[0].num_temporal_layers;
    }
  }
  layers.push_back(layer);
  return layers;
}
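To make the patched branches concrete, a worked example (the input resolution is only an illustration): with a 1280x720 capture, two simulcast layers, and scale_resolution_down_by == 4 configured on layer 1, the loop above leaves layer 0 at 1280x720 with the usual simulcast bitrates, while layer 1 is forced to 320x180 (the 16:9 case) with min/target/max bitrates of 50/120/180 kbps.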
These notes record tricky problems encountered during development, for future reference.