* Adjust bitrate and fps based on TestDelay messages.
* Bitrate is adjusted every 3 seconds; fps is adjusted every second and whenever a TestDelay response arrives.
* Latency is optimized at high resolutions. However, on a poor network the delay right after connecting, or when scrolling static pages, is still noticeable.
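
The gist of the change is that the encoders no longer receive the old Quality enum: they get a single bitrate ratio (f32) and derive both the target bitrate and the quantizer range from it. Below is a rough, self-contained sketch of that mapping. The interpolation endpoints are the VP8/VP9 ones from the diff (the AV1 path uses 24/5 and 45/25 instead), and base_bitrate_kbps is a simplified stand-in for scrap::codec::base_bitrate, which in the real code scales from the nearest resolution preset.

// Sketch only: mirrors VpxEncoder::calc_q_values / bitrate from the diff below.
fn base_bitrate_kbps(width: u32, height: u32) -> u32 {
    // simplified: real base_bitrate picks the closest resolution preset
    ((width * height) as f32 * 2073.0 / (1920.0 * 1080.0)).round() as u32
}

fn target_bitrate_kbps(width: u32, height: u32, ratio: f32) -> u32 {
    (base_bitrate_kbps(width, height) as f32 * ratio) as u32
}

// Linearly interpolate the VP8/VP9 quantizer bounds as the ratio grows toward 2.0.
fn calc_q_values(ratio: f32) -> (u32, u32) {
    let b = ((ratio * 100.0) as u32).min(200);
    let (q_min1, q_min2, q_max1, q_max2) = (36u32, 0u32, 56u32, 37u32);
    let t = b as f32 / 200.0;
    let q_min = ((1.0 - t) * q_min1 as f32 + t * q_min2 as f32).round() as u32;
    let q_max = ((1.0 - t) * q_max1 as f32 + t * q_max2 as f32).round() as u32;
    (q_min.clamp(q_min2, q_min1), q_max.clamp(q_max2, q_max1))
}

fn main() {
    // BR_SPEED, BR_BALANCED, a custom 1.0, BR_BEST
    for ratio in [0.5f32, 0.67, 1.0, 1.5] {
        println!(
            "ratio {:.2}: ~{} kbps at 1080p, q range {:?}",
            ratio,
            target_bitrate_kbps(1920, 1080, ratio),
            calc_q_values(ratio)
        );
    }
}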

Signed-off-by: 21pages <sunboeasy@gmail.com>
21pages 2025-01-20 17:59:36 +08:00 committed by GitHub
parent c44803f5b0
commit 5fa8c25e65
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 735 additions and 548 deletions

View file

@ -5,7 +5,7 @@ use hbb_common::{
};
use scrap::{
aom::{AomDecoder, AomEncoder, AomEncoderConfig},
codec::{EncoderApi, EncoderCfg, Quality as Q},
codec::{EncoderApi, EncoderCfg},
Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig,
VpxVideoCodecId::{self, *},
STRIDE_ALIGN,
@ -27,25 +27,17 @@ Usage:
Options:
-h --help Show this screen.
--count=COUNT Capture frame count [default: 100].
--quality=QUALITY Video quality [default: Balanced].
Valid values: Best, Balanced, Low.
--quality=QUALITY Video quality [default: 1.0].
--i444 I444.
";
#[derive(Debug, serde::Deserialize, Clone, Copy)]
struct Args {
flag_count: usize,
flag_quality: Quality,
flag_quality: f32,
flag_i444: bool,
}
#[derive(Debug, serde::Deserialize, Clone, Copy)]
enum Quality {
Best,
Balanced,
Low,
}
fn main() {
init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info"));
let args: Args = Docopt::new(USAGE)
@ -70,11 +62,6 @@ fn main() {
"benchmark {}x{} quality:{:?}, i444:{:?}",
width, height, quality, args.flag_i444
);
let quality = match quality {
Quality::Best => Q::Best,
Quality::Balanced => Q::Balanced,
Quality::Low => Q::Low,
};
[VP8, VP9].map(|codec| {
test_vpx(
&mut c,
@ -98,7 +85,7 @@ fn test_vpx(
codec_id: VpxVideoCodecId,
width: usize,
height: usize,
quality: Q,
quality: f32,
yuv_count: usize,
i444: bool,
) {
@ -177,7 +164,7 @@ fn test_av1(
c: &mut Capturer,
width: usize,
height: usize,
quality: Q,
quality: f32,
yuv_count: usize,
i444: bool,
) {
@ -247,7 +234,7 @@ mod hw {
use super::*;
pub fn test(c: &mut Capturer, width: usize, height: usize, quality: Q, yuv_count: usize) {
pub fn test(c: &mut Capturer, width: usize, height: usize, quality: f32, yuv_count: usize) {
let mut h264s = Vec::new();
let mut h265s = Vec::new();
if let Some(info) = HwRamEncoder::try_get(CodecFormat::H264) {
@ -263,7 +250,7 @@ mod hw {
fn test_encoder(
width: usize,
height: usize,
quality: Q,
quality: f32,
info: CodecInfo,
c: &mut Capturer,
yuv_count: usize,

View file

@ -13,7 +13,7 @@ use std::time::{Duration, Instant};
use std::{io, thread};
use docopt::Docopt;
use scrap::codec::{EncoderApi, EncoderCfg, Quality as Q};
use scrap::codec::{EncoderApi, EncoderCfg};
use webm::mux;
use webm::mux::Track;
@ -31,8 +31,7 @@ Options:
-h --help Show this screen.
--time=<s> Recording duration in seconds.
--fps=<fps> Frames per second [default: 30].
--quality=<quality> Video quality [default: Balanced].
Valid values: Best, Balanced, Low.
--quality=<quality> Video quality [default: 1.0].
--ba=<kbps> Audio bitrate in kilobits per second [default: 96].
--codec CODEC Configure the codec used. [default: vp9]
Valid values: vp8, vp9.
@ -44,14 +43,7 @@ struct Args {
flag_codec: Codec,
flag_time: Option<u64>,
flag_fps: u64,
flag_quality: Quality,
}
#[derive(Debug, serde::Deserialize)]
enum Quality {
Best,
Balanced,
Low,
flag_quality: f32,
}
#[derive(Debug, serde::Deserialize)]
@ -105,11 +97,7 @@ fn main() -> io::Result<()> {
let mut vt = webm.add_video_track(width, height, None, mux_codec);
// Setup the encoder.
let quality = match args.flag_quality {
Quality::Best => Q::Best,
Quality::Balanced => Q::Balanced,
Quality::Low => Q::Low,
};
let quality = args.flag_quality;
let mut vpx = vpx_encode::VpxEncoder::new(
EncoderCfg::VPX(vpx_encode::VpxEncoderConfig {
width,

View file

@ -6,7 +6,7 @@
include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));
use crate::codec::{base_bitrate, codec_thread_num, Quality};
use crate::codec::{base_bitrate, codec_thread_num};
use crate::{codec::EncoderApi, EncodeFrame, STRIDE_ALIGN};
use crate::{common::GoogleImage, generate_call_macro, generate_call_ptr_macro, Error, Result};
use crate::{EncodeInput, EncodeYuvFormat, Pixfmt};
@ -45,7 +45,7 @@ impl Default for aom_image_t {
pub struct AomEncoderConfig {
pub width: u32,
pub height: u32,
pub quality: Quality,
pub quality: f32,
pub keyframe_interval: Option<usize>,
}
@ -62,15 +62,9 @@ mod webrtc {
use super::*;
const kUsageProfile: u32 = AOM_USAGE_REALTIME;
const kMinQindex: u32 = 145; // Min qindex threshold for QP scaling.
const kMaxQindex: u32 = 205; // Max qindex threshold for QP scaling.
const kBitDepth: u32 = 8;
const kLagInFrames: u32 = 0; // No look ahead.
pub(super) const kTimeBaseDen: i64 = 1000;
const kMinimumFrameRate: f64 = 1.0;
pub const DEFAULT_Q_MAX: u32 = 56; // no more than 63
pub const DEFAULT_Q_MIN: u32 = 12; // no more than 63, litter than q_max
// Only positive speeds, range for real-time coding currently is: 6 - 8.
// Lower means slower/better quality, higher means fastest/lower quality.
@ -116,21 +110,10 @@ mod webrtc {
} else {
c.kf_mode = aom_kf_mode::AOM_KF_DISABLED;
}
let (q_min, q_max, b) = AomEncoder::convert_quality(cfg.quality);
if q_min > 0 && q_min < q_max && q_max < 64 {
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
} else {
c.rc_min_quantizer = DEFAULT_Q_MIN;
c.rc_max_quantizer = DEFAULT_Q_MAX;
}
let base_bitrate = base_bitrate(cfg.width as _, cfg.height as _);
let bitrate = base_bitrate * b / 100;
if bitrate > 0 {
c.rc_target_bitrate = bitrate;
} else {
c.rc_target_bitrate = base_bitrate;
}
let (q_min, q_max) = AomEncoder::calc_q_values(cfg.quality);
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
c.rc_target_bitrate = AomEncoder::bitrate(cfg.width as _, cfg.height as _, cfg.quality);
c.rc_undershoot_pct = 50;
c.rc_overshoot_pct = 50;
c.rc_buf_initial_sz = 600;
@ -273,17 +256,12 @@ impl EncoderApi for AomEncoder {
false
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
fn set_quality(&mut self, ratio: f32) -> ResultType<()> {
let mut c = unsafe { *self.ctx.config.enc.to_owned() };
let (q_min, q_max, b) = Self::convert_quality(quality);
if q_min > 0 && q_min < q_max && q_max < 64 {
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
}
let bitrate = base_bitrate(self.width as _, self.height as _) * b / 100;
if bitrate > 0 {
c.rc_target_bitrate = bitrate;
}
let (q_min, q_max) = Self::calc_q_values(ratio);
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
c.rc_target_bitrate = Self::bitrate(self.width as _, self.height as _, ratio);
call_aom!(aom_codec_enc_config_set(&mut self.ctx, &c));
Ok(())
}
@ -293,10 +271,6 @@ impl EncoderApi for AomEncoder {
c.rc_target_bitrate
}
fn support_abr(&self) -> bool {
true
}
fn support_changing_quality(&self) -> bool {
true
}
@ -370,31 +344,27 @@ impl AomEncoder {
}
}
pub fn convert_quality(quality: Quality) -> (u32, u32, u32) {
// we can use lower bitrate for av1
match quality {
Quality::Best => (12, 25, 100),
Quality::Balanced => (12, 35, 100 * 2 / 3),
Quality::Low => (18, 45, 50),
Quality::Custom(b) => {
let (q_min, q_max) = Self::calc_q_values(b);
(q_min, q_max, b)
}
}
fn bitrate(width: u32, height: u32, ratio: f32) -> u32 {
let bitrate = base_bitrate(width, height) as f32;
(bitrate * ratio) as u32
}
#[inline]
fn calc_q_values(b: u32) -> (u32, u32) {
fn calc_q_values(ratio: f32) -> (u32, u32) {
let b = (ratio * 100.0) as u32;
let b = std::cmp::min(b, 200);
let q_min1: i32 = 24;
let q_min1 = 24;
let q_min2 = 5;
let q_max1 = 45;
let q_max2 = 25;
let t = b as f32 / 200.0;
let q_min: u32 = ((1.0 - t) * q_min1 as f32 + t * q_min2 as f32).round() as u32;
let q_max = ((1.0 - t) * q_max1 as f32 + t * q_max2 as f32).round() as u32;
let mut q_min: u32 = ((1.0 - t) * q_min1 as f32 + t * q_min2 as f32).round() as u32;
let mut q_max = ((1.0 - t) * q_max1 as f32 + t * q_max2 as f32).round() as u32;
q_min = q_min.clamp(q_min2, q_min1);
q_max = q_max.clamp(q_max2, q_max1);
(q_min, q_max)
}

View file

@ -62,12 +62,10 @@ pub trait EncoderApi {
#[cfg(feature = "vram")]
fn input_texture(&self) -> bool;
fn set_quality(&mut self, quality: Quality) -> ResultType<()>;
fn set_quality(&mut self, ratio: f32) -> ResultType<()>;
fn bitrate(&self) -> u32;
fn support_abr(&self) -> bool;
fn support_changing_quality(&self) -> bool;
fn latency_free(&self) -> bool;
@ -882,12 +880,16 @@ pub fn enable_directx_capture() -> bool {
)
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub const BR_BEST: f32 = 1.5;
pub const BR_BALANCED: f32 = 0.67;
pub const BR_SPEED: f32 = 0.5;
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Quality {
Best,
Balanced,
Low,
Custom(u32),
Custom(f32),
}
impl Default for Quality {
@ -903,22 +905,59 @@ impl Quality {
_ => false,
}
}
pub fn ratio(&self) -> f32 {
match self {
Quality::Best => BR_BEST,
Quality::Balanced => BR_BALANCED,
Quality::Low => BR_SPEED,
Quality::Custom(v) => *v,
}
}
}
pub fn base_bitrate(width: u32, height: u32) -> u32 {
#[allow(unused_mut)]
let mut base_bitrate = ((width * height) / 1000) as u32; // same as 1.1.9
if base_bitrate == 0 {
base_bitrate = 1920 * 1080 / 1000;
}
const RESOLUTION_PRESETS: &[(u32, u32, u32)] = &[
(640, 480, 400), // VGA, 307k pixels
(800, 600, 500), // SVGA, 480k pixels
(1024, 768, 800), // XGA, 786k pixels
(1280, 720, 1000), // 720p, 921k pixels
(1366, 768, 1100), // HD, 1049k pixels
(1440, 900, 1300), // WXGA+, 1296k pixels
(1600, 900, 1500), // HD+, 1440k pixels
(1920, 1080, 2073), // 1080p, 2073k pixels
(2048, 1080, 2200), // 2K DCI, 2211k pixels
(2560, 1440, 3000), // 2K QHD, 3686k pixels
(3440, 1440, 4000), // UWQHD, 4953k pixels
(3840, 2160, 5000), // 4K UHD, 8294k pixels
(7680, 4320, 12000), // 8K UHD, 33177k pixels
];
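// Pick the preset whose pixel count is closest to the actual resolution, then
// scale its bitrate linearly by the ratio of actual pixels to preset pixels.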
let pixels = width * height;
let (preset_pixels, preset_bitrate) = RESOLUTION_PRESETS
.iter()
.map(|(w, h, bitrate)| (w * h, bitrate))
.min_by_key(|(preset_pixels, _)| {
if *preset_pixels >= pixels {
preset_pixels - pixels
} else {
pixels - preset_pixels
}
})
.unwrap_or(((1920 * 1080) as u32, &2073)); // default 1080p
let bitrate = (*preset_bitrate as f32 * (pixels as f32 / preset_pixels as f32)).round() as u32;
#[cfg(target_os = "android")]
{
// fix when android screen shrinks
let fix = crate::Display::fix_quality() as u32;
log::debug!("Android screen, fix quality:{}", fix);
base_bitrate = base_bitrate * fix;
bitrate * fix
}
#[cfg(not(target_os = "android"))]
{
bitrate
}
base_bitrate
}
pub fn codec_thread_num(limit: usize) -> usize {
@ -1001,8 +1040,7 @@ pub fn test_av1() {
static ONCE: Once = Once::new();
ONCE.call_once(|| {
let f = || {
let (width, height, quality, keyframe_interval, i444) =
(1920, 1080, Quality::Balanced, None, false);
let (width, height, quality, keyframe_interval, i444) = (1920, 1080, 1.0, None, false);
let frame_count = 10;
let block_size = 300;
let move_step = 50;

View file

@ -1,7 +1,5 @@
use crate::{
codec::{
base_bitrate, codec_thread_num, enable_hwcodec_option, EncoderApi, EncoderCfg, Quality as Q,
},
codec::{base_bitrate, codec_thread_num, enable_hwcodec_option, EncoderApi, EncoderCfg},
convert::*,
CodecFormat, EncodeInput, ImageFormat, ImageRgb, Pixfmt, HW_STRIDE_ALIGN,
};
@ -47,7 +45,7 @@ pub struct HwRamEncoderConfig {
pub mc_name: Option<String>,
pub width: usize,
pub height: usize,
pub quality: Q,
pub quality: f32,
pub keyframe_interval: Option<usize>,
}
@ -67,12 +65,8 @@ impl EncoderApi for HwRamEncoder {
match cfg {
EncoderCfg::HWRAM(config) => {
let rc = Self::rate_control(&config);
let b = Self::convert_quality(&config.name, config.quality);
let base_bitrate = base_bitrate(config.width as _, config.height as _);
let mut bitrate = base_bitrate * b / 100;
if bitrate <= 0 {
bitrate = base_bitrate;
}
let mut bitrate =
Self::bitrate(&config.name, config.width, config.height, config.quality);
bitrate = Self::check_bitrate_range(&config, bitrate);
let gop = config.keyframe_interval.unwrap_or(DEFAULT_GOP as _) as i32;
let ctx = EncodeContext {
@ -176,15 +170,19 @@ impl EncoderApi for HwRamEncoder {
false
}
fn set_quality(&mut self, quality: crate::codec::Quality) -> ResultType<()> {
let b = Self::convert_quality(&self.config.name, quality);
let mut bitrate = base_bitrate(self.config.width as _, self.config.height as _) * b / 100;
fn set_quality(&mut self, ratio: f32) -> ResultType<()> {
let mut bitrate = Self::bitrate(
&self.config.name,
self.config.width,
self.config.height,
ratio,
);
if bitrate > 0 {
bitrate = Self::check_bitrate_range(&self.config, bitrate);
self.encoder.set_bitrate(bitrate as _).ok();
self.bitrate = bitrate;
}
self.config.quality = quality;
self.config.quality = ratio;
Ok(())
}
@ -192,10 +190,6 @@ impl EncoderApi for HwRamEncoder {
self.bitrate
}
fn support_abr(&self) -> bool {
["vaapi"].iter().all(|&x| !self.config.name.contains(x))
}
fn support_changing_quality(&self) -> bool {
["vaapi"].iter().all(|&x| !self.config.name.contains(x))
}
@ -254,21 +248,35 @@ impl HwRamEncoder {
RC_CBR
}
pub fn convert_quality(name: &str, quality: crate::codec::Quality) -> u32 {
use crate::codec::Quality;
let quality = match quality {
Quality::Best => 150,
Quality::Balanced => 100,
Quality::Low => 50,
Quality::Custom(b) => b,
};
let factor = if name.contains("mediacodec") {
pub fn bitrate(name: &str, width: usize, height: usize, ratio: f32) -> u32 {
Self::calc_bitrate(width, height, ratio, name.contains("h264"))
}
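// Start from base_bitrate * ratio, then apply a multiplier that decays smoothly
// toward 1.0 once the base bitrate exceeds ~2 Mbps: roughly 5.0 on Android,
// 2.0 for H.264 and 1.5 for other codecs at low base bitrates.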
pub fn calc_bitrate(width: usize, height: usize, ratio: f32, h264: bool) -> u32 {
let base = base_bitrate(width as _, height as _) as f32 * ratio;
let threshold = 2000.0;
let decay_rate = 0.001; // 1000 * 0.001 = 1
let factor: f32 = if cfg!(target_os = "android") {
// https://stackoverflow.com/questions/26110337/what-are-valid-bit-rates-to-set-for-mediacodec?rq=3
5
if base > threshold {
1.0 + 4.0 / (1.0 + (base - threshold) * decay_rate)
} else {
5.0
}
} else if h264 {
if base > threshold {
1.0 + 1.0 / (1.0 + (base - threshold) * decay_rate)
} else {
2.0
}
} else {
1
if base > threshold {
1.0 + 0.5 / (1.0 + (base - threshold) * decay_rate)
} else {
1.5
}
};
quality * factor
(base * factor) as u32
}
pub fn check_bitrate_range(_config: &HwRamEncoderConfig, bitrate: u32) -> u32 {

View file

@ -1,13 +1,14 @@
// https://github.com/astraw/vpx-encode
// https://github.com/astraw/env-libvpx-sys
// https://github.com/rust-av/vpx-rs/blob/master/src/decoder.rs
// https://github.com/chromium/chromium/blob/e7b24573bc2e06fed4749dd6b6abfce67f29052f/media/video/vpx_video_encoder.cc#L522
use hbb_common::anyhow::{anyhow, Context};
use hbb_common::log;
use hbb_common::message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, VideoFrame};
use hbb_common::ResultType;
use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, Quality};
use crate::codec::{base_bitrate, codec_thread_num, EncoderApi};
use crate::{EncodeInput, EncodeYuvFormat, GoogleImage, Pixfmt, STRIDE_ALIGN};
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
@ -19,9 +20,6 @@ use std::{ptr, slice};
generate_call_macro!(call_vpx, false);
generate_call_ptr_macro!(call_vpx_ptr);
const DEFAULT_QP_MAX: u32 = 56; // no more than 63
const DEFAULT_QP_MIN: u32 = 12; // no more than 63
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum VpxVideoCodecId {
VP8,
@ -85,21 +83,11 @@ impl EncoderApi for VpxEncoder {
c.kf_mode = vpx_kf_mode::VPX_KF_DISABLED; // reduce bandwidth a lot
}
let (q_min, q_max, b) = Self::convert_quality(config.quality);
if q_min > 0 && q_min < q_max && q_max < 64 {
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
} else {
c.rc_min_quantizer = DEFAULT_QP_MIN;
c.rc_max_quantizer = DEFAULT_QP_MAX;
}
let base_bitrate = base_bitrate(config.width as _, config.height as _);
let bitrate = base_bitrate * b / 100;
if bitrate > 0 {
c.rc_target_bitrate = bitrate;
} else {
c.rc_target_bitrate = base_bitrate;
}
let (q_min, q_max) = Self::calc_q_values(config.quality);
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
c.rc_target_bitrate =
Self::bitrate(config.width as _, config.height as _, config.quality);
// https://chromium.googlesource.com/webm/libvpx/+/refs/heads/main/vp9/common/vp9_enums.h#29
// https://chromium.googlesource.com/webm/libvpx/+/refs/heads/main/vp8/vp8_cx_iface.c#282
c.g_profile = if i444 && config.codec == VpxVideoCodecId::VP9 {
@ -212,17 +200,12 @@ impl EncoderApi for VpxEncoder {
false
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
fn set_quality(&mut self, ratio: f32) -> ResultType<()> {
let mut c = unsafe { *self.ctx.config.enc.to_owned() };
let (q_min, q_max, b) = Self::convert_quality(quality);
if q_min > 0 && q_min < q_max && q_max < 64 {
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
}
let bitrate = base_bitrate(self.width as _, self.height as _) * b / 100;
if bitrate > 0 {
c.rc_target_bitrate = bitrate;
}
let (q_min, q_max) = Self::calc_q_values(ratio);
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
c.rc_target_bitrate = Self::bitrate(self.width as _, self.height as _, ratio);
call_vpx!(vpx_codec_enc_config_set(&mut self.ctx, &c));
Ok(())
}
@ -232,9 +215,6 @@ impl EncoderApi for VpxEncoder {
c.rc_target_bitrate
}
fn support_abr(&self) -> bool {
true
}
fn support_changing_quality(&self) -> bool {
true
}
@ -331,30 +311,27 @@ impl VpxEncoder {
}
}
fn convert_quality(quality: Quality) -> (u32, u32, u32) {
match quality {
Quality::Best => (6, 45, 150),
Quality::Balanced => (12, 56, 100 * 2 / 3),
Quality::Low => (18, 56, 50),
Quality::Custom(b) => {
let (q_min, q_max) = Self::calc_q_values(b);
(q_min, q_max, b)
}
}
fn bitrate(width: u32, height: u32, ratio: f32) -> u32 {
let bitrate = base_bitrate(width, height) as f32;
(bitrate * ratio) as u32
}
#[inline]
fn calc_q_values(b: u32) -> (u32, u32) {
fn calc_q_values(ratio: f32) -> (u32, u32) {
let b = (ratio * 100.0) as u32;
let b = std::cmp::min(b, 200);
let q_min1: i32 = 36;
let q_min1 = 36;
let q_min2 = 0;
let q_max1 = 56;
let q_max2 = 37;
let t = b as f32 / 200.0;
let q_min: u32 = ((1.0 - t) * q_min1 as f32 + t * q_min2 as f32).round() as u32;
let q_max = ((1.0 - t) * q_max1 as f32 + t * q_max2 as f32).round() as u32;
let mut q_min: u32 = ((1.0 - t) * q_min1 as f32 + t * q_min2 as f32).round() as u32;
let mut q_max = ((1.0 - t) * q_max1 as f32 + t * q_max2 as f32).round() as u32;
q_min = q_min.clamp(q_min2, q_min1);
q_max = q_max.clamp(q_max2, q_max1);
(q_min, q_max)
}
@ -415,8 +392,8 @@ pub struct VpxEncoderConfig {
pub width: c_uint,
/// The height (in pixels).
pub height: c_uint,
/// The image quality
pub quality: Quality,
/// The bitrate ratio
pub quality: f32,
/// The codec
pub codec: VpxVideoCodecId,
/// keyframe interval

View file

@ -5,7 +5,7 @@ use std::{
};
use crate::{
codec::{base_bitrate, enable_vram_option, EncoderApi, EncoderCfg, Quality},
codec::{base_bitrate, enable_vram_option, EncoderApi, EncoderCfg},
hwcodec::HwCodecConfig,
AdapterDevice, CodecFormat, EncodeInput, EncodeYuvFormat, Pixfmt,
};
@ -17,7 +17,7 @@ use hbb_common::{
ResultType,
};
use hwcodec::{
common::{AdapterVendor::*, DataFormat, Driver, MAX_GOP},
common::{DataFormat, Driver, MAX_GOP},
vram::{
decode::{self, DecodeFrame, Decoder},
encode::{self, EncodeFrame, Encoder},
@ -39,7 +39,7 @@ pub struct VRamEncoderConfig {
pub device: AdapterDevice,
pub width: usize,
pub height: usize,
pub quality: Quality,
pub quality: f32,
pub feature: FeatureContext,
pub keyframe_interval: Option<usize>,
}
@ -51,7 +51,6 @@ pub struct VRamEncoder {
bitrate: u32,
last_frame_len: usize,
same_bad_len_counter: usize,
config: VRamEncoderConfig,
}
impl EncoderApi for VRamEncoder {
@ -61,12 +60,12 @@ impl EncoderApi for VRamEncoder {
{
match cfg {
EncoderCfg::VRAM(config) => {
let b = Self::convert_quality(config.quality, &config.feature);
let base_bitrate = base_bitrate(config.width as _, config.height as _);
let mut bitrate = base_bitrate * b / 100;
if bitrate <= 0 {
bitrate = base_bitrate;
}
let bitrate = Self::bitrate(
config.feature.data_format,
config.width,
config.height,
config.quality,
);
let gop = config.keyframe_interval.unwrap_or(MAX_GOP as _) as i32;
let ctx = EncodeContext {
f: config.feature.clone(),
@ -87,7 +86,6 @@ impl EncoderApi for VRamEncoder {
bitrate,
last_frame_len: 0,
same_bad_len_counter: 0,
config,
}),
Err(_) => Err(anyhow!(format!("Failed to create encoder"))),
}
@ -172,9 +170,13 @@ impl EncoderApi for VRamEncoder {
true
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
let b = Self::convert_quality(quality, &self.ctx.f);
let bitrate = base_bitrate(self.ctx.d.width as _, self.ctx.d.height as _) * b / 100;
fn set_quality(&mut self, ratio: f32) -> ResultType<()> {
let bitrate = Self::bitrate(
self.ctx.f.data_format,
self.ctx.d.width as _,
self.ctx.d.height as _,
ratio,
);
if bitrate > 0 {
if self.encoder.set_bitrate((bitrate) as _).is_ok() {
self.bitrate = bitrate;
@ -187,10 +189,6 @@ impl EncoderApi for VRamEncoder {
self.bitrate
}
fn support_abr(&self) -> bool {
self.config.device.vendor_id != ADAPTER_VENDOR_INTEL as u32
}
fn support_changing_quality(&self) -> bool {
true
}
@ -285,31 +283,8 @@ impl VRamEncoder {
}
}
pub fn convert_quality(quality: Quality, f: &FeatureContext) -> u32 {
match quality {
Quality::Best => {
if f.driver == Driver::MFX && f.data_format == DataFormat::H264 {
200
} else {
150
}
}
Quality::Balanced => {
if f.driver == Driver::MFX && f.data_format == DataFormat::H264 {
150
} else {
100
}
}
Quality::Low => {
if f.driver == Driver::MFX && f.data_format == DataFormat::H264 {
75
} else {
50
}
}
Quality::Custom(b) => b,
}
pub fn bitrate(fmt: DataFormat, width: usize, height: usize, ratio: f32) -> u32 {
crate::hwcodec::HwRamEncoder::calc_bitrate(width, height, ratio, fmt == DataFormat::H264)
}
pub fn set_not_use(display: usize, not_use: bool) {

View file

@ -228,7 +228,6 @@ pub struct Connection {
#[cfg(target_os = "linux")]
linux_headless_handle: LinuxHeadlessHandle,
closed: bool,
delay_response_instant: Instant,
#[cfg(not(any(target_os = "android", target_os = "ios")))]
start_cm_ipc_para: Option<StartCmIpcPara>,
auto_disconnect_timer: Option<(Instant, u64)>,
@ -376,7 +375,6 @@ impl Connection {
#[cfg(target_os = "linux")]
linux_headless_handle,
closed: false,
delay_response_instant: Instant::now(),
#[cfg(not(any(target_os = "android", target_os = "ios")))]
start_cm_ipc_para: Some(StartCmIpcPara {
rx_to_cm,
@ -736,7 +734,11 @@ impl Connection {
});
conn.send(msg_out.into()).await;
}
video_service::VIDEO_QOS.lock().unwrap().user_delay_response_elapsed(conn.inner.id(), conn.delay_response_instant.elapsed().as_millis());
if conn.is_authed_remote_conn() {
if let Some(last_test_delay) = conn.last_test_delay {
video_service::VIDEO_QOS.lock().unwrap().user_delay_response_elapsed(id, last_test_delay.elapsed().as_millis());
}
}
}
}
}
@ -1877,7 +1879,6 @@ impl Connection {
.user_network_delay(self.inner.id(), new_delay);
self.network_delay = new_delay;
}
self.delay_response_instant = Instant::now();
}
} else if let Some(message::Union::SwitchSidesResponse(_s)) = msg.union {
#[cfg(feature = "flutter")]
@ -3322,6 +3323,13 @@ impl Connection {
session_id: self.lr.session_id,
}
}
fn is_authed_remote_conn(&self) -> bool {
if let Some(id) = self.authed_conn_id.as_ref() {
return id.conn_type() == AuthConnType::Remote;
}
false
}
}
pub fn insert_switch_sides_uuid(id: String, uuid: uuid::Uuid) {
@ -3809,10 +3817,6 @@ mod raii {
fn drop(&mut self) {
let mut active_conns_lock = ALIVE_CONNS.lock().unwrap();
active_conns_lock.retain(|&c| c != self.0);
video_service::VIDEO_QOS
.lock()
.unwrap()
.on_connection_close(self.0);
}
}
@ -3830,6 +3834,12 @@ mod raii {
_ONCE.call_once(|| {
shutdown_hooks::add_shutdown_hook(connection_shutdown_hook);
});
if conn_type == AuthConnType::Remote {
video_service::VIDEO_QOS
.lock()
.unwrap()
.on_connection_open(conn_id);
}
Self(conn_id, conn_type)
}
@ -3927,12 +3937,20 @@ mod raii {
);
}
}
pub fn conn_type(&self) -> AuthConnType {
self.1
}
}
impl Drop for AuthedConnID {
fn drop(&mut self) {
if self.1 == AuthConnType::Remote {
scrap::codec::Encoder::update(scrap::codec::EncodingUpdate::Remove(self.0));
video_service::VIDEO_QOS
.lock()
.unwrap()
.on_connection_close(self.0);
}
AUTHED_CONNS.lock().unwrap().retain(|c| c.0 != self.0);
let remote_count = AUTHED_CONNS

View file

@ -1,287 +1,222 @@
use super::*;
use scrap::codec::Quality;
use std::time::Duration;
use scrap::codec::{Quality, BR_BALANCED, BR_BEST, BR_SPEED};
use std::{
collections::VecDeque,
time::{Duration, Instant},
};
/*
FPS adjust:
a. new user connected => set fps to INIT_FPS
b. TestDelay received => update the user's fps according to network delay
When network delay < DELAY_THRESHOLD_150MS, set the minimum fps according to image quality and increase fps;
When network delay >= DELAY_THRESHOLD_150MS, set the minimum fps according to image quality and decrease fps;
c. one-second timer / TestDelay received => set the real fps to the minimum fps across all users
ratio adjust:
a. user sets image quality => set the ratio to the latest quality's ratio
b. 3-second timer => update the ratio according to network delay
When network delay < DELAY_THRESHOLD_150MS, increase the ratio (each increase capped at roughly +150 kbps);
When network delay >= DELAY_THRESHOLD_150MS, decrease the ratio;
adjust between FPS and ratio:
When network delay < DELAY_THRESHOLD_150MS, fps stays above the minimum fps and the ratio increases;
When network delay >= DELAY_THRESHOLD_150MS, fps stays below the minimum fps and the ratio decreases;
delay:
use the measured delay minus RTT as the actual network delay
*/
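// A rough summary of the concrete numbers used below (user_network_delay and
// adjust_ratio): with avg_delay < 50 ms the fps is bumped by 1 while below the
// normal fps (by 5 after 3 consecutive good samples); between 50 ms and 150 ms
// it is held at or nudged above the quality-dependent minimum fps; above 150 ms
// it is scaled down roughly in proportion to avg_delay / 150 ms. The ratio is
// only touched every ADJUST_RATIO_INTERVAL seconds: multiplied by 1.05..1.15
// when avg_delay < 150 ms and the screen is dynamic, multiplied by 0.8..0.95 as
// the delay climbs past 150/200/300/500 ms, with any single increase capped at
// roughly +150 kbps of the current bitrate.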
// Constants
pub const FPS: u32 = 30;
pub const MIN_FPS: u32 = 1;
pub const MAX_FPS: u32 = 120;
trait Percent {
fn as_percent(&self) -> u32;
pub const INIT_FPS: u32 = 15;
// Bitrate ratio constants for different quality levels
const BR_MAX: f32 = 40.0; // 2000 * 2 / 100
const BR_MIN: f32 = 0.2;
const BR_MIN_HIGH_RESOLUTION: f32 = 0.1; // For high resolution, BR_MIN is still too high, so we set a lower limit
const MAX_BR_MULTIPLE: f32 = 1.0;
const HISTORY_DELAY_LEN: usize = 2;
const ADJUST_RATIO_INTERVAL: usize = 3; // Adjust quality ratio every 3 seconds
const DYNAMIC_SCREEN_THRESHOLD: usize = 2; // Allow increasing the quality ratio if frames are encoded more than 2 times per second on average
const DELAY_THRESHOLD_150MS: u32 = 150; // 150ms is the threshold for good network condition
#[derive(Default, Debug, Clone)]
struct UserDelay {
response_delayed: bool,
delay_history: VecDeque<u32>,
fps: Option<u32>,
rtt_calculator: RttCalculator,
quick_increase_fps_count: usize,
increase_fps_count: usize,
}
impl Percent for ImageQuality {
fn as_percent(&self) -> u32 {
match self {
ImageQuality::NotSet => 0,
ImageQuality::Low => 50,
ImageQuality::Balanced => 66,
ImageQuality::Best => 100,
impl UserDelay {
fn add_delay(&mut self, delay: u32) {
self.rtt_calculator.update(delay);
if self.delay_history.len() > HISTORY_DELAY_LEN {
self.delay_history.pop_front();
}
self.delay_history.push_back(delay);
}
// Average delay minus RTT
fn avg_delay(&self) -> u32 {
let len = self.delay_history.len();
if len > 0 {
let avg_delay = self.delay_history.iter().sum::<u32>() / len as u32;
// If RTT is available, subtract it from average delay to get actual network latency
if let Some(rtt) = self.rtt_calculator.get_rtt() {
if avg_delay > rtt {
avg_delay - rtt
} else {
avg_delay
}
} else {
avg_delay
}
} else {
DELAY_THRESHOLD_150MS
}
}
}
#[derive(Default, Debug, Copy, Clone)]
struct Delay {
state: DelayState,
staging_state: DelayState,
delay: u32,
counter: u32,
slower_than_old_state: Option<bool>,
}
#[derive(Default, Debug, Copy, Clone)]
// User session data structure
#[derive(Default, Debug, Clone)]
struct UserData {
auto_adjust_fps: Option<u32>, // reserve for compatibility
custom_fps: Option<u32>,
quality: Option<(i64, Quality)>, // (time, quality)
delay: Option<Delay>,
response_delayed: bool,
delay: UserDelay,
record: bool,
}
#[derive(Default, Debug, Clone)]
struct DisplayData {
send_counter: usize, // Number of times encode during period
support_changing_quality: bool,
}
// Main QoS controller structure
pub struct VideoQoS {
fps: u32,
quality: Quality,
ratio: f32,
users: HashMap<i32, UserData>,
displays: HashMap<usize, DisplayData>,
bitrate_store: u32,
support_abr: HashMap<usize, bool>,
}
#[derive(PartialEq, Debug, Clone, Copy)]
enum DelayState {
Normal = 0,
LowDelay = 200,
HighDelay = 500,
Broken = 1000,
}
impl Default for DelayState {
fn default() -> Self {
DelayState::Normal
}
}
impl DelayState {
fn from_delay(delay: u32) -> Self {
if delay > DelayState::Broken as u32 {
DelayState::Broken
} else if delay > DelayState::HighDelay as u32 {
DelayState::HighDelay
} else if delay > DelayState::LowDelay as u32 {
DelayState::LowDelay
} else {
DelayState::Normal
}
}
adjust_ratio_instant: Instant,
abr_config: bool,
new_user_instant: Instant,
}
impl Default for VideoQoS {
fn default() -> Self {
VideoQoS {
fps: FPS,
quality: Default::default(),
ratio: 1.0,
users: Default::default(),
displays: Default::default(),
bitrate_store: 0,
support_abr: Default::default(),
adjust_ratio_instant: Instant::now(),
abr_config: true,
new_user_instant: Instant::now(),
}
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum RefreshType {
SetImageQuality,
}
// Basic functionality
impl VideoQoS {
// Calculate seconds per frame based on current FPS
pub fn spf(&self) -> Duration {
Duration::from_secs_f32(1. / (self.fps() as f32))
}
// Get current FPS within valid range
pub fn fps(&self) -> u32 {
if self.fps >= MIN_FPS && self.fps <= MAX_FPS {
self.fps
let fps = self.fps;
if fps >= MIN_FPS && fps <= MAX_FPS {
fps
} else {
FPS
}
}
// Store bitrate for later use
pub fn store_bitrate(&mut self, bitrate: u32) {
self.bitrate_store = bitrate;
}
// Get stored bitrate
pub fn bitrate(&self) -> u32 {
self.bitrate_store
}
pub fn quality(&self) -> Quality {
self.quality
// Get current bitrate ratio with bounds checking
pub fn ratio(&mut self) -> f32 {
if self.ratio < BR_MIN_HIGH_RESOLUTION || self.ratio > BR_MAX {
self.ratio = BR_BALANCED;
}
self.ratio
}
// Check if any user is in recording mode
pub fn record(&self) -> bool {
self.users.iter().any(|u| u.1.record)
}
pub fn set_support_abr(&mut self, display_idx: usize, support: bool) {
self.support_abr.insert(display_idx, support);
pub fn set_support_changing_quality(&mut self, display_idx: usize, support: bool) {
if let Some(display) = self.displays.get_mut(&display_idx) {
display.support_changing_quality = support;
}
}
// Check if variable bitrate encoding is supported and enabled
pub fn in_vbr_state(&self) -> bool {
Config::get_option("enable-abr") != "N" && self.support_abr.iter().all(|e| *e.1)
self.abr_config && self.displays.iter().all(|e| e.1.support_changing_quality)
}
}
// User session management
impl VideoQoS {
// Initialize new user session
pub fn on_connection_open(&mut self, id: i32) {
self.users.insert(id, UserData::default());
self.abr_config = Config::get_option("enable-abr") != "N";
self.new_user_instant = Instant::now();
}
pub fn refresh(&mut self, typ: Option<RefreshType>) {
// fps
let user_fps = |u: &UserData| {
// custom_fps
let mut fps = u.custom_fps.unwrap_or(FPS);
// auto adjust fps
if let Some(auto_adjust_fps) = u.auto_adjust_fps {
if fps == 0 || auto_adjust_fps < fps {
fps = auto_adjust_fps;
}
}
// delay
if let Some(delay) = u.delay {
fps = match delay.state {
DelayState::Normal => fps,
DelayState::LowDelay => fps * 3 / 4,
DelayState::HighDelay => fps / 2,
DelayState::Broken => fps / 4,
}
}
// delay response
if u.response_delayed {
if fps > MIN_FPS + 2 {
fps = MIN_FPS + 2;
}
}
return fps;
};
let mut fps = self
.users
.iter()
.map(|(_, u)| user_fps(u))
.filter(|u| *u >= MIN_FPS)
.min()
.unwrap_or(FPS);
if fps > MAX_FPS {
fps = MAX_FPS;
// Clean up user session
pub fn on_connection_close(&mut self, id: i32) {
self.users.remove(&id);
if self.users.is_empty() {
*self = Default::default();
}
self.fps = fps;
// quality
// latest image quality
let latest_quality = self
.users
.iter()
.map(|(_, u)| u.quality)
.filter(|q| *q != None)
.max_by(|a, b| a.unwrap_or_default().0.cmp(&b.unwrap_or_default().0))
.unwrap_or_default()
.unwrap_or_default()
.1;
let mut quality = latest_quality;
// network delay
let abr_enabled = self.in_vbr_state();
if abr_enabled && typ != Some(RefreshType::SetImageQuality) {
// max delay
let delay = self
.users
.iter()
.map(|u| u.1.delay)
.filter(|d| d.is_some())
.max_by(|a, b| {
(a.unwrap_or_default().state as u32).cmp(&(b.unwrap_or_default().state as u32))
});
let delay = delay.unwrap_or_default().unwrap_or_default().state;
if delay != DelayState::Normal {
match self.quality {
Quality::Best => {
quality = if delay == DelayState::Broken {
Quality::Low
} else {
Quality::Balanced
};
}
Quality::Balanced => {
quality = Quality::Low;
}
Quality::Low => {
quality = Quality::Low;
}
Quality::Custom(b) => match delay {
DelayState::LowDelay => {
quality =
Quality::Custom(if b >= 150 { 100 } else { std::cmp::min(50, b) });
}
DelayState::HighDelay => {
quality =
Quality::Custom(if b >= 100 { 50 } else { std::cmp::min(25, b) });
}
DelayState::Broken => {
quality =
Quality::Custom(if b >= 50 { 25 } else { std::cmp::min(10, b) });
}
DelayState::Normal => {}
},
}
} else {
match self.quality {
Quality::Low => {
if latest_quality == Quality::Best {
quality = Quality::Balanced;
}
}
Quality::Custom(current_b) => {
if let Quality::Custom(latest_b) = latest_quality {
if current_b < latest_b / 2 {
quality = Quality::Custom(latest_b / 2);
}
}
}
_ => {}
}
}
}
self.quality = quality;
}
pub fn user_custom_fps(&mut self, id: i32, fps: u32) {
if fps < MIN_FPS {
if fps < MIN_FPS || fps > MAX_FPS {
return;
}
if let Some(user) = self.users.get_mut(&id) {
user.custom_fps = Some(fps);
} else {
self.users.insert(
id,
UserData {
custom_fps: Some(fps),
..Default::default()
},
);
}
self.refresh(None);
}
pub fn user_auto_adjust_fps(&mut self, id: i32, fps: u32) {
if fps < MIN_FPS || fps > MAX_FPS {
return;
}
if let Some(user) = self.users.get_mut(&id) {
user.auto_adjust_fps = Some(fps);
} else {
self.users.insert(
id,
UserData {
auto_adjust_fps: Some(fps),
..Default::default()
},
);
}
self.refresh(None);
}
pub fn user_image_quality(&mut self, id: i32, image_quality: i32) {
// https://github.com/rustdesk/rustdesk/blob/d716e2b40c38737f1aa3f16de0dec67394a6ac68/src/server/video_service.rs#L493
let convert_quality = |q: i32| {
let convert_quality = |q: i32| -> Quality {
if q == ImageQuality::Balanced.value() {
Quality::Balanced
} else if q == ImageQuality::Low.value() {
@ -289,92 +224,16 @@ impl VideoQoS {
} else if q == ImageQuality::Best.value() {
Quality::Best
} else {
let mut b = (q >> 8 & 0xFFF) * 2;
b = std::cmp::max(b, 20);
b = std::cmp::min(b, 8000);
Quality::Custom(b as u32)
let b = ((q >> 8 & 0xFFF) * 2) as f32 / 100.0;
Quality::Custom(b.clamp(BR_MIN, BR_MAX))
}
};
let quality = Some((hbb_common::get_time(), convert_quality(image_quality)));
if let Some(user) = self.users.get_mut(&id) {
user.quality = quality;
} else {
self.users.insert(
id,
UserData {
quality,
..Default::default()
},
);
}
self.refresh(Some(RefreshType::SetImageQuality));
}
pub fn user_network_delay(&mut self, id: i32, delay: u32) {
let state = DelayState::from_delay(delay);
let debounce = 3;
if let Some(user) = self.users.get_mut(&id) {
if let Some(d) = &mut user.delay {
d.delay = (delay + d.delay) / 2;
let new_state = DelayState::from_delay(d.delay);
let slower_than_old_state = new_state as i32 - d.staging_state as i32;
let slower_than_old_state = if slower_than_old_state > 0 {
Some(true)
} else if slower_than_old_state < 0 {
Some(false)
} else {
None
};
if d.slower_than_old_state == slower_than_old_state {
let old_counter = d.counter;
d.counter += delay / 1000 + 1;
if old_counter < debounce && d.counter >= debounce {
d.counter = 0;
d.state = d.staging_state;
d.staging_state = new_state;
}
if d.counter % debounce == 0 {
self.refresh(None);
}
} else {
d.counter = 0;
d.staging_state = new_state;
d.slower_than_old_state = slower_than_old_state;
}
} else {
user.delay = Some(Delay {
state: DelayState::Normal,
staging_state: state,
delay,
counter: 0,
slower_than_old_state: None,
});
}
} else {
self.users.insert(
id,
UserData {
delay: Some(Delay {
state: DelayState::Normal,
staging_state: state,
delay,
counter: 0,
slower_than_old_state: None,
}),
..Default::default()
},
);
}
}
pub fn user_delay_response_elapsed(&mut self, id: i32, elapsed: u128) {
if let Some(user) = self.users.get_mut(&id) {
let old = user.response_delayed;
user.response_delayed = elapsed > 3000;
if old != user.response_delayed {
self.refresh(None);
}
// update ratio directly
self.ratio = self.latest_quality().ratio();
}
}
@ -384,8 +243,348 @@ impl VideoQoS {
}
}
pub fn on_connection_close(&mut self, id: i32) {
self.users.remove(&id);
self.refresh(None);
pub fn user_network_delay(&mut self, id: i32, delay: u32) {
let highest_fps = self.highest_fps();
let target_ratio = self.latest_quality().ratio();
// For bad network, small fps means quick reaction and high quality
let (min_fps, normal_fps) = if target_ratio >= BR_BEST {
(8, 16)
} else if target_ratio >= BR_BALANCED {
(10, 20)
} else {
(12, 24)
};
// Calculate minimum acceptable delay-fps product
let dividend_ms = DELAY_THRESHOLD_150MS * min_fps;
let mut adjust_ratio = false;
if let Some(user) = self.users.get_mut(&id) {
let delay = delay.max(10);
let old_avg_delay = user.delay.avg_delay();
user.delay.add_delay(delay);
let mut avg_delay = user.delay.avg_delay();
avg_delay = avg_delay.max(10);
let mut fps = self.fps;
// Adaptive FPS adjustment based on network delay:
if avg_delay < 50 {
user.delay.quick_increase_fps_count += 1;
let mut step = if fps < normal_fps { 1 } else { 0 };
if user.delay.quick_increase_fps_count >= 3 {
// After 3 consecutive good samples, increase more aggressively
user.delay.quick_increase_fps_count = 0;
step = 5;
}
fps = min_fps.max(fps + step);
} else if avg_delay < 100 {
let step = if avg_delay < old_avg_delay {
if fps < normal_fps {
1
} else {
0
}
} else {
0
};
fps = min_fps.max(fps + step);
} else if avg_delay < DELAY_THRESHOLD_150MS {
fps = min_fps.max(fps);
} else {
let devide_fps = ((fps as f32) / (avg_delay as f32 / DELAY_THRESHOLD_150MS as f32))
.ceil() as u32;
if avg_delay < 200 {
fps = min_fps.max(devide_fps);
} else if avg_delay < 300 {
fps = min_fps.min(devide_fps);
} else if avg_delay < 600 {
fps = dividend_ms / avg_delay;
} else {
fps = (dividend_ms / avg_delay).min(devide_fps);
}
}
if avg_delay < DELAY_THRESHOLD_150MS {
user.delay.increase_fps_count += 1;
} else {
user.delay.increase_fps_count = 0;
}
if user.delay.increase_fps_count >= 3 {
// After 3 stable samples, try increasing FPS
user.delay.increase_fps_count = 0;
fps += 1;
}
// Reset quick increase counter if network condition worsens
if avg_delay > 50 {
user.delay.quick_increase_fps_count = 0;
}
fps = fps.clamp(MIN_FPS, highest_fps);
// first network delay message
adjust_ratio = user.delay.fps.is_none();
user.delay.fps = Some(fps);
}
self.adjust_fps();
if adjust_ratio {
self.adjust_ratio(false);
}
}
pub fn user_delay_response_elapsed(&mut self, id: i32, elapsed: u128) {
if let Some(user) = self.users.get_mut(&id) {
user.delay.response_delayed = elapsed > 2000;
if user.delay.response_delayed {
user.delay.add_delay(elapsed as u32);
self.adjust_fps();
}
}
}
}
// Common adjust functions
impl VideoQoS {
pub fn new_display(&mut self, display_idx: usize) {
self.displays.insert(display_idx, DisplayData::default());
}
pub fn remove_display(&mut self, display_idx: usize) {
self.displays.remove(&display_idx);
}
pub fn update_display_data(&mut self, display_idx: usize, send_counter: usize) {
if let Some(display) = self.displays.get_mut(&display_idx) {
display.send_counter += send_counter;
}
self.adjust_fps();
let abr_enabled = self.in_vbr_state();
if abr_enabled {
if self.adjust_ratio_instant.elapsed().as_secs() >= ADJUST_RATIO_INTERVAL as u64 {
let dynamic_screen = self
.displays
.iter()
.any(|d| d.1.send_counter >= ADJUST_RATIO_INTERVAL * DYNAMIC_SCREEN_THRESHOLD);
self.displays.iter_mut().for_each(|d| {
d.1.send_counter = 0;
});
self.adjust_ratio(dynamic_screen);
}
} else {
self.ratio = self.latest_quality().ratio();
}
}
#[inline]
fn highest_fps(&self) -> u32 {
let user_fps = |u: &UserData| {
let mut fps = u.custom_fps.unwrap_or(FPS);
if let Some(auto_adjust_fps) = u.auto_adjust_fps {
if fps == 0 || auto_adjust_fps < fps {
fps = auto_adjust_fps;
}
}
fps
};
let fps = self
.users
.iter()
.map(|(_, u)| user_fps(u))
.filter(|u| *u >= MIN_FPS)
.min()
.unwrap_or(FPS);
fps.clamp(MIN_FPS, MAX_FPS)
}
// Get latest quality settings from all users
pub fn latest_quality(&self) -> Quality {
self.users
.iter()
.map(|(_, u)| u.quality)
.filter(|q| *q != None)
.max_by(|a, b| a.unwrap_or_default().0.cmp(&b.unwrap_or_default().0))
.flatten()
.unwrap_or((0, Quality::Balanced))
.1
}
// Adjust quality ratio based on network delay and screen changes
fn adjust_ratio(&mut self, dynamic_screen: bool) {
// Get maximum delay from all users
let max_delay = self.users.iter().map(|u| u.1.delay.avg_delay()).max();
let Some(max_delay) = max_delay else {
return;
};
let target_quality = self.latest_quality();
let target_ratio = self.latest_quality().ratio();
let current_ratio = self.ratio;
let current_bitrate = self.bitrate();
// Calculate minimum ratio for high resolution (1Mbps baseline)
let ratio_1mbps = if current_bitrate > 0 {
Some((current_ratio * 1000.0 / current_bitrate as f32).max(BR_MIN_HIGH_RESOLUTION))
} else {
None
};
// Calculate ratio for adding 150kbps bandwidth
let ratio_add_150kbps = if current_bitrate > 0 {
Some((current_bitrate + 150) as f32 * current_ratio / current_bitrate as f32)
} else {
None
};
// Set minimum ratio based on quality mode
let min = match target_quality {
Quality::Best => {
// For Best quality, ensure minimum 1Mbps for high resolution
let mut min = BR_BEST / 2.5;
if let Some(ratio_1mbps) = ratio_1mbps {
if min > ratio_1mbps {
min = ratio_1mbps;
}
}
min.max(BR_MIN)
}
Quality::Balanced => {
let mut min = (BR_BALANCED / 2.0).min(0.4);
if let Some(ratio_1mbps) = ratio_1mbps {
if min > ratio_1mbps {
min = ratio_1mbps;
}
}
min.max(BR_MIN_HIGH_RESOLUTION)
}
Quality::Low => BR_MIN_HIGH_RESOLUTION,
Quality::Custom(_) => BR_MIN_HIGH_RESOLUTION,
};
let max = target_ratio * MAX_BR_MULTIPLE;
let mut v = current_ratio;
// Adjust ratio based on network delay thresholds
if max_delay < 50 {
if dynamic_screen {
v = current_ratio * 1.15;
}
} else if max_delay < 100 {
if dynamic_screen {
v = current_ratio * 1.1;
}
} else if max_delay < DELAY_THRESHOLD_150MS {
if dynamic_screen {
v = current_ratio * 1.05;
}
} else if max_delay < 200 {
v = current_ratio * 0.95;
} else if max_delay < 300 {
v = current_ratio * 0.9;
} else if max_delay < 500 {
v = current_ratio * 0.85;
} else {
v = current_ratio * 0.8;
}
// Limit quality increase rate for better stability
if let Some(ratio_add_150kbps) = ratio_add_150kbps {
if v > ratio_add_150kbps
&& ratio_add_150kbps > current_ratio
&& current_ratio >= BR_SPEED
{
v = ratio_add_150kbps;
}
}
self.ratio = v.clamp(min, max);
self.adjust_ratio_instant = Instant::now();
}
// Adjust fps based on network delay and user response time
fn adjust_fps(&mut self) {
let highest_fps = self.highest_fps();
// Get minimum fps from all users
let mut fps = self
.users
.iter()
.map(|u| u.1.delay.fps.unwrap_or(INIT_FPS))
.min()
.unwrap_or(INIT_FPS);
if self.users.iter().any(|u| u.1.delay.response_delayed) {
if fps > MIN_FPS + 1 {
fps = MIN_FPS + 1;
}
}
// For new connections (within 1 second), cap fps to INIT_FPS to ensure stability
if self.new_user_instant.elapsed().as_secs() < 1 {
if fps > INIT_FPS {
fps = INIT_FPS;
}
}
// Ensure fps stays within valid range
self.fps = fps.clamp(MIN_FPS, highest_fps);
}
}
#[derive(Default, Debug, Clone)]
struct RttCalculator {
min_rtt: Option<u32>, // Historical minimum RTT ever observed
window_min_rtt: Option<u32>, // Minimum RTT within last 60 samples
smoothed_rtt: Option<u32>, // Smoothed RTT estimation
samples: VecDeque<u32>, // Last 60 RTT samples
}
impl RttCalculator {
const WINDOW_SAMPLES: usize = 60; // Keep last 60 samples
const MIN_SAMPLES: usize = 10; // Require at least 10 samples
const ALPHA: f32 = 0.5; // Smoothing factor for weighted average
/// Update RTT estimates with a new sample
pub fn update(&mut self, delay: u32) {
// 1. Update historical minimum RTT
match self.min_rtt {
Some(min_rtt) if delay < min_rtt => self.min_rtt = Some(delay),
None => self.min_rtt = Some(delay),
_ => {}
}
// 2. Update sample window
if self.samples.len() >= Self::WINDOW_SAMPLES {
self.samples.pop_front();
}
self.samples.push_back(delay);
// 3. Calculate minimum RTT within the window
self.window_min_rtt = self.samples.iter().min().copied();
// 4. Calculate smoothed RTT
// Use weighted average if we have enough samples
if self.samples.len() >= Self::WINDOW_SAMPLES {
if let (Some(min), Some(window_min)) = (self.min_rtt, self.window_min_rtt) {
// Weighted average of historical minimum and window minimum
let new_srtt =
((1.0 - Self::ALPHA) * min as f32 + Self::ALPHA * window_min as f32) as u32;
self.smoothed_rtt = Some(new_srtt);
}
}
}
/// Get current RTT estimate
/// Returns None if no valid estimation is available
pub fn get_rtt(&self) -> Option<u32> {
if let Some(rtt) = self.smoothed_rtt {
return Some(rtt);
}
if self.samples.len() >= Self::MIN_SAMPLES {
if let Some(rtt) = self.min_rtt {
return Some(rtt);
}
}
None
}
}

View file

@ -51,7 +51,7 @@ use scrap::vram::{VRamEncoder, VRamEncoderConfig};
use scrap::Capturer;
use scrap::{
aom::AomEncoderConfig,
codec::{Encoder, EncoderCfg, Quality},
codec::{Encoder, EncoderCfg},
record::{Recorder, RecorderContext},
vpxcodec::{VpxEncoderConfig, VpxVideoCodecId},
CodecFormat, Display, EncodeInput, TraitCapturer,
@ -413,9 +413,8 @@ fn run(vs: VideoService) -> ResultType<()> {
c.set_gdi();
}
let mut video_qos = VIDEO_QOS.lock().unwrap();
video_qos.refresh(None);
let mut spf;
let mut quality = video_qos.quality();
let mut spf = video_qos.spf();
let mut quality = video_qos.ratio();
let record_incoming = config::option2bool(
"allow-auto-record-incoming",
&Config::get_option("allow-auto-record-incoming"),
@ -461,7 +460,7 @@ fn run(vs: VideoService) -> ResultType<()> {
VIDEO_QOS
.lock()
.unwrap()
.set_support_abr(display_idx, encoder.support_abr());
.set_support_changing_quality(display_idx, encoder.support_changing_quality());
log::info!("initial quality: {quality:?}");
if sp.is_option_true(OPTION_REFRESH) {
@ -489,32 +488,20 @@ fn run(vs: VideoService) -> ResultType<()> {
let mut first_frame = true;
let capture_width = c.width;
let capture_height = c.height;
let (mut second_instant, mut send_counter) = (Instant::now(), 0);
while sp.ok() {
#[cfg(windows)]
check_uac_switch(c.privacy_mode_id, c._capturer_privacy_mode_id)?;
let mut video_qos = VIDEO_QOS.lock().unwrap();
spf = video_qos.spf();
if quality != video_qos.quality() {
log::debug!("quality: {:?} -> {:?}", quality, video_qos.quality());
quality = video_qos.quality();
if encoder.support_changing_quality() {
allow_err!(encoder.set_quality(quality));
video_qos.store_bitrate(encoder.bitrate());
} else {
if !video_qos.in_vbr_state() && !quality.is_custom() {
log::info!("switch to change quality");
bail!("SWITCH");
}
}
}
if client_record != video_qos.record() {
log::info!("switch due to record changed");
bail!("SWITCH");
}
drop(video_qos);
check_qos(
&mut encoder,
&mut quality,
&mut spf,
client_record,
&mut send_counter,
&mut second_instant,
display_idx,
)?;
if sp.is_option_true(OPTION_REFRESH) {
let _ = try_broadcast_display_changed(&sp, display_idx, &c, true);
log::info!("switch to refresh");
@ -582,6 +569,7 @@ fn run(vs: VideoService) -> ResultType<()> {
capture_height,
)?;
frame_controller.set_send(now, send_conn_ids);
send_counter += 1;
}
#[cfg(windows)]
{
@ -640,6 +628,7 @@ fn run(vs: VideoService) -> ResultType<()> {
capture_height,
)?;
frame_controller.set_send(now, send_conn_ids);
send_counter += 1;
}
}
}
@ -691,6 +680,7 @@ struct Raii(usize);
impl Raii {
fn new(display_idx: usize) -> Self {
VIDEO_QOS.lock().unwrap().new_display(display_idx);
Raii(display_idx)
}
}
@ -701,14 +691,14 @@ impl Drop for Raii {
VRamEncoder::set_not_use(self.0, false);
#[cfg(feature = "vram")]
Encoder::update(scrap::codec::EncodingUpdate::Check);
VIDEO_QOS.lock().unwrap().set_support_abr(self.0, true);
VIDEO_QOS.lock().unwrap().remove_display(self.0);
}
}
fn setup_encoder(
c: &CapturerInfo,
display_idx: usize,
quality: Quality,
quality: f32,
client_record: bool,
record_incoming: bool,
last_portable_service_running: bool,
@ -737,7 +727,7 @@ fn setup_encoder(
fn get_encoder_config(
c: &CapturerInfo,
_display_idx: usize,
quality: Quality,
quality: f32,
record: bool,
_portable_service: bool,
) -> EncoderCfg {
@ -1061,3 +1051,40 @@ pub fn make_display_changed_msg(
msg_out.set_misc(misc);
Some(msg_out)
}
fn check_qos(
encoder: &mut Encoder,
ratio: &mut f32,
spf: &mut Duration,
client_record: bool,
send_counter: &mut usize,
second_instant: &mut Instant,
display_idx: usize,
) -> ResultType<()> {
let mut video_qos = VIDEO_QOS.lock().unwrap();
*spf = video_qos.spf();
if *ratio != video_qos.ratio() {
*ratio = video_qos.ratio();
if encoder.support_changing_quality() {
allow_err!(encoder.set_quality(*ratio));
video_qos.store_bitrate(encoder.bitrate());
} else {
// Now only vaapi doesn't support changing quality
if !video_qos.in_vbr_state() && !video_qos.latest_quality().is_custom() {
log::info!("switch to change quality");
bail!("SWITCH");
}
}
}
if client_record != video_qos.record() {
log::info!("switch due to record changed");
bail!("SWITCH");
}
if second_instant.elapsed() > Duration::from_secs(1) {
*second_instant = Instant::now();
video_qos.update_display_data(display_idx, *send_counter);
*send_counter = 0;
}
drop(video_qos);
Ok(())
}