view camera (#11040)

* view camera

Signed-off-by: 21pages <sunboeasy@gmail.com>

* Show a `No cameras` prompt if no cameras are available; `peerGetSessionsCount` uses
connType as a parameter

Signed-off-by: 21pages <sunboeasy@gmail.com>

* fix, use video_service_name rather than display_idx as key in qos,etc

Signed-off-by: 21pages <sunboeasy@gmail.com>

---------

Signed-off-by: 21pages <sunboeasy@gmail.com>
Co-authored-by: Adwin White <adwinw01@gmail.com>
Co-authored-by: RustDesk <71636191+rustdesk@users.noreply.github.com>
This commit is contained in:
21pages
2025-03-10 21:06:53 +08:00
committed by GitHub
parent df4a101316
commit f0f999dc27
96 changed files with 3999 additions and 458 deletions

View File

@@ -0,0 +1,232 @@
use std::{
io,
sync::{Arc, Mutex},
};
use nokhwa::{
pixel_format::RgbAFormat,
query,
utils::{ApiBackend, CameraIndex, RequestedFormat, RequestedFormatType},
Camera,
};
use hbb_common::message_proto::{DisplayInfo, Resolution};
#[cfg(feature = "vram")]
use crate::AdapterDevice;
use crate::common::{bail, ResultType};
use crate::{Frame, PixelBuffer, Pixfmt, TraitCapturer};
// Index of the camera that is treated as the primary capture device.
pub const PRIMARY_CAMERA_IDX: usize = 0;
lazy_static::lazy_static! {
    // Cached `DisplayInfo` for every detected camera; rebuilt on each
    // successful `Cameras::all_info()` call and read by `get_sync_cameras()`.
    static ref SYNC_CAMERA_DISPLAYS: Arc<Mutex<Vec<DisplayInfo>>> = Arc::new(Mutex::new(Vec::new()));
}
/// Namespace type grouping camera enumeration/creation helpers.
pub struct Cameras;

/// Pre-condition check used before starting camera capture: reports whether
/// the primary camera (index [`PRIMARY_CAMERA_IDX`]) is present.
pub fn primary_camera_exists() -> bool {
    let primary = PRIMARY_CAMERA_IDX;
    Cameras::exists(primary)
}
impl Cameras {
pub fn all_info() -> ResultType<Vec<DisplayInfo>> {
// TODO: support more platforms.
#[cfg(not(any(target_os = "linux", target_os = "windows")))]
return Ok(Vec::new());
match query(ApiBackend::Auto) {
Ok(cameras) => {
let mut camera_displays = SYNC_CAMERA_DISPLAYS.lock().unwrap();
camera_displays.clear();
// FIXME: nokhwa returns duplicate info for one physical camera on linux for now.
// issue: https://github.com/l1npengtul/nokhwa/issues/171
// Use only one camera as a temporary hack.
cfg_if::cfg_if! {
if #[cfg(target_os = "linux")] {
let Some(info) = cameras.first() else {
bail!("No camera found")
};
let camera = Self::create_camera(info.index())?;
let resolution = camera.resolution();
let (width, height) = (resolution.width() as i32, resolution.height() as i32);
camera_displays.push(DisplayInfo {
x: 0,
y: 0,
name: info.human_name().clone(),
width,
height,
online: true,
cursor_embedded: false,
scale:1.0,
original_resolution: Some(Resolution {
width,
height,
..Default::default()
}).into(),
..Default::default()
});
} else {
let mut x = 0;
for info in &cameras {
let camera = Self::create_camera(info.index())?;
let resolution = camera.resolution();
let (width, height) = (resolution.width() as i32, resolution.height() as i32);
camera_displays.push(DisplayInfo {
x,
y: 0,
name: info.human_name().clone(),
width,
height,
online: true,
cursor_embedded: false,
scale:1.0,
original_resolution: Some(Resolution {
width,
height,
..Default::default()
}).into(),
..Default::default()
});
x += width;
}
}
}
Ok(camera_displays.clone())
}
Err(e) => {
bail!("Query cameras error: {}", e)
}
}
}
pub fn exists(index: usize) -> bool {
// TODO: support more platforms.
#[cfg(not(any(target_os = "linux", target_os = "windows")))]
return false;
match query(ApiBackend::Auto) {
Ok(cameras) => index < cameras.len(),
_ => return false,
}
}
fn create_camera(index: &CameraIndex) -> ResultType<Camera> {
// TODO: support more platforms.
#[cfg(not(any(target_os = "linux", target_os = "windows")))]
bail!("This platform doesn't support camera yet");
let result = Camera::new(
index.clone(),
RequestedFormat::new::<RgbAFormat>(RequestedFormatType::AbsoluteHighestResolution),
);
match result {
Ok(camera) => Ok(camera),
Err(e) => bail!("create camera{} error: {}", index, e),
}
}
pub fn get_camera_resolution(index: usize) -> ResultType<Resolution> {
let index = CameraIndex::Index(index as u32);
let camera = Self::create_camera(&index)?;
let resolution = camera.resolution();
Ok(Resolution {
width: resolution.width() as i32,
height: resolution.height() as i32,
..Default::default()
})
}
pub fn get_sync_cameras() -> Vec<DisplayInfo> {
SYNC_CAMERA_DISPLAYS.lock().unwrap().clone()
}
pub fn get_capturer(current: usize) -> ResultType<Box<dyn TraitCapturer>> {
Ok(Box::new(CameraCapturer::new(current)?))
}
}
/// Frame capturer backed by a physical camera opened through nokhwa.
pub struct CameraCapturer {
    camera: Camera,
    data: Vec<u8>,      // RGBA bytes of the most recently decoded frame
    last_data: Vec<u8>, // for faster compare and copy
}
impl CameraCapturer {
    /// Open the camera with index `current` and wrap it in a capturer whose
    /// frame buffers start out empty.
    fn new(current: usize) -> ResultType<Self> {
        let camera = Cameras::create_camera(&CameraIndex::Index(current as u32))?;
        Ok(Self {
            camera,
            data: Vec::new(),
            last_data: Vec::new(),
        })
    }
}
impl TraitCapturer for CameraCapturer {
    /// Grab one frame from the camera, decode it to RGBA, and return it as a
    /// `PixelBuffer`. Returns `WouldBlock` (via `would_block_if_equal`) when
    /// the frame is byte-identical to the previous one.
    fn frame<'a>(&'a mut self, _timeout: std::time::Duration) -> std::io::Result<Frame<'a>> {
        // TODO: move this check outside `frame`.
        if !self.camera.is_stream_open() {
            self.camera.open_stream().map_err(|e| {
                io::Error::new(
                    io::ErrorKind::Other,
                    format!("Camera open stream error: {}", e),
                )
            })?;
        }
        let buffer = self.camera.frame().map_err(|e| {
            io::Error::new(io::ErrorKind::Other, format!("Camera frame error: {}", e))
        })?;
        let decoded = buffer.decode_image::<RgbAFormat>().map_err(|e| {
            io::Error::new(
                io::ErrorKind::Other,
                format!("Camera frame decode error: {}", e),
            )
        })?;
        self.data = decoded.as_raw().to_vec();
        crate::would_block_if_equal(&mut self.last_data, &self.data)?;
        // FIXME: macos's PixelBuffer cannot be directly created from bytes slice.
        cfg_if::cfg_if! {
            if #[cfg(any(target_os = "linux", target_os = "windows"))] {
                Ok(Frame::PixelBuffer(PixelBuffer::new(
                    &self.data,
                    Pixfmt::RGBA,
                    decoded.width() as usize,
                    decoded.height() as usize,
                )))
            } else {
                Err(io::Error::new(
                    io::ErrorKind::Other,
                    "Camera is not supported on this platform yet",
                ))
            }
        }
    }

    // Camera capture never falls back to GDI.
    #[cfg(windows)]
    fn is_gdi(&self) -> bool {
        false
    }

    #[cfg(windows)]
    fn set_gdi(&mut self) -> bool {
        false
    }

    // Camera frames are CPU buffers; no real adapter device is involved.
    #[cfg(feature = "vram")]
    fn device(&self) -> AdapterDevice {
        AdapterDevice::default()
    }

    #[cfg(feature = "vram")]
    fn set_output_texture(&mut self, _texture: bool) {}
}

View File

@@ -70,23 +70,30 @@ impl TraitCapturer for Capturer {
pub struct PixelBuffer<'a> {
data: &'a [u8],
pixfmt: Pixfmt,
width: usize,
height: usize,
stride: Vec<usize>,
}
impl<'a> PixelBuffer<'a> {
pub fn new(data: &'a [u8], width: usize, height: usize) -> Self {
pub fn new(data: &'a [u8], pixfmt: Pixfmt, width: usize, height: usize) -> Self {
let stride0 = data.len() / height;
let mut stride = Vec::new();
stride.push(stride0);
PixelBuffer {
data,
pixfmt,
width,
height,
stride,
}
}
#[allow(non_snake_case)]
pub fn with_BGRA(data: &'a [u8], width: usize, height: usize) -> Self {
Self::new(data, Pixfmt::BGRA, width, height)
}
}
impl<'a> crate::TraitPixelBuffer for PixelBuffer<'a> {
@@ -107,7 +114,7 @@ impl<'a> crate::TraitPixelBuffer for PixelBuffer<'a> {
}
fn pixfmt(&self) -> Pixfmt {
Pixfmt::BGRA
self.pixfmt
}
}
@@ -232,7 +239,7 @@ impl CapturerMag {
impl TraitCapturer for CapturerMag {
fn frame<'a>(&'a mut self, _timeout_ms: Duration) -> io::Result<Frame<'a>> {
self.inner.frame(&mut self.data)?;
Ok(Frame::PixelBuffer(PixelBuffer::new(
Ok(Frame::PixelBuffer(PixelBuffer::with_BGRA(
&self.data,
self.inner.get_rect().1,
self.inner.get_rect().2,

View File

@@ -48,6 +48,7 @@ pub use self::convert::*;
pub const STRIDE_ALIGN: usize = 64; // commonly used in libvpx vpx_img_alloc caller
pub const HW_STRIDE_ALIGN: usize = 0; // recommended by av_frame_get_buffer
pub mod camera;
pub mod aom;
pub mod record;
mod vpx;

View File

@@ -25,7 +25,7 @@ pub struct RecorderContext {
pub server: bool,
pub id: String,
pub dir: String,
pub display: usize,
pub video_service_name: String,
pub tx: Option<Sender<RecordState>>,
}
@@ -46,7 +46,7 @@ impl RecorderContext2 {
+ "_"
+ &ctx.id.clone()
+ &chrono::Local::now().format("_%Y%m%d%H%M%S%3f_").to_string()
+ &format!("display{}_", ctx.display)
+ &format!("{}_", ctx.video_service_name)
+ &self.format.to_string().to_lowercase()
+ if self.format == CodecFormat::VP9
|| self.format == CodecFormat::VP8

View File

@@ -5,7 +5,7 @@ use std::{
};
use crate::{
codec::{base_bitrate, enable_vram_option, EncoderApi, EncoderCfg},
codec::{enable_vram_option, EncoderApi, EncoderCfg},
hwcodec::HwCodecConfig,
AdapterDevice, CodecFormat, EncodeInput, EncodeYuvFormat, Pixfmt,
};
@@ -30,8 +30,8 @@ use hwcodec::{
// https://cybersided.com/two-monitors-two-gpus/
// https://learn.microsoft.com/en-us/windows/win32/api/d3d12/nf-d3d12-id3d12device-getadapterluid#remarks
lazy_static::lazy_static! {
static ref ENOCDE_NOT_USE: Arc<Mutex<HashMap<usize, bool>>> = Default::default();
static ref FALLBACK_GDI_DISPLAYS: Arc<Mutex<HashSet<usize>>> = Default::default();
static ref ENOCDE_NOT_USE: Arc<Mutex<HashMap<String, bool>>> = Default::default();
static ref FALLBACK_GDI_DISPLAYS: Arc<Mutex<HashSet<String>>> = Default::default();
}
#[derive(Debug, Clone)]
@@ -287,16 +287,25 @@ impl VRamEncoder {
crate::hwcodec::HwRamEncoder::calc_bitrate(width, height, ratio, fmt == DataFormat::H264)
}
pub fn set_not_use(display: usize, not_use: bool) {
log::info!("set display#{display} not use vram encode to {not_use}");
ENOCDE_NOT_USE.lock().unwrap().insert(display, not_use);
pub fn set_not_use(video_service_name: String, not_use: bool) {
log::info!("set {video_service_name} not use vram encode to {not_use}");
ENOCDE_NOT_USE
.lock()
.unwrap()
.insert(video_service_name, not_use);
}
pub fn set_fallback_gdi(display: usize, fallback: bool) {
pub fn set_fallback_gdi(video_service_name: String, fallback: bool) {
if fallback {
FALLBACK_GDI_DISPLAYS.lock().unwrap().insert(display);
FALLBACK_GDI_DISPLAYS
.lock()
.unwrap()
.insert(video_service_name);
} else {
FALLBACK_GDI_DISPLAYS.lock().unwrap().remove(&display);
FALLBACK_GDI_DISPLAYS
.lock()
.unwrap()
.remove(&video_service_name);
}
}
}