mirror of
https://github.com/rustdesk/rustdesk.git
synced 2026-03-29 16:11:01 +03:00
ios
This commit is contained in:
96
libs/scrap/src/ios/README.md
Normal file
96
libs/scrap/src/ios/README.md
Normal file
@@ -0,0 +1,96 @@
|
||||
# iOS Screen Capture Implementation
|
||||
|
||||
This implementation provides screen capture functionality for iOS using ReplayKit framework through Rust FFI.
|
||||
|
||||
## Architecture
|
||||
|
||||
### Components
|
||||
|
||||
1. **Native Layer** (`native/ScreenCapture.m`)
|
||||
- Implements ReplayKit screen recording for in-app capture
|
||||
- Handles message port communication for system-wide capture
|
||||
- Converts pixel formats (BGRA to RGBA)
|
||||
- Provides C interface for Rust FFI
|
||||
|
||||
2. **FFI Layer** (`ffi.rs`)
|
||||
- Rust bindings to native C functions
|
||||
- Frame buffer management
|
||||
- Callback mechanism for frame updates
|
||||
|
||||
3. **Rust Interface** (`mod.rs`)
|
||||
- Implements `TraitCapturer` for compatibility with RustDesk
|
||||
- Frame management and duplicate detection
|
||||
- Display information handling
|
||||
|
||||
4. **Broadcast Extension** (`flutter/ios/BroadcastExtension/`)
|
||||
- Separate app extension for system-wide screen capture
|
||||
- Uses message ports to send frames to main app
|
||||
- Required for capturing content outside the app
|
||||
|
||||
## Features
|
||||
|
||||
### In-App Capture
|
||||
- Uses `RPScreenRecorder` API
|
||||
- Captures only RustDesk app content
|
||||
- No additional permissions required beyond initial prompt
|
||||
|
||||
### System-Wide Capture
|
||||
- Uses Broadcast Upload Extension
|
||||
- Can capture entire screen including other apps
|
||||
- Requires user to explicitly start from Control Center
|
||||
- Communicates via CFMessagePort
|
||||
|
||||
## Usage
|
||||
|
||||
```rust
|
||||
// Initialize and start capture
|
||||
let display = Display::primary()?;
|
||||
let mut capturer = Capturer::new(display)?;
|
||||
|
||||
// Get frames
|
||||
match capturer.frame(Duration::from_millis(33)) {
|
||||
Ok(frame) => {
|
||||
// Process frame
|
||||
}
|
||||
Err(e) if e.kind() == io::ErrorKind::WouldBlock => {
|
||||
// No new frame available
|
||||
}
|
||||
Err(e) => {
|
||||
// Handle error
|
||||
}
|
||||
}
|
||||
|
||||
// For system-wide capture
|
||||
ffi::show_broadcast_picker();
|
||||
```
|
||||
|
||||
## Setup Requirements
|
||||
|
||||
1. **Xcode Configuration**
|
||||
- Add Broadcast Upload Extension target
|
||||
- Configure app groups (if using shared container)
|
||||
- Set up proper code signing
|
||||
|
||||
2. **Info.plist**
|
||||
- Add microphone usage description (for audio capture)
|
||||
- Configure broadcast extension settings
|
||||
|
||||
3. **Build Settings**
|
||||
- Link ReplayKit framework
|
||||
- Enable Objective-C ARC
|
||||
- Set minimum iOS version to 11.0 (12.0 for broadcast picker)
|
||||
|
||||
## Limitations
|
||||
|
||||
- Screen recording requires iOS 11.0+
|
||||
- System-wide capture requires iOS 12.0+
|
||||
- User must grant permission for screen recording
|
||||
- Performance depends on device capabilities
|
||||
- Broadcast extension has memory limits (~50MB)
|
||||
|
||||
## Security Considerations
|
||||
|
||||
- Screen recording is a sensitive permission
|
||||
- iOS shows recording indicator when active
|
||||
- Broadcast extension runs in separate process
|
||||
- Message port communication is local only
|
||||
165
libs/scrap/src/ios/ffi.rs
Normal file
165
libs/scrap/src/ios/ffi.rs
Normal file
@@ -0,0 +1,165 @@
|
||||
use std::os::raw::{c_uchar, c_uint, c_void};
use std::ptr;
use std::sync::{Arc, Mutex};

/// Raw FFI bindings into the native iOS capture layer
/// (implemented in `native/ScreenCapture.m`, linked statically).
#[link(name = "ScreenCapture", kind = "static")]
extern "C" {
    // Lifecycle.
    fn ios_capture_init();
    fn ios_capture_start() -> bool;
    fn ios_capture_stop();
    fn ios_capture_is_active() -> bool;

    // Polling frame access: writes RGBA pixels into `buffer` and returns the
    // number of bytes written (0 when no frame is available or the buffer is
    // too small).
    fn ios_capture_get_frame(
        buffer: *mut c_uchar,
        buffer_size: c_uint,
        out_width: *mut c_uint,
        out_height: *mut c_uint,
    ) -> c_uint;
    fn ios_capture_get_display_info(width: *mut c_uint, height: *mut c_uint);

    // Push frame access: the native side invokes the callback for each frame
    // with (data, size, width, height).
    fn ios_capture_set_callback(
        callback: Option<extern "C" fn(*const c_uchar, c_uint, c_uint, c_uint)>,
    );

    // System-wide capture via the broadcast extension.
    fn ios_capture_show_broadcast_picker();
    fn ios_capture_is_broadcasting() -> bool;

    // Audio capture.
    fn ios_capture_set_audio_enabled(enable_mic: bool, enable_app_audio: bool);
    fn ios_capture_set_audio_callback(
        callback: Option<extern "C" fn(*const c_uchar, c_uint, bool)>,
    );
}
|
||||
|
||||
lazy_static::lazy_static! {
    /// Latest frame pushed from the native callback, shared with consumers.
    static ref FRAME_BUFFER: Arc<Mutex<FrameBuffer>> = Arc::new(Mutex::new(FrameBuffer::new()));
    /// Guards one-time initialization of the native layer.
    static ref INITIALIZED: Mutex<bool> = Mutex::new(false);
}
|
||||
|
||||
/// Storage for the most recent captured frame.
///
/// `updated` marks whether the current contents have been consumed yet,
/// so each frame is delivered to callers at most once.
struct FrameBuffer {
    data: Vec<u8>,
    width: u32,
    height: u32,
    updated: bool,
}

impl FrameBuffer {
    /// Creates an empty buffer with no pending frame.
    fn new() -> Self {
        Self {
            data: Vec::new(),
            width: 0,
            height: 0,
            updated: false,
        }
    }

    /// Replaces the stored frame with `data` and marks it as fresh.
    /// Clearing before extending reuses the existing allocation.
    fn update(&mut self, data: &[u8], width: u32, height: u32) {
        self.data.clear();
        self.data.extend_from_slice(data);
        self.width = width;
        self.height = height;
        self.updated = true;
    }

    /// Takes the pending frame, if any, as `(pixels, width, height)`.
    /// Resets the freshness flag so the same frame is not returned twice.
    fn get(&mut self) -> Option<(Vec<u8>, u32, u32)> {
        if !self.updated || self.data.is_empty() {
            return None;
        }
        self.updated = false;
        Some((self.data.clone(), self.width, self.height))
    }
}
|
||||
|
||||
extern "C" fn frame_callback(data: *const c_uchar, size: c_uint, width: c_uint, height: c_uint) {
|
||||
if !data.is_null() && size > 0 {
|
||||
let slice = unsafe { std::slice::from_raw_parts(data, size as usize) };
|
||||
let mut buffer = FRAME_BUFFER.lock().unwrap();
|
||||
buffer.update(slice, width, height);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init() {
|
||||
let mut initialized = INITIALIZED.lock().unwrap();
|
||||
if !*initialized {
|
||||
unsafe {
|
||||
ios_capture_init();
|
||||
ios_capture_set_callback(Some(frame_callback));
|
||||
}
|
||||
*initialized = true;
|
||||
log::info!("iOS screen capture initialized");
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start_capture() -> bool {
|
||||
init();
|
||||
unsafe { ios_capture_start() }
|
||||
}
|
||||
|
||||
pub fn stop_capture() {
|
||||
unsafe { ios_capture_stop() }
|
||||
}
|
||||
|
||||
pub fn is_capturing() -> bool {
|
||||
unsafe { ios_capture_is_active() }
|
||||
}
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
static ref TEMP_BUFFER: Mutex<Vec<u8>> = Mutex::new(vec![0u8; 4096 * 2160 * 4]);
|
||||
}
|
||||
|
||||
pub fn get_frame() -> Option<(Vec<u8>, u32, u32)> {
|
||||
// Try callback-based frame first
|
||||
if let Ok(mut buffer) = FRAME_BUFFER.try_lock() {
|
||||
if let Some(frame) = buffer.get() {
|
||||
return Some(frame);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to polling
|
||||
let mut width: c_uint = 0;
|
||||
let mut height: c_uint = 0;
|
||||
|
||||
let mut temp_buffer = TEMP_BUFFER.lock().unwrap();
|
||||
|
||||
let size = unsafe {
|
||||
ios_capture_get_frame(
|
||||
temp_buffer.as_mut_ptr(),
|
||||
temp_buffer.len() as c_uint,
|
||||
&mut width,
|
||||
&mut height,
|
||||
)
|
||||
};
|
||||
|
||||
if size > 0 && width > 0 && height > 0 {
|
||||
// Only allocate new Vec for the actual data
|
||||
let frame_data = temp_buffer[..size as usize].to_vec();
|
||||
Some((frame_data, width, height))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_display_info() -> (u32, u32) {
|
||||
let mut width: c_uint = 0;
|
||||
let mut height: c_uint = 0;
|
||||
unsafe {
|
||||
ios_capture_get_display_info(&mut width, &mut height);
|
||||
}
|
||||
(width, height)
|
||||
}
|
||||
|
||||
pub fn show_broadcast_picker() {
|
||||
unsafe {
|
||||
ios_capture_show_broadcast_picker();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_broadcasting() -> bool {
|
||||
unsafe {
|
||||
ios_capture_is_broadcasting()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn enable_audio(mic: bool, app_audio: bool) {
|
||||
unsafe {
|
||||
ios_capture_set_audio_enabled(mic, app_audio);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_audio_callback(callback: Option<extern "C" fn(*const c_uchar, c_uint, bool)>) {
|
||||
unsafe {
|
||||
ios_capture_set_audio_callback(callback);
|
||||
}
|
||||
}
|
||||
179
libs/scrap/src/ios/mod.rs
Normal file
179
libs/scrap/src/ios/mod.rs
Normal file
@@ -0,0 +1,179 @@
|
||||
pub mod ffi;
|
||||
|
||||
use std::io;
|
||||
use std::time::{Duration, Instant};
|
||||
use crate::{would_block_if_equal, TraitCapturer};
|
||||
|
||||
pub struct Capturer {
|
||||
width: usize,
|
||||
height: usize,
|
||||
display: Display,
|
||||
frame_data: Vec<u8>,
|
||||
last_frame: Vec<u8>,
|
||||
}
|
||||
|
||||
impl Capturer {
|
||||
pub fn new(display: Display) -> io::Result<Capturer> {
|
||||
ffi::init();
|
||||
|
||||
let (width, height) = ffi::get_display_info();
|
||||
|
||||
if !ffi::start_capture() {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::PermissionDenied,
|
||||
"Failed to start iOS screen capture. User permission may be required."
|
||||
));
|
||||
}
|
||||
|
||||
Ok(Capturer {
|
||||
width: width as usize,
|
||||
height: height as usize,
|
||||
display,
|
||||
frame_data: Vec::new(),
|
||||
last_frame: Vec::new(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn width(&self) -> usize {
|
||||
self.width
|
||||
}
|
||||
|
||||
pub fn height(&self) -> usize {
|
||||
self.height
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Capturer {
|
||||
fn drop(&mut self) {
|
||||
ffi::stop_capture();
|
||||
}
|
||||
}
|
||||
|
||||
impl TraitCapturer for Capturer {
|
||||
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<crate::Frame<'a>> {
|
||||
let start = Instant::now();
|
||||
|
||||
loop {
|
||||
if let Some((data, width, height)) = ffi::get_frame() {
|
||||
// Update dimensions if they changed
|
||||
self.width = width as usize;
|
||||
self.height = height as usize;
|
||||
|
||||
// Check if frame is different from last
|
||||
// would_block_if_equal returns Err when frames are EQUAL (should block)
|
||||
match would_block_if_equal(&self.last_frame, &data) {
|
||||
Ok(_) => {
|
||||
// Frame is different, use it
|
||||
self.frame_data = data;
|
||||
std::mem::swap(&mut self.frame_data, &mut self.last_frame);
|
||||
|
||||
let pixel_buffer = PixelBuffer {
|
||||
data: &self.last_frame,
|
||||
width: self.width,
|
||||
height: self.height,
|
||||
stride: vec![self.width * 4],
|
||||
};
|
||||
|
||||
return Ok(crate::Frame::PixelBuffer(pixel_buffer));
|
||||
}
|
||||
Err(_) => {
|
||||
// Frame is same as last, skip
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if start.elapsed() >= timeout {
|
||||
return Err(io::ErrorKind::WouldBlock.into());
|
||||
}
|
||||
|
||||
// Small sleep to avoid busy waiting
|
||||
std::thread::sleep(Duration::from_millis(1));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PixelBuffer<'a> {
|
||||
data: &'a [u8],
|
||||
width: usize,
|
||||
height: usize,
|
||||
stride: Vec<usize>,
|
||||
}
|
||||
|
||||
impl<'a> crate::TraitPixelBuffer for PixelBuffer<'a> {
|
||||
fn data(&self) -> &[u8] {
|
||||
self.data
|
||||
}
|
||||
|
||||
fn width(&self) -> usize {
|
||||
self.width
|
||||
}
|
||||
|
||||
fn height(&self) -> usize {
|
||||
self.height
|
||||
}
|
||||
|
||||
fn stride(&self) -> Vec<usize> {
|
||||
self.stride.clone()
|
||||
}
|
||||
|
||||
fn pixfmt(&self) -> crate::Pixfmt {
|
||||
crate::Pixfmt::RGBA
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct Display {
|
||||
pub primary: bool,
|
||||
}
|
||||
|
||||
impl Display {
|
||||
pub fn primary() -> io::Result<Display> {
|
||||
Ok(Display { primary: true })
|
||||
}
|
||||
|
||||
pub fn all() -> io::Result<Vec<Display>> {
|
||||
Ok(vec![Display { primary: true }])
|
||||
}
|
||||
|
||||
pub fn width(&self) -> usize {
|
||||
let (width, _) = ffi::get_display_info();
|
||||
width as usize
|
||||
}
|
||||
|
||||
pub fn height(&self) -> usize {
|
||||
let (_, height) = ffi::get_display_info();
|
||||
height as usize
|
||||
}
|
||||
|
||||
pub fn name(&self) -> String {
|
||||
"iOS Display".to_string()
|
||||
}
|
||||
|
||||
pub fn is_online(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
pub fn is_primary(&self) -> bool {
|
||||
self.primary
|
||||
}
|
||||
|
||||
pub fn origin(&self) -> (i32, i32) {
|
||||
(0, 0)
|
||||
}
|
||||
|
||||
pub fn id(&self) -> usize {
|
||||
1
|
||||
}
|
||||
}
|
||||
|
||||
/// Screen capture is available on all iOS versions this crate targets.
pub fn is_supported() -> bool {
    true
}

/// ReplayKit composites the pointer/touch indicator into the frame itself.
pub fn is_cursor_embedded() -> bool {
    true
}

/// Magnification (partial-screen capture) is not supported on iOS.
pub fn is_mag_supported() -> bool {
    false
}
|
||||
56
libs/scrap/src/ios/native/ScreenCapture.h
Normal file
56
libs/scrap/src/ios/native/ScreenCapture.h
Normal file
@@ -0,0 +1,56 @@
|
||||
#ifndef SCREEN_CAPTURE_H
#define SCREEN_CAPTURE_H

#include <stdint.h>
#include <stdbool.h>

#ifdef __cplusplus
extern "C" {
#endif

/* One-time initialization of the capture handler. Safe to call repeatedly. */
void ios_capture_init(void);

/* Begins in-app screen capture. Returns false when capture is already
 * running or the screen recorder is unavailable. */
bool ios_capture_start(void);

/* Stops an in-progress capture session. */
void ios_capture_stop(void);

/* True while an in-app capture session is active. */
bool ios_capture_is_active(void);

/* Copies the latest RGBA frame into `buffer`.
 * Returns the frame size in bytes, or 0 when no frame is available or
 * `buffer_size` is smaller than width * height * 4. */
uint32_t ios_capture_get_frame(uint8_t* buffer, uint32_t buffer_size,
                               uint32_t* out_width, uint32_t* out_height);

/* Reports the main screen size in physical pixels. */
void ios_capture_get_display_info(uint32_t* width, uint32_t* height);

/* Invoked for every new frame: RGBA pixel data, byte count, dimensions. */
typedef void (*frame_callback_t)(const uint8_t* data, uint32_t size,
                                 uint32_t width, uint32_t height);

/* Registers (or clears, with NULL) the per-frame callback. */
void ios_capture_set_callback(frame_callback_t callback);

/* Shows the system broadcast picker for system-wide capture (iOS 12+). */
void ios_capture_show_broadcast_picker(void);

/* True once the broadcast extension has delivered at least one frame. */
bool ios_capture_is_broadcasting(void);

/* Enables microphone and/or in-app audio capture (applies on next start). */
void ios_capture_set_audio_enabled(bool enable_mic, bool enable_app_audio);

/* Invoked with raw PCM audio; `is_mic` distinguishes microphone data. */
typedef void (*audio_callback_t)(const uint8_t* data, uint32_t size, bool is_mic);
void ios_capture_set_audio_callback(audio_callback_t callback);

#ifdef __cplusplus
}
#endif

#endif // SCREEN_CAPTURE_H
|
||||
455
libs/scrap/src/ios/native/ScreenCapture.m
Normal file
455
libs/scrap/src/ios/native/ScreenCapture.m
Normal file
@@ -0,0 +1,455 @@
|
||||
#import <Foundation/Foundation.h>
|
||||
#import <ReplayKit/ReplayKit.h>
|
||||
#import <UIKit/UIKit.h>
|
||||
#import "ScreenCapture.h"
|
||||
|
||||
/// Singleton that owns ReplayKit in-app capture, the CFMessagePort bridge
/// from the broadcast extension, and the shared frame/audio state exposed
/// to Rust through the C interface in this file.
@interface ScreenCaptureHandler : NSObject <RPScreenRecorderDelegate>
@property (nonatomic, strong) RPScreenRecorder *screenRecorder;
@property (nonatomic, assign) BOOL isCapturing;
// Latest RGBA frame; all access must be @synchronized on this object.
@property (nonatomic, strong) NSMutableData *frameBuffer;
@property (nonatomic, assign) CGSize lastFrameSize;
// Serial queue on which frames are converted and callbacks fire.
@property (nonatomic, strong) dispatch_queue_t processingQueue;
// C function pointer handed in from Rust (not an object, hence `assign`).
@property (nonatomic, assign) frame_callback_t frameCallback;
// Local message port receiving frames from the broadcast extension.
@property (nonatomic, assign) CFMessagePortRef localPort;
@property (nonatomic, assign) BOOL isBroadcasting;
@property (nonatomic, assign) BOOL enableMicAudio;
@property (nonatomic, assign) BOOL enableAppAudio;
// C function pointer handed in from Rust (not an object, hence `assign`).
@property (nonatomic, assign) audio_callback_t audioCallback;
@property (nonatomic, assign) UIInterfaceOrientation lastOrientation;
@end
|
||||
|
||||
@implementation ScreenCaptureHandler
|
||||
|
||||
static ScreenCaptureHandler *sharedHandler = nil;

/// Returns the process-wide capture handler, creating it exactly once.
+ (instancetype)sharedInstance {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        sharedHandler = [[ScreenCaptureHandler alloc] init];
    });
    return sharedHandler;
}
|
||||
|
||||
/// Designated initializer: wires up the shared recorder, allocates the
/// frame buffer, creates the message port for the broadcast extension,
/// and subscribes to orientation-change notifications.
- (instancetype)init {
    self = [super init];
    if (!self) {
        return nil;
    }

    _screenRecorder = [RPScreenRecorder sharedRecorder];
    _screenRecorder.delegate = self;
    _isCapturing = NO;
    _frameBuffer = [NSMutableData dataWithCapacity:1920 * 1080 * 4]; // initial capacity only; grows on demand
    _lastFrameSize = CGSizeZero;
    _processingQueue = dispatch_queue_create("com.rustdesk.screencapture", DISPATCH_QUEUE_SERIAL);
    _isBroadcasting = NO;
    _lastOrientation = UIInterfaceOrientationUnknown;

    // Audio defaults: microphone OFF for privacy; app audio would only
    // capture RustDesk's own output, so it is off as well.
    _enableMicAudio = NO;
    _enableAppAudio = NO;

    [self setupMessagePort];

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(orientationDidChange:)
                                                 name:UIDeviceOrientationDidChangeNotification
                                               object:nil];

    return self;
}
|
||||
|
||||
/// Creates the local CFMessagePort the broadcast extension sends frames
/// to, and schedules it on the main run loop.
- (void)setupMessagePort {
    NSString *portName = @"com.rustdesk.screencast.port";

    // `self` goes through the port context unretained; the port never
    // outlives this process-lifetime singleton.
    CFMessagePortContext context = {0, (__bridge void *)self, NULL, NULL, NULL};
    Boolean shouldFreeInfo = false;
    self.localPort = CFMessagePortCreateLocal(kCFAllocatorDefault,
                                              (__bridge CFStringRef)portName,
                                              messagePortCallback,
                                              &context,
                                              &shouldFreeInfo);
    if (!self.localPort) {
        return;
    }

    CFRunLoopSourceRef source = CFMessagePortCreateRunLoopSource(kCFAllocatorDefault, self.localPort, 0);
    if (source) {
        CFRunLoopAddSource(CFRunLoopGetMain(), source, kCFRunLoopCommonModes);
        CFRelease(source);
    }
}
|
||||
|
||||
/// Tears down notification observation and the message port. As a
/// process-lifetime singleton this rarely runs, but clean up anyway.
- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];

    if (self.localPort) {
        CFMessagePortInvalidate(self.localPort);
        CFRelease(self.localPort);
        self.localPort = NULL;
    }
}
|
||||
|
||||
/// Tracks interface orientation for diagnostics; the capture pipeline
/// itself picks up new dimensions from the next delivered frame.
- (void)orientationDidChange:(NSNotification *)notification {
    // NOTE(review): -statusBarOrientation is deprecated on modern iOS;
    // consider the window scene's interfaceOrientation when the deployment
    // target is raised.
    UIInterfaceOrientation current = [[UIApplication sharedApplication] statusBarOrientation];
    if (current == self.lastOrientation) {
        return;
    }
    self.lastOrientation = current;
    NSLog(@"Orientation changed to: %ld", (long)current);
}
|
||||
|
||||
/// CFMessagePort callback receiving data from the broadcast extension:
/// msgid 1 carries a FrameHeader (dimensions), msgid 2 the RGBA pixels.
///
/// Two fixes over the original:
/// 1. The header read is length-checked — a short/malformed message would
///    previously make CFDataGetBytes read out of bounds.
/// 2. The pixel payload is copied BEFORE the async hop: the CFDataRef is
///    only guaranteed valid for the duration of this callback, and the
///    original bridged it straight into a dispatch_async block.
static CFDataRef messagePortCallback(CFMessagePortRef local, SInt32 msgid, CFDataRef data, void *info) {
    ScreenCaptureHandler *handler = (__bridge ScreenCaptureHandler *)info;

    if (msgid == 1 && data) {
        // Frame header: width / height / payload size.
        struct FrameHeader {
            uint32_t width;
            uint32_t height;
            uint32_t dataSize;
        } header;

        // Guard against short messages before copying bytes out.
        if (CFDataGetLength(data) < (CFIndex)sizeof(header)) {
            return NULL;
        }
        CFDataGetBytes(data, CFRangeMake(0, sizeof(header)), (UInt8 *)&header);
        handler.lastFrameSize = CGSizeMake(header.width, header.height);

    } else if (msgid == 2 && data) {
        // Copy synchronously: the port owns `data` and may free it as soon
        // as this callback returns.
        NSData *payload = [(__bridge NSData *)data copy];
        dispatch_async(handler.processingQueue, ^{
            @synchronized(handler.frameBuffer) {
                [handler.frameBuffer setData:payload];
                handler.isBroadcasting = YES;

                // Forward to Rust if a callback is registered.
                if (handler.frameCallback) {
                    handler.frameCallback((const uint8_t *)handler.frameBuffer.bytes,
                                          (uint32_t)handler.frameBuffer.length,
                                          (uint32_t)handler.lastFrameSize.width,
                                          (uint32_t)handler.lastFrameSize.height);
                }
            }
        });
    }

    return NULL;
}
|
||||
|
||||
/// Starts ReplayKit in-app capture. Returns NO when capture is already
/// running or the recorder is unavailable; otherwise YES immediately —
/// actual success/failure arrives asynchronously in the completion
/// handler, which updates `isCapturing`.
- (BOOL)startCapture {
    if (self.isCapturing || ![self.screenRecorder isAvailable]) {
        return NO;
    }

    // Microphone state must be configured before starting and cannot be
    // changed mid-session; toggling it requires a stop/start cycle.
    self.screenRecorder.microphoneEnabled = self.enableMicAudio;

    __weak typeof(self) weakSelf = self;

    [self.screenRecorder startCaptureWithHandler:^(CMSampleBufferRef sampleBuffer, RPSampleBufferType bufferType, NSError *error) {
        if (error) {
            NSLog(@"Screen capture error: %@", error.localizedDescription);
            return;
        }

        switch (bufferType) {
            case RPSampleBufferTypeVideo:
                [weakSelf processSampleBuffer:sampleBuffer];
                break;

            case RPSampleBufferTypeAudioApp:
                // Ignored: iOS only surfaces this app's own audio here,
                // which is not useful for remote desktop.
                break;

            case RPSampleBufferTypeAudioMic:
                if (weakSelf.enableMicAudio && weakSelf.audioCallback) {
                    [weakSelf processAudioSampleBuffer:sampleBuffer isMic:YES];
                }
                break;

            default:
                break;
        }
    } completionHandler:^(NSError *error) {
        if (error) {
            NSLog(@"Failed to start capture: %@", error.localizedDescription);
            weakSelf.isCapturing = NO;
        } else {
            weakSelf.isCapturing = YES;
        }
    }];

    return YES;
}
|
||||
|
||||
/// Stops the ReplayKit session. `isCapturing` is cleared once the async
/// stop completes — even on error, since the session is gone either way.
- (void)stopCapture {
    if (!self.isCapturing) {
        return;
    }

    __weak typeof(self) weakSelf = self;
    [self.screenRecorder stopCaptureWithHandler:^(NSError *error) {
        if (error) {
            NSLog(@"Error stopping capture: %@", error.localizedDescription);
        }
        weakSelf.isCapturing = NO;
    }];
}
|
||||
|
||||
/// Converts a ReplayKit video sample to tightly-packed RGBA and publishes
/// it via `frameBuffer` / `frameCallback`.
///
/// Fix over the original: the pixel buffer is explicitly retained across
/// the async hop onto the processing queue. ReplayKit only guarantees the
/// sample's buffers are valid for the duration of the capture handler, so
/// using `imageBuffer` inside dispatch_async without a retain risked
/// use-after-free. Also merges the two back-to-back @synchronized blocks
/// and hoists the per-pixel bounds check (row pointers are derived from
/// bytesPerRow, which CoreVideo guarantees is >= width * 4).
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        return;
    }

    // Keep the buffer alive until the async conversion finishes.
    CVPixelBufferRetain(imageBuffer);
    dispatch_async(self.processingQueue, ^{
        CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        const uint8_t *src = (const uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);

        self.lastFrameSize = CGSizeMake(width, height);
        size_t requiredSize = width * height * 4;

        @synchronized(self.frameBuffer) {
            // Grow the shared buffer if this frame is larger than before.
            if (self.frameBuffer.length < requiredSize) {
                [self.frameBuffer setLength:requiredSize];
            }
            uint8_t *dst = (uint8_t *)self.frameBuffer.mutableBytes;

            OSType pixelFormat = CVPixelBufferGetPixelFormatType(imageBuffer);
            if (pixelFormat == kCVPixelFormatType_32BGRA) {
                // Swap B and R channels row by row. Source rows may be
                // padded (bytesPerRow >= width * 4), so advance by
                // bytesPerRow on the source side only.
                for (size_t y = 0; y < height; y++) {
                    const uint8_t *srcRow = src + y * bytesPerRow;
                    uint8_t *dstRow = dst + y * width * 4;
                    for (size_t x = 0; x < width; x++) {
                        dstRow[x * 4 + 0] = srcRow[x * 4 + 2]; // R
                        dstRow[x * 4 + 1] = srcRow[x * 4 + 1]; // G
                        dstRow[x * 4 + 2] = srcRow[x * 4 + 0]; // B
                        dstRow[x * 4 + 3] = srcRow[x * 4 + 3]; // A
                    }
                }
            } else {
                // Assume the data is already RGBA; copy what fits.
                memcpy(dst, src, MIN(requiredSize, bytesPerRow * height));
            }

            // Forward to Rust if a callback is registered.
            if (self.frameCallback) {
                self.frameCallback(dst, (uint32_t)requiredSize, (uint32_t)width, (uint32_t)height);
            }
        }

        CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
        CVPixelBufferRelease(imageBuffer);
    });
}
|
||||
|
||||
/// Returns an immutable snapshot of the most recent frame's pixel data.
- (NSData *)getCurrentFrame {
    @synchronized(self.frameBuffer) {
        return [self.frameBuffer copy];
    }
}
|
||||
|
||||
/// Extracts raw PCM bytes from a ReplayKit audio sample and forwards them
/// to the registered audio callback. `isMic` distinguishes microphone
/// audio from app audio.
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer isMic:(BOOL)isMic {
    // Inspect the stream format first; only linear PCM is handled.
    CMFormatDescriptionRef formatDesc = CMSampleBufferGetFormatDescription(sampleBuffer);
    const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDesc);
    if (!asbd) {
        NSLog(@"Failed to get audio format description");
        return;
    }
    if (asbd->mFormatID != kAudioFormatLinearPCM) {
        NSLog(@"Unsupported audio format: %u", asbd->mFormatID);
        return;
    }

    // One-time diagnostic dump of the negotiated format.
    static BOOL loggedFormat = NO;
    if (!loggedFormat) {
        NSLog(@"Audio format - Sample rate: %.0f, Channels: %d, Bits per channel: %d, Format: %u, Flags: %u",
              asbd->mSampleRate, asbd->mChannelsPerFrame, asbd->mBitsPerChannel,
              asbd->mFormatID, asbd->mFormatFlags);
        loggedFormat = YES;
    }

    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    if (!blockBuffer) {
        // No contiguous data buffer: fall back to the AudioBufferList path,
        // which retains a block buffer we must release ourselves.
        AudioBufferList audioBufferList;
        size_t bufferListSizeNeededOut;
        OSStatus status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
            sampleBuffer,
            &bufferListSizeNeededOut,
            &audioBufferList,
            sizeof(audioBufferList),
            NULL,
            NULL,
            kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
            &blockBuffer
        );
        if (status != noErr || audioBufferList.mNumberBuffers == 0) {
            NSLog(@"Failed to get audio buffer list: %d", status);
            return;
        }

        // Forward only the first buffer (assumes non-interleaved layout —
        // TODO confirm against the formats ReplayKit actually delivers).
        AudioBuffer *audioBuffer = &audioBufferList.mBuffers[0];
        if (self.audioCallback && audioBuffer->mData && audioBuffer->mDataByteSize > 0) {
            self.audioCallback((const uint8_t *)audioBuffer->mData,
                               (uint32_t)audioBuffer->mDataByteSize, isMic);
        }

        if (blockBuffer) {
            CFRelease(blockBuffer);
        }
        return;
    }

    // Contiguous path: hand the PCM bytes straight to the callback.
    size_t lengthAtOffset;
    size_t totalLength;
    char *dataPointer;
    OSStatus status = CMBlockBufferGetDataPointer(blockBuffer, 0, &lengthAtOffset, &totalLength, &dataPointer);
    if (status != kCMBlockBufferNoErr || !dataPointer) {
        return;
    }

    if (self.audioCallback) {
        // Raw PCM; the Rust side converts based on the format logged above.
        self.audioCallback((const uint8_t *)dataPointer, (uint32_t)totalLength, isMic);
    }
}
|
||||
|
||||
#pragma mark - RPScreenRecorderDelegate

/// Logged for diagnostics; availability can flip e.g. during mirroring.
- (void)screenRecorderDidChangeAvailability:(RPScreenRecorder *)screenRecorder {
    NSLog(@"Screen recorder availability changed: %@", screenRecorder.isAvailable ? @"Available" : @"Not available");
}

/// ReplayKit stopped recording on its own (error, user action, etc.);
/// mirror that into our state so callers see capture as inactive.
- (void)screenRecorder:(RPScreenRecorder *)screenRecorder didStopRecordingWithPreviewViewController:(RPPreviewViewController *)previewViewController error:(NSError *)error {
    self.isCapturing = NO;
    if (error) {
        NSLog(@"Recording stopped with error: %@", error.localizedDescription);
    }
}
|
||||
|
||||
@end
|
||||
|
||||
// C interface implementation
|
||||
|
||||
/// Forces creation of the shared handler (message port, observers, etc.).
void ios_capture_init(void) {
    [ScreenCaptureHandler sharedInstance];
}

/// Starts in-app capture; false when unavailable or already running.
bool ios_capture_start(void) {
    return [[ScreenCaptureHandler sharedInstance] startCapture];
}

/// Stops in-app capture.
void ios_capture_stop(void) {
    [[ScreenCaptureHandler sharedInstance] stopCapture];
}

/// True while an in-app capture session is active.
bool ios_capture_is_active(void) {
    return [ScreenCaptureHandler sharedInstance].isCapturing;
}
|
||||
|
||||
/// Copies the latest frame into `buffer` as RGBA.
/// Returns the number of bytes copied, or 0 when no frame is available,
/// `buffer` is too small, or the stored buffer does not yet hold a full
/// frame for the recorded dimensions.
///
/// Fix over the original: the memcpy of `frameSize` bytes ran without
/// checking that `frameBuffer` actually contains that many bytes. In the
/// broadcast path, dimensions (msgid 1) and pixels (msgid 2) arrive in
/// separate messages, so they can briefly disagree — which made the
/// original read past the end of the buffer.
uint32_t ios_capture_get_frame(uint8_t* buffer, uint32_t buffer_size,
                               uint32_t* out_width, uint32_t* out_height) {
    ScreenCaptureHandler *handler = [ScreenCaptureHandler sharedInstance];

    @synchronized(handler.frameBuffer) {
        if (handler.frameBuffer.length == 0 || handler.lastFrameSize.width == 0) {
            return 0;
        }

        uint32_t width = (uint32_t)handler.lastFrameSize.width;
        uint32_t height = (uint32_t)handler.lastFrameSize.height;
        uint32_t frameSize = width * height * 4;

        if (buffer_size < frameSize) {
            return 0;
        }
        // Never read past the stored data if dimensions and payload are
        // momentarily out of sync.
        if (handler.frameBuffer.length < frameSize) {
            return 0;
        }

        memcpy(buffer, handler.frameBuffer.bytes, frameSize);

        if (out_width) *out_width = width;
        if (out_height) *out_height = height;

        return frameSize;
    }
}
|
||||
|
||||
/// Reports the main screen's size in physical pixels (points x scale).
void ios_capture_get_display_info(uint32_t* width, uint32_t* height) {
    UIScreen *mainScreen = [UIScreen mainScreen];
    CGFloat scale = mainScreen.scale;
    CGSize screenSize = mainScreen.bounds.size;

    if (width) *width = (uint32_t)(screenSize.width * scale);
    if (height) *height = (uint32_t)(screenSize.height * scale);
}

/// Registers (or clears) the Rust per-frame callback.
void ios_capture_set_callback(frame_callback_t callback) {
    [ScreenCaptureHandler sharedInstance].frameCallback = callback;
}
|
||||
|
||||
/// Presents the system broadcast picker (iOS 12+) by adding an off-screen
/// RPSystemBroadcastPickerView and synthesizing a tap on its button —
/// Apple provides no API to present the picker directly.
void ios_capture_show_broadcast_picker(void) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (@available(iOS 12.0, *)) {
            RPSystemBroadcastPickerView *picker = [[RPSystemBroadcastPickerView alloc] init];
            picker.preferredExtension = @"com.carriez.rustdesk.BroadcastExtension";
            picker.showsMicrophoneButton = NO;

            UIWindow *window = UIApplication.sharedApplication.windows.firstObject;
            if (!window) {
                return;
            }

            // Parked off-screen; only the system sheet it triggers is visible.
            picker.frame = CGRectMake(-100, -100, 100, 100);
            [window addSubview:picker];

            // Give the picker a beat to build its subviews, then tap its button.
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                for (UIView *subview in picker.subviews) {
                    if ([subview isKindOfClass:[UIButton class]]) {
                        [(UIButton *)subview sendActionsForControlEvents:UIControlEventTouchUpInside];
                        break;
                    }
                }

                // Remove the helper view once the system UI has taken over.
                dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                    [picker removeFromSuperview];
                });
            });
        }
    });
}
|
||||
|
||||
/// True once at least one frame has arrived from the broadcast extension.
bool ios_capture_is_broadcasting(void) {
    return [ScreenCaptureHandler sharedInstance].isBroadcasting;
}

/// Stores audio capture preferences; applied on the next capture start.
void ios_capture_set_audio_enabled(bool enable_mic, bool enable_app_audio) {
    ScreenCaptureHandler *handler = [ScreenCaptureHandler sharedInstance];
    handler.enableMicAudio = enable_mic;
    handler.enableAppAudio = enable_app_audio;
}

/// Registers (or clears) the Rust audio callback.
void ios_capture_set_audio_callback(audio_callback_t callback) {
    [ScreenCaptureHandler sharedInstance].audioCallback = callback;
}
|
||||
Reference in New Issue
Block a user