
Commit

fix: Camera shader issue for YUYV
richiemcilroy committed Jan 7, 2025
1 parent 1637274 commit 47570a1
Showing 4 changed files with 195 additions and 56 deletions.
4 changes: 2 additions & 2 deletions apps/desktop/src-tauri/src/lib.rs
@@ -912,8 +912,8 @@ async fn copy_video_to_clipboard(

#[derive(Serialize, Deserialize, specta::Type)]
pub struct VideoRecordingMetadata {
duration: f64,
size: f64,
pub duration: f64,
pub size: f64,
}

#[tauri::command]
6 changes: 6 additions & 0 deletions crates/gpu-converters/src/yuyv_rgba/shader.wgsl
@@ -0,0 +1,6 @@
// Each texel stores two Y’s => two output RGBA pixels.
let out_coords = gid.xy * vec2<u32>(2, 1);

textureStore(rgba_output, out_coords, vec4<f32>(r1, g1, b1, 1.0));
textureStore(rgba_output, out_coords + vec2<u32>(1, 0), vec4<f32>(r2, g2, b2, 1.0));
}
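The hunk above shows only the tail of the new compute shader: each invocation consumes one packed 4:2:2 texel and writes two RGBA output pixels. For reference, here is a minimal CPU-side sketch of the same per-macropixel unpacking, assuming BT.601 full-range coefficients; note that byte order is what separates YUYV (Y0 U Y1 V) from UYVY (U Y0 V Y1). The function names are illustrative and not part of the commit.

// Hypothetical CPU-side reference for what one shader invocation computes.
// YUYV packs bytes as [Y0, U, Y1, V]; UYVY packs them as [U, Y0, V, Y1].
fn yuyv_macropixel_to_rgba(chunk: [u8; 4]) -> ([u8; 4], [u8; 4]) {
    let [y0, u, y1, v] = chunk; // YUYV ordering assumed here
    (yuv_to_rgba(y0, u, v), yuv_to_rgba(y1, u, v))
}

// BT.601 full-range YCbCr -> RGB; the shader's exact coefficients may differ.
fn yuv_to_rgba(y: u8, u: u8, v: u8) -> [u8; 4] {
    let (y, u, v) = (y as f32, u as f32 - 128.0, v as f32 - 128.0);
    let r = (y + 1.402 * v).clamp(0.0, 255.0) as u8;
    let g = (y - 0.344_136 * u - 0.714_136 * v).clamp(0.0, 255.0) as u8;
    let b = (y + 1.772 * u).clamp(0.0, 255.0) as u8;
    [r, g, b, 255]
}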
35 changes: 35 additions & 0 deletions crates/media/src/data/video.rs
@@ -0,0 +1,35 @@
pub struct FFVideo(ffmpeg::frame::Video);

impl FFVideo {
pub fn new(format: ffmpeg::format::Pixel, width: u32, height: u32) -> Self {
Self(ffmpeg::frame::Video::new(format, width, height))
}

pub fn data(&self, index: usize) -> &[u8] {
self.0.data(index)
}

pub fn data_mut(&mut self, index: usize) -> &mut [u8] {
self.0.data_mut(index)
}

pub fn stride(&self, index: usize) -> usize {
self.0.stride(index)
}

pub fn as_mut_ptr(&mut self) -> *mut ffmpeg_sys_next::AVFrame {
self.0.as_mut_ptr()
}

pub fn as_ptr(&self) -> *const ffmpeg_sys_next::AVFrame {
self.0.as_ptr()
}

pub fn convert_with(
&mut self,
context: &mut ffmpeg::software::scaling::Context,
output: &mut FFVideo,
) -> Result<(), ffmpeg::Error> {
unsafe { context.run(&self.0, &mut output.0) }
}
}
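FFVideo is a thin newtype over ffmpeg::frame::Video exposing just the frame operations the camera feed needs. Below is a minimal sketch of how it pairs with ffmpeg::software::converter, the same constructor camera.rs calls further down; the function name and the tightly packed input assumption are illustrative, not part of the commit.

// Sketch only: assumes the ffmpeg-next crate is in scope as `ffmpeg`,
// as it is in crates/media, and that FFVideo (above) is in scope.
fn packed_422_to_rgba(width: u32, height: u32, packed: &[u8]) -> Result<FFVideo, ffmpeg::Error> {
    // Same constructor camera.rs uses: (dimensions, input format, output format).
    let mut ctx = ffmpeg::software::converter(
        (width, height),
        ffmpeg::format::Pixel::UYVY422,
        ffmpeg::format::Pixel::RGBA,
    )?;

    let mut input = FFVideo::new(ffmpeg::format::Pixel::UYVY422, width, height);
    // Assumes the source rows are tightly packed and stride(0) == width * 2;
    // the real code in camera.rs copies row by row to respect stride padding.
    input.data_mut(0)[..packed.len()].copy_from_slice(packed);

    let mut output = FFVideo::new(ffmpeg::format::Pixel::RGBA, width, height);
    input.convert_with(&mut ctx, &mut output)?;
    Ok(output)
}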
206 changes: 152 additions & 54 deletions crates/media/src/feeds/camera.rs
@@ -84,7 +84,7 @@ impl CameraFeed
.into_iter()
.map(|i| i.human_name().to_string())
.collect::<Vec<String>>(),
Err(_) => Vec::new()
Err(_) => Vec::new(),
}
}

@@ -144,7 +144,7 @@ fn create_camera(info: &CameraInfo) -> Result<Camera, MediaError> {
RequestedFormatType::AbsoluteHighestFrameRate,
&[FrameFormat::YUYV],
);

#[cfg(feature = "debug-logging")]
trace!("Requested camera format: {:?}", format);

@@ -160,10 +160,10 @@ fn create_camera(info: &CameraInfo) -> Result<Camera, MediaError> {
}

let camera = Camera::new(index, format)?;

#[cfg(feature = "debug-logging")]
debug!("Created camera with format: {:?}", camera.camera_format());

Ok(camera)
}

@@ -226,7 +226,7 @@ fn run_camera_feed(
loop {
match control.try_recv() {
Err(TryRecvError::Disconnected) => break,
Err(TryRecvError::Empty) => {},
Err(TryRecvError::Empty) => {}
Ok(CameraControl::Shutdown) => break,
Ok(CameraControl::AttachRawConsumer(rgba_sender)) => {
maybe_raw_data = Some(rgba_sender);
@@ -364,68 +364,126 @@ impl FrameConverter {
let context = ffmpeg::software::converter(
(video_info.width, video_info.height),
if camera_format.format() == FrameFormat::YUYV {
ffmpeg::format::Pixel::YUYV422
ffmpeg::format::Pixel::UYVY422
} else {
video_info.pixel_format
},
ffmpeg::format::Pixel::RGBA,
).unwrap();
)
.unwrap();

Self {
video_info,
context,
format: camera_format.format(),
hw_converter: None, // Don't use hardware converters
hw_converter: None, // Don't use hardware converters
}
}

fn rgba(&mut self, buffer: &nokhwa::Buffer) -> Vec<u8> {
let resolution = buffer.resolution();

match self.format {
FrameFormat::YUYV => {
self.convert_with_ffmpeg(buffer, resolution)
}
_ => {
match &self.hw_converter {
Some(HwConverter::NV12(converter)) => {
converter.convert(
NV12Input::from_buffer(buffer.buffer(), resolution.width(), resolution.height()),
resolution.width(),
resolution.height(),
)
}
_ => {
self.convert_with_ffmpeg(buffer, resolution)
}
}
}
FrameFormat::YUYV => self.convert_with_ffmpeg(buffer, resolution),
_ => match &self.hw_converter {
Some(HwConverter::NV12(converter)) => converter.convert(
NV12Input::from_buffer(
buffer.buffer(),
resolution.width(),
resolution.height(),
),
resolution.width(),
resolution.height(),
),
_ => self.convert_with_ffmpeg(buffer, resolution),
},
}
}

fn convert_with_ffmpeg(&mut self, buffer: &nokhwa::Buffer, resolution: Resolution) -> Vec<u8> {
if self.format == FrameFormat::YUYV {
// For YUYV, we need to handle the conversion differently
let stride = resolution.width() as usize * 2; // YUYV uses 2 bytes per pixel

// Create input frame with YUYV format
let mut input_frame = FFVideo::new(ffmpeg::format::Pixel::YUYV422, resolution.width(), resolution.height());
input_frame.data_mut(0).copy_from_slice(buffer.buffer());

// Convert directly to RGBA
let mut rgba_frame = FFVideo::new(ffmpeg::format::Pixel::RGBA, resolution.width(), resolution.height());

let stride = resolution.width() as usize * 2; // YUYV uses 2 bytes per pixel
let src = buffer.buffer();

// Create input frame with YUYV format and copy data
let mut input_frame = FFVideo::new(
ffmpeg::format::Pixel::UYVY422,
resolution.width(),
resolution.height(),
);

// Copy data line by line
{
let dst_stride = input_frame.stride(0);
let dst = input_frame.data_mut(0);
for y in 0..resolution.height() as usize {
let src_offset = y * stride;
let dst_offset = y * dst_stride;
dst[dst_offset..dst_offset + stride]
.copy_from_slice(&src[src_offset..src_offset + stride]);
}
}

// Create output frame
let mut rgba_frame = FFVideo::new(
ffmpeg::format::Pixel::RGBA,
resolution.width(),
resolution.height(),
);

// Convert the frame
if self.context.run(&input_frame, &mut rgba_frame).is_ok() {
rgba_frame.data(0).to_vec()
} else {
vec![0; (resolution.width() * resolution.height() * 4) as usize]
}
} else {
// For other formats, use the normal conversion path
let stride = resolution.width() as usize * 4; // RGBA uses 4 bytes per pixel
let input_frame = self.video_info.wrap_frame(buffer.buffer(), 0, stride);

let mut rgba_frame = FFVideo::empty();
let stride = match self.format {
FrameFormat::NV12 => resolution.width() as usize,
FrameFormat::BGRA => resolution.width() as usize * 4,
FrameFormat::MJPEG => resolution.width() as usize * 4,
FrameFormat::GRAY => resolution.width() as usize,
FrameFormat::RAWRGB => resolution.width() as usize * 3,
_ => buffer.buffer_bytes().len() / resolution.height() as usize,
};

// Create input frame and copy data
let mut input_frame = FFVideo::new(
match self.format {
FrameFormat::NV12 => ffmpeg::format::Pixel::NV12,
FrameFormat::BGRA => ffmpeg::format::Pixel::BGRA,
FrameFormat::MJPEG => ffmpeg::format::Pixel::RGBA,
FrameFormat::GRAY => ffmpeg::format::Pixel::GRAY8,
FrameFormat::RAWRGB => ffmpeg::format::Pixel::RGB24,
_ => ffmpeg::format::Pixel::RGBA,
},
resolution.width(),
resolution.height(),
);

// Copy data line by line
{
let dst_stride = input_frame.stride(0);
let dst = input_frame.data_mut(0);
let src = buffer.buffer();
for y in 0..resolution.height() as usize {
let src_offset = y * stride;
let dst_offset = y * dst_stride;
dst[dst_offset..dst_offset + stride]
.copy_from_slice(&src[src_offset..src_offset + stride]);
}
}

// Create output frame
let mut rgba_frame = FFVideo::new(
ffmpeg::format::Pixel::RGBA,
resolution.width(),
resolution.height(),
);

// Convert the frame
if self.context.run(&input_frame, &mut rgba_frame).is_ok() {
rgba_frame.data(0).to_vec()
} else {
@@ -436,31 +494,71 @@ impl FrameConverter {

fn raw(&mut self, buffer: &nokhwa::Buffer) -> FFVideo {
let resolution = buffer.resolution();

if self.format == FrameFormat::YUYV {
// For YUYV, we need to handle the conversion differently
let stride = resolution.width() as usize * 2; // YUYV uses 2 bytes per pixel

// Create input frame with YUYV format
let mut input_frame = FFVideo::new(ffmpeg::format::Pixel::YUYV422, resolution.width(), resolution.height());
input_frame.data_mut(0).copy_from_slice(buffer.buffer());
let stride = resolution.width() as usize * 2; // YUYV uses 2 bytes per pixel
let src = buffer.buffer();

// Create input frame with YUYV format and copy data
let mut input_frame = FFVideo::new(
ffmpeg::format::Pixel::UYVY422,
resolution.width(),
resolution.height(),
);

// Copy data line by line
{
let dst_stride = input_frame.stride(0);
let dst = input_frame.data_mut(0);
for y in 0..resolution.height() as usize {
let src_offset = y * stride;
let dst_offset = y * dst_stride;
dst[dst_offset..dst_offset + stride]
.copy_from_slice(&src[src_offset..src_offset + stride]);
}
}

input_frame
} else {
// For other formats, use the normal conversion path
let stride = match self.format {
FrameFormat::NV12 => resolution.width() as usize, // 1 byte per pixel for Y plane
FrameFormat::BGRA => resolution.width() as usize * 4, // 4 bytes per pixel
FrameFormat::MJPEG => resolution.width() as usize * 4, // 4 bytes per pixel
FrameFormat::GRAY => resolution.width() as usize, // 1 byte per pixel
FrameFormat::RAWRGB => resolution.width() as usize * 3, // 3 bytes per pixel
FrameFormat::NV12 => resolution.width() as usize,
FrameFormat::BGRA => resolution.width() as usize * 4,
FrameFormat::MJPEG => resolution.width() as usize * 4,
FrameFormat::GRAY => resolution.width() as usize,
FrameFormat::RAWRGB => resolution.width() as usize * 3,
_ => buffer.buffer_bytes().len() / resolution.height() as usize,
};

self.video_info.wrap_frame(
buffer.buffer(),
0,
stride,
)
// Create input frame and copy data
let mut input_frame = FFVideo::new(
match self.format {
FrameFormat::NV12 => ffmpeg::format::Pixel::NV12,
FrameFormat::BGRA => ffmpeg::format::Pixel::BGRA,
FrameFormat::MJPEG => ffmpeg::format::Pixel::RGBA,
FrameFormat::GRAY => ffmpeg::format::Pixel::GRAY8,
FrameFormat::RAWRGB => ffmpeg::format::Pixel::RGB24,
_ => ffmpeg::format::Pixel::RGBA,
},
resolution.width(),
resolution.height(),
);

// Copy data line by line
{
let dst_stride = input_frame.stride(0);
let dst = input_frame.data_mut(0);
let src = buffer.buffer();
for y in 0..resolution.height() as usize {
let src_offset = y * stride;
let dst_offset = y * dst_stride;
dst[dst_offset..dst_offset + stride]
.copy_from_slice(&src[src_offset..src_offset + stride]);
}
}

input_frame
}
}
}
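The repeated row-by-row copies above exist because ffmpeg may allocate frame planes with row padding, so the destination stride (input_frame.stride(0)) can be larger than the camera buffer's packed row length; a single copy_from_slice over the whole plane would then shift every row after the first. A standalone sketch of the pattern, with an illustrative helper name:

// Copies tightly packed source rows into a destination plane whose rows may
// carry trailing padding (dst_stride >= src_row_len).
fn copy_packed_rows(src: &[u8], src_row_len: usize, dst: &mut [u8], dst_stride: usize, rows: usize) {
    assert!(dst_stride >= src_row_len, "destination rows must be at least as wide as source rows");
    for y in 0..rows {
        let s = y * src_row_len;
        let d = y * dst_stride;
        dst[d..d + src_row_len].copy_from_slice(&src[s..s + src_row_len]);
    }
}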
