From fe92b384fd2e07196c89563fbf2e159172c4cc33 Mon Sep 17 00:00:00 2001 From: David Anyatonwu Date: Fri, 27 Dec 2024 22:19:15 +0100 Subject: [PATCH 1/3] fix(export): prevent export progress from freezing at 102% Signed-off-by: David Anyatonwu --- apps/desktop/src-tauri/src/export.rs | 8 +++++--- apps/desktop/src/routes/editor/Header.tsx | 16 ++++++++++++++-- apps/desktop/src/routes/recordings-overlay.tsx | 16 ++++++++++++++-- 3 files changed, 33 insertions(+), 7 deletions(-) diff --git a/apps/desktop/src-tauri/src/export.rs b/apps/desktop/src-tauri/src/export.rs index fc022ef4..6c130094 100644 --- a/apps/desktop/src-tauri/src/export.rs +++ b/apps/desktop/src-tauri/src/export.rs @@ -21,8 +21,8 @@ pub async fn export_video( .await .unwrap(); - // 30 FPS (calculated for output video) - let total_frames = (duration * 30.0).round() as u32; + // Calculate total frames with ceiling to ensure we don't exceed 100% + let total_frames = ((duration * 30.0).ceil() as u32).max(1); let editor_instance = upsert_editor_instance(&app, video_id.clone()).await; @@ -41,9 +41,11 @@ pub async fn export_video( project, output_path.clone(), move |frame_index| { + // Ensure progress never exceeds total frames + let current_frame = (frame_index + 1).min(total_frames); progress .send(RenderProgress::FrameRendered { - current_frame: frame_index + 1, + current_frame, }) .ok(); }, diff --git a/apps/desktop/src/routes/editor/Header.tsx b/apps/desktop/src/routes/editor/Header.tsx index 2742f59b..f2b14cbf 100644 --- a/apps/desktop/src/routes/editor/Header.tsx +++ b/apps/desktop/src/routes/editor/Header.tsx @@ -260,14 +260,26 @@ function ExportButton() { const progress = new Channel(); progress.onmessage = (p) => { if (p.type === "FrameRendered" && progressState.type === "saving") { - const percentComplete = Math.round( - (p.current_frame / (progressState.totalFrames || 1)) * 100 + const percentComplete = Math.min( + Math.round( + (p.current_frame / (progressState.totalFrames || 1)) * 100 + ), + 100 ); + setProgressState({ ...progressState, renderProgress: p.current_frame, message: `Rendering video - ${percentComplete}%`, }); + + // If rendering is complete, update the message + if (percentComplete === 100) { + setProgressState({ + ...progressState, + message: "Finalizing export...", + }); + } } if ( p.type === "EstimatedTotalFrames" && diff --git a/apps/desktop/src/routes/recordings-overlay.tsx b/apps/desktop/src/routes/recordings-overlay.tsx index b0a6b2d6..83a1d636 100644 --- a/apps/desktop/src/routes/recordings-overlay.tsx +++ b/apps/desktop/src/routes/recordings-overlay.tsx @@ -375,14 +375,26 @@ export default function () { undefined && progressState.totalFrames ) { - return `${Math.min( + const progress = Math.min( Math.round( (progressState.renderProgress / progressState.totalFrames) * 100 ), 100 - )}%`; + ); + + // If we hit 100%, transition to the next stage + if (progress === 100 && progressState.type === "uploading") { + setProgressState({ + ...progressState, + stage: "uploading", + message: "Starting upload...", + uploadProgress: 0 + }); + } + + return `${progress}%`; } return progressState.message; From 173abf95cb630cd4833fa8091ea3ff0477e8d6fc Mon Sep 17 00:00:00 2001 From: David Anyatonwu Date: Sat, 28 Dec 2024 08:51:52 +0100 Subject: [PATCH 2/3] fix(export): check pro access before export and fix progress UI Signed-off-by: David Anyatonwu --- apps/desktop/src-tauri/src/export.rs | 36 +++++++++------ apps/desktop/src/routes/editor/Header.tsx | 56 +++++++++++++---------- 2 files changed, 54 insertions(+), 38 
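Patch 1/3 boils down to two clamps: the frame total is computed with ceil() and a floor of one so the progress denominator can never undercount or hit zero, and the reported frame index is capped at that total so current/total can never pass 100%. A minimal standalone sketch of the arithmetic, assuming the fixed 30 fps output used in the patch (the real code reports through a RenderProgress channel):

fn total_frames(duration_secs: f64, fps: f64) -> u32 {
    // ceil(), not round(): rounding down by a fraction of a frame lets the
    // encoder emit more frames than the estimate, which is how the UI
    // reached 102%. The .max(1) keeps the denominator non-zero.
    ((duration_secs * fps).ceil() as u32).max(1)
}

fn reported_frame(frame_index: u32, total: u32) -> u32 {
    // frame_index is 0-based; clamp so the numerator never exceeds total.
    (frame_index + 1).min(total)
}

fn main() {
    let total = total_frames(10.01, 30.0); // 301 with ceil(); round() gives 300
    assert_eq!(reported_frame(310, total), total); // never past 100%
}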
deletions(-) diff --git a/apps/desktop/src-tauri/src/export.rs b/apps/desktop/src-tauri/src/export.rs index 6c130094..44468bd3 100644 --- a/apps/desktop/src-tauri/src/export.rs +++ b/apps/desktop/src-tauri/src/export.rs @@ -16,10 +16,15 @@ pub async fn export_video( force: bool, use_custom_muxer: bool, ) -> Result { - let VideoRecordingMetadata { duration, .. } = - get_video_metadata(app.clone(), video_id.clone(), Some(VideoType::Screen)) - .await - .unwrap(); + let metadata = match get_video_metadata(app.clone(), video_id.clone(), Some(VideoType::Screen)).await { + Ok(meta) => meta, + Err(e) => { + sentry::capture_message(&format!("Failed to get video metadata: {}", e), sentry::Level::Error); + return Err("Failed to read video metadata. The recording may be from an incompatible version.".to_string()); + } + }; + + let VideoRecordingMetadata { duration, .. } = metadata; // Calculate total frames with ceiling to ensure we don't exceed 100% let total_frames = ((duration * 30.0).ceil() as u32).max(1); @@ -28,7 +33,7 @@ pub async fn export_video( let output_path = editor_instance.meta().output_path(); - // If the file exists, return it immediately + // If the file exists and we're not forcing a re-render, return it if output_path.exists() && !force { return Ok(output_path); } @@ -59,17 +64,20 @@ pub async fn export_video( e.to_string() })?; - if use_custom_muxer { + let result = if use_custom_muxer { exporter.export_with_custom_muxer().await } else { exporter.export_with_ffmpeg_cli().await - } - .map_err(|e| { - sentry::capture_message(&e.to_string(), sentry::Level::Error); - e.to_string() - })?; + }; - ShowCapWindow::PrevRecordings.show(&app).ok(); - - Ok(output_path) + match result { + Ok(_) => { + ShowCapWindow::PrevRecordings.show(&app).ok(); + Ok(output_path) + } + Err(e) => { + sentry::capture_message(&e.to_string(), sentry::Level::Error); + Err(e.to_string()) + } + } } diff --git a/apps/desktop/src/routes/editor/Header.tsx b/apps/desktop/src/routes/editor/Header.tsx index f2b14cbf..0d67ce12 100644 --- a/apps/desktop/src/routes/editor/Header.tsx +++ b/apps/desktop/src/routes/editor/Header.tsx @@ -235,6 +235,7 @@ import { save } from "@tauri-apps/plugin-dialog"; import { DEFAULT_PROJECT_CONFIG } from "./projectConfig"; import { createMutation } from "@tanstack/solid-query"; import { getRequestEvent } from "solid-js/web"; +import { checkIsUpgradedAndUpdate } from "~/utils/plans"; function ExportButton() { const { videoId, project, prettyName } = useEditorContext(); @@ -261,9 +262,7 @@ function ExportButton() { progress.onmessage = (p) => { if (p.type === "FrameRendered" && progressState.type === "saving") { const percentComplete = Math.min( - Math.round( - (p.current_frame / (progressState.totalFrames || 1)) * 100 - ), + Math.round((p.current_frame / (progressState.totalFrames || 1)) * 100), 100 ); @@ -273,7 +272,7 @@ function ExportButton() { message: `Rendering video - ${percentComplete}%`, }); - // If rendering is complete, update the message + // If rendering is complete, update to finalizing state if (percentComplete === 100) { setProgressState({ ...progressState, @@ -281,10 +280,7 @@ function ExportButton() { }); } } - if ( - p.type === "EstimatedTotalFrames" && - progressState.type === "saving" - ) { + if (p.type === "EstimatedTotalFrames" && progressState.type === "saving") { setProgressState({ ...progressState, totalFrames: p.total_frames, @@ -293,25 +289,30 @@ function ExportButton() { } }; - const videoPath = await commands.exportVideo( - videoId, - project, - progress, - true, 
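Patch 2/3's metadata lookup above swaps an .unwrap() for a match that records the raw error to Sentry and hands the caller a message safe to show in the UI; the export result further down gets the same treatment. A reduced sketch of that shape — load_metadata and its error string are hypothetical stand-ins for get_video_metadata, while sentry::capture_message is the same call the diff uses:

struct Metadata {
    duration: f64,
}

// Hypothetical stand-in for get_video_metadata.
fn load_metadata(_video_id: &str) -> Result<Metadata, String> {
    Err("moov atom not found".to_string())
}

fn export(video_id: &str) -> Result<f64, String> {
    let meta = match load_metadata(video_id) {
        Ok(m) => m,
        Err(e) => {
            // Keep the raw error for developers...
            sentry::capture_message(
                &format!("Failed to get video metadata: {}", e),
                sentry::Level::Error,
            );
            // ...but return one the UI can show verbatim.
            return Err(
                "Failed to read video metadata. The recording may be from an incompatible version."
                    .to_string(),
            );
        }
    };
    Ok(meta.duration)
}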
- useCustomMuxer - ); - await commands.copyFileToPath(videoPath, path); + try { + const videoPath = await commands.exportVideo( + videoId, + project, + progress, + true, + useCustomMuxer + ); + await commands.copyFileToPath(videoPath, path); - setProgressState({ - type: "saving", - progress: 100, - message: "Saved successfully!", - mediaPath: path, - }); + setProgressState({ + type: "saving", + progress: 100, + message: "Saved successfully!", + mediaPath: path, + }); - setTimeout(() => { + setTimeout(() => { + setProgressState({ type: "idle" }); + }, 1500); + } catch (error) { setProgressState({ type: "idle" }); - }, 1500); + throw error; + } }, })); @@ -342,6 +343,13 @@ function ShareButton() { throw new Error("Recording metadata not available"); } + // Check for pro access first before starting the export + const isUpgraded = await checkIsUpgradedAndUpdate(); + if (!isUpgraded) { + await commands.showWindow("Upgrade"); + throw new Error("Upgrade required to share recordings"); + } + let unlisten: (() => void) | undefined; try { From 1e91c64e71133d88946c47c3f5fbc14d9cac13b7 Mon Sep 17 00:00:00 2001 From: Richie McIlroy <33632126+richiemcilroy@users.noreply.github.com> Date: Mon, 6 Jan 2025 16:20:36 +0000 Subject: [PATCH 3/3] feat: Rendering fix for mismatch camera/display files --- apps/desktop/src-tauri/src/export.rs | 51 +++-- apps/desktop/src-tauri/src/lib.rs | 14 +- apps/desktop/src/routes/editor/Header.tsx | 11 +- apps/desktop/src/utils/tauri.ts | 2 +- crates/editor/src/editor.rs | 26 ++- crates/editor/src/editor_instance.rs | 6 +- crates/project/src/meta.rs | 12 ++ crates/rendering/src/decoder.rs | 214 +++++++++++++++---- crates/rendering/src/lib.rs | 233 ++++++++++++++++----- crates/rendering/src/project_recordings.rs | 44 ++-- 10 files changed, 485 insertions(+), 128 deletions(-) diff --git a/apps/desktop/src-tauri/src/export.rs b/apps/desktop/src-tauri/src/export.rs index 44468bd3..abcfb7b3 100644 --- a/apps/desktop/src-tauri/src/export.rs +++ b/apps/desktop/src-tauri/src/export.rs @@ -16,15 +16,33 @@ pub async fn export_video( force: bool, use_custom_muxer: bool, ) -> Result { - let metadata = match get_video_metadata(app.clone(), video_id.clone(), Some(VideoType::Screen)).await { - Ok(meta) => meta, - Err(e) => { - sentry::capture_message(&format!("Failed to get video metadata: {}", e), sentry::Level::Error); - return Err("Failed to read video metadata. The recording may be from an incompatible version.".to_string()); - } - }; + let screen_metadata = + match get_video_metadata(app.clone(), video_id.clone(), Some(VideoType::Screen)).await { + Ok(meta) => meta, + Err(e) => { + sentry::capture_message( + &format!("Failed to get video metadata: {}", e), + sentry::Level::Error, + ); + return Err( + "Failed to read video metadata. The recording may be from an incompatible version." + .to_string(), + ); + } + }; + + // Get camera metadata if it exists + let camera_metadata = + get_video_metadata(app.clone(), video_id.clone(), Some(VideoType::Camera)) + .await + .ok(); - let VideoRecordingMetadata { duration, .. 
} = metadata; + // Use the longer duration between screen and camera + let duration = screen_metadata.duration.max( + camera_metadata + .map(|m| m.duration) + .unwrap_or(screen_metadata.duration), + ); // Calculate total frames with ceiling to ensure we don't exceed 100% let total_frames = ((duration * 30.0).ceil() as u32).max(1); @@ -42,16 +60,25 @@ pub async fn export_video( .send(RenderProgress::EstimatedTotalFrames { total_frames }) .ok(); + // Create a modified project configuration that accounts for different video lengths + let mut modified_project = project.clone(); + if let Some(timeline) = &mut modified_project.timeline { + // Ensure timeline duration matches the longest video + for segment in timeline.segments.iter_mut() { + if segment.end > duration { + segment.end = duration; + } + } + } + let exporter = cap_export::Exporter::new( - project, + modified_project, output_path.clone(), move |frame_index| { // Ensure progress never exceeds total frames let current_frame = (frame_index + 1).min(total_frames); progress - .send(RenderProgress::FrameRendered { - current_frame, - }) + .send(RenderProgress::FrameRendered { current_frame }) .ok(); }, editor_instance.project_path.clone(), diff --git a/apps/desktop/src-tauri/src/lib.rs b/apps/desktop/src-tauri/src/lib.rs index c2a8afae..d81778c9 100644 --- a/apps/desktop/src-tauri/src/lib.rs +++ b/apps/desktop/src-tauri/src/lib.rs @@ -88,6 +88,7 @@ pub struct App { pub enum VideoType { Screen, Output, + Camera, } #[derive(Serialize, Deserialize, specta::Type)] @@ -952,6 +953,17 @@ async fn get_video_metadata( let paths = match video_type { Some(VideoType::Screen) => content_paths(&project_path, &meta), + Some(VideoType::Camera) => match &meta.content { + Content::SingleSegment { segment } => segment + .camera + .as_ref() + .map_or(vec![], |c| vec![segment.path(&meta, &c.path)]), + Content::MultipleSegments { inner } => inner + .segments + .iter() + .filter_map(|s| s.camera.as_ref().map(|c| inner.path(&meta, &c.path))) + .collect(), + }, Some(VideoType::Output) | None => { let output_video_path = project_path.join("output").join("result.mp4"); println!("Using output video path: {:?}", output_video_path); @@ -1039,7 +1051,7 @@ fn focus_captures_panel(app: AppHandle) { #[derive(Serialize, Deserialize, specta::Type, Clone)] #[serde(tag = "type")] -enum RenderProgress { +pub enum RenderProgress { Starting { total_frames: u32 }, EstimatedTotalFrames { total_frames: u32 }, FrameRendered { current_frame: u32 }, diff --git a/apps/desktop/src/routes/editor/Header.tsx b/apps/desktop/src/routes/editor/Header.tsx index 0d67ce12..66f3537c 100644 --- a/apps/desktop/src/routes/editor/Header.tsx +++ b/apps/desktop/src/routes/editor/Header.tsx @@ -262,10 +262,12 @@ function ExportButton() { progress.onmessage = (p) => { if (p.type === "FrameRendered" && progressState.type === "saving") { const percentComplete = Math.min( - Math.round((p.current_frame / (progressState.totalFrames || 1)) * 100), + Math.round( + (p.current_frame / (progressState.totalFrames || 1)) * 100 + ), 100 ); - + setProgressState({ ...progressState, renderProgress: p.current_frame, @@ -280,7 +282,10 @@ function ExportButton() { }); } } - if (p.type === "EstimatedTotalFrames" && progressState.type === "saving") { + if ( + p.type === "EstimatedTotalFrames" && + progressState.type === "saving" + ) { setProgressState({ ...progressState, totalFrames: p.total_frames, diff --git a/apps/desktop/src/utils/tauri.ts b/apps/desktop/src/utils/tauri.ts index a8b14ca3..d669bc47 100644 --- 
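The core of PATCH 3/3's export change: when the camera file is longer than the display file (or vice versa), export against the longer duration and clamp any timeline segment that overruns it. A condensed sketch of the two steps, with Segment standing in for the real timeline segment type:

struct Segment {
    start: f64,
    end: f64,
}

// Camera metadata may be absent (screen-only recordings) or describe a
// longer file; take the longer of the two durations.
fn export_duration(screen: f64, camera: Option<f64>) -> f64 {
    screen.max(camera.unwrap_or(screen))
}

// Clamp timeline segments so none of them runs past the chosen duration,
// mirroring the loop added to export_video above.
fn clamp_segments(segments: &mut [Segment], duration: f64) {
    for s in segments.iter_mut() {
        if s.end > duration {
            s.end = duration;
        }
    }
}

fn main() {
    let duration = export_duration(12.0, Some(14.5)); // camera runs longer
    let mut segments = vec![Segment { start: 0.0, end: 15.0 }];
    clamp_segments(&mut segments, duration);
    assert_eq!(segments[0].end, 14.5);
}

Worth noting while reading the rest of this patch: the renderer change in crates/rendering/src/lib.rs below makes the opposite adjustment, extending short segments out to the max duration, so the two code paths can disagree about a segment's end.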
a/apps/desktop/src/utils/tauri.ts +++ b/apps/desktop/src/utils/tauri.ts @@ -284,7 +284,7 @@ export type UploadProgress = { stage: string; progress: number; message: string export type UploadResult = { Success: string } | "NotAuthenticated" | "PlanCheckFailed" | "UpgradeRequired" export type Video = { duration: number; width: number; height: number } export type VideoRecordingMetadata = { duration: number; size: number } -export type VideoType = "screen" | "output" +export type VideoType = "screen" | "output" | "camera" export type XY = { x: T; y: T } export type ZoomMode = "auto" | { manual: { x: number; y: number } } export type ZoomSegment = { start: number; end: number; amount: number; mode: ZoomMode } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 6bc463cc..dbdd2ebb 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1,8 +1,10 @@ use std::{sync::Arc, time::Instant}; use cap_media::frame_ws::WSFrame; -use cap_project::{BackgroundSource, ProjectConfiguration}; -use cap_rendering::{decoder::DecodedFrame, produce_frame, ProjectUniforms, RenderVideoConstants}; +use cap_project::{BackgroundSource, ProjectConfiguration, RecordingMeta}; +use cap_rendering::{ + decoder::DecodedFrame, produce_frame, ProjectRecordings, ProjectUniforms, RenderVideoConstants, +}; use tokio::{ sync::{mpsc, oneshot}, task::JoinHandle, @@ -26,6 +28,7 @@ pub struct Renderer { rx: mpsc::Receiver, frame_tx: flume::Sender, render_constants: Arc, + total_frames: u32, } pub struct RendererHandle { @@ -36,13 +39,28 @@ impl Renderer { pub fn spawn( render_constants: Arc, frame_tx: flume::Sender, + meta: &RecordingMeta, ) -> RendererHandle { + let recordings = ProjectRecordings::new(meta); + let mut max_duration = recordings.duration(); + + // Check camera duration if it exists + if let Some(camera_path) = meta.content.camera_path() { + if let Ok(camera_duration) = recordings.get_source_duration(&camera_path) { + max_duration = max_duration.max(camera_duration); + } + } + + let total_frames = (30_f64 * max_duration).ceil() as u32; + println!("Editor total frames: {total_frames}"); + let (tx, rx) = mpsc::channel(4); let this = Self { rx, frame_tx, render_constants, + total_frames, }; tokio::spawn(this.run()); @@ -61,7 +79,7 @@ impl Renderer { camera_frame, background, uniforms, - time, // Add this + time, finished, } => { if let Some(task) = frame_task.as_ref() { @@ -74,6 +92,7 @@ impl Renderer { let render_constants = self.render_constants.clone(); let frame_tx = self.frame_tx.clone(); + let total_frames = self.total_frames; frame_task = Some(tokio::spawn(async move { let frame = produce_frame( @@ -83,6 +102,7 @@ impl Renderer { cap_rendering::Background::from(background), &uniforms, time, + total_frames, ) .await .unwrap(); diff --git a/crates/editor/src/editor_instance.rs b/crates/editor/src/editor_instance.rs index e45084ba..e3a3cdf8 100644 --- a/crates/editor/src/editor_instance.rs +++ b/crates/editor/src/editor_instance.rs @@ -86,7 +86,11 @@ impl EditorInstance { .unwrap(), ); - let renderer = Arc::new(editor::Renderer::spawn(render_constants.clone(), frame_tx)); + let renderer = Arc::new(editor::Renderer::spawn( + render_constants.clone(), + frame_tx, + &meta, + )); let (preview_tx, preview_rx) = watch::channel(None); diff --git a/crates/project/src/meta.rs b/crates/project/src/meta.rs index f13cca56..7a1da07c 100644 --- a/crates/project/src/meta.rs +++ b/crates/project/src/meta.rs @@ -82,6 +82,18 @@ pub enum Content { }, } +impl Content { + pub fn 
camera_path(&self) -> Option { + match self { + Content::SingleSegment { segment } => segment.camera.as_ref().map(|c| c.path.clone()), + Content::MultipleSegments { inner } => inner + .segments + .first() + .and_then(|s| s.camera.as_ref().map(|c| c.path.clone())), + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize, Type)] pub struct SingleSegment { pub display: Display, diff --git a/crates/rendering/src/decoder.rs b/crates/rendering/src/decoder.rs index 30f0f93b..0e869f7e 100644 --- a/crates/rendering/src/decoder.rs +++ b/crates/rendering/src/decoder.rs @@ -1,7 +1,7 @@ use std::{ collections::BTreeMap, path::PathBuf, - sync::{mpsc, Arc}, + sync::{mpsc, Arc, Mutex}, }; use ffmpeg::{ @@ -179,6 +179,7 @@ impl AsyncVideoDecoder { let mut last_decoded_frame = None::; let mut last_sent_frame = None::<(u32, DecodedFrame)>; + let mut reached_end = false; let mut peekable_requests = PeekableReceiver { rx, peeked: None }; @@ -187,6 +188,14 @@ impl AsyncVideoDecoder { while let Ok(r) = peekable_requests.recv() { match r { VideoDecoderMessage::GetFrame(requested_frame, sender) => { + // If we've already reached the end and have a last frame, return it + if reached_end { + if let Some((_, last_frame)) = &last_sent_frame { + sender.send(Some(last_frame.clone())).ok(); + continue; + } + } + let mut sender = if let Some(cached) = cache.get_mut(&requested_frame) { let data = cached.process(&mut scaler_input_format, &mut scaler, &decoder); @@ -201,30 +210,95 @@ impl AsyncVideoDecoder { let cache_min = requested_frame.saturating_sub(FRAME_CACHE_SIZE as u32 / 2); let cache_max = requested_frame + FRAME_CACHE_SIZE as u32 / 2; - if requested_frame <= 0 - || last_sent_frame - .as_ref() - .map(|last| { - requested_frame < last.0 || - // seek forward for big jumps. 
this threshold is arbitrary but should be derived from i-frames in future - requested_frame - last.0 > FRAME_CACHE_SIZE as u32 + if cache.len() >= FRAME_CACHE_SIZE { + // When cache is full, remove old frames that are far from the requested frame + let frames_to_remove: Vec<_> = cache + .keys() + .filter(|&&k| { + // Keep frames within a window of the requested frame + let distance = if k <= requested_frame { + requested_frame - k + } else { + k - requested_frame + }; + // Remove frames that are more than half the cache size away + distance > FRAME_CACHE_SIZE as u32 / 2 }) - .unwrap_or(true) + .copied() + .collect(); + + for frame in frames_to_remove { + println!( + "Removing old frame {} from cache (requested_frame: {})", + frame, requested_frame + ); + cache.remove(&frame); + } + + // If we still need to remove frames, remove the ones furthest from the requested frame + if cache.len() >= FRAME_CACHE_SIZE { + let frame_to_remove = cache + .keys() + .max_by_key(|&&k| { + if k <= requested_frame { + requested_frame - k + } else { + k - requested_frame + } + }) + .copied() + .unwrap(); + println!( + "Removing distant frame {} from cache (requested_frame: {})", + frame_to_remove, requested_frame + ); + cache.remove(&frame_to_remove); + } + } + + // Only seek if we're going backwards or if we're jumping more than half the cache size + // AND we don't have the frame in cache already + // AND we haven't reached the end of the video + if !reached_end + && !cache.contains_key(&requested_frame) + && (requested_frame <= 0 + || last_sent_frame + .as_ref() + .map(|last| { + let backwards = requested_frame < last.0; + let big_jump = requested_frame > last.0 + && requested_frame.saturating_sub(last.0) + > FRAME_CACHE_SIZE as u32 / 2; + backwards || big_jump + }) + .unwrap_or(true)) { let timestamp_us = ((requested_frame as f32 / frame_rate.numerator() as f32) * 1_000_000.0) as i64; let position = timestamp_us.rescale((1, 1_000_000), rescale::TIME_BASE); - println!("seeking to {position} for frame {requested_frame}"); - decoder.flush(); - input.seek(position, ..position).unwrap(); - cache.clear(); - last_decoded_frame = None; - last_sent_frame = None; - + // Drop the old packets iterator to release the mutable borrow + drop(packets); + let seek_result = input.seek(position, ..position); + // Create new packets iterator regardless of seek result packets = input.packets(); + + match seek_result { + Ok(_) => { + cache.clear(); + last_decoded_frame = None; + } + Err(_) => { + // If seek fails, we've likely reached the end + reached_end = true; + if let Some((_, last_frame)) = &last_sent_frame { + sender.take().map(|s| s.send(Some(last_frame.clone()))); + } + continue; + } + } } // handle when requested_frame == last_decoded_frame or last_decoded_frame > requested_frame. 
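The cache rework above replaces "clear everything on seek" with a distance-based policy: keep a window of frames centred on the requested frame, evict anything more than half the cache size away, and if the cache is still full, drop the single furthest frame. The same block appears twice in this hunk (once before seeking, once in the decode loop below) and could be factored into a helper along these lines — a sketch, with a placeholder cache-size constant and plain byte buffers standing in for decoded frames:

use std::collections::BTreeMap;

// decoder.rs defines the real constant; this value is only for the sketch.
const FRAME_CACHE_SIZE: usize = 50;

fn evict_distant(cache: &mut BTreeMap<u32, Vec<u8>>, requested: u32) {
    // Keep only frames within half the cache window of the request.
    cache.retain(|&k, _| k.abs_diff(requested) <= FRAME_CACHE_SIZE as u32 / 2);
    // If still full, drop the frame furthest from the request.
    while cache.len() >= FRAME_CACHE_SIZE {
        let furthest = *cache
            .keys()
            .max_by_key(|&&k| k.abs_diff(requested))
            .unwrap();
        cache.remove(&furthest);
    }
}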
@@ -310,35 +384,65 @@ impl AsyncVideoDecoder { last_sent_frame = Some((current_frame, data.clone())); sender.send(Some(data)).ok(); - break; } + } else if current_frame + > last_sent_frame.as_ref().map(|f| f.0).unwrap_or(0) + { + // Keep last_sent_frame up to date even for frames we're not sending + let data = cache_frame.process( + &mut scaler_input_format, + &mut scaler, + &decoder, + ); + last_sent_frame = Some((current_frame, data)); } if cache.len() >= FRAME_CACHE_SIZE { - if let Some(last_active_frame) = &last_active_frame { - let frame = if requested_frame > *last_active_frame - { - *cache.keys().next().unwrap() - } else if requested_frame < *last_active_frame { - *cache.keys().next_back().unwrap() - } else { - let min = *cache.keys().min().unwrap(); - let max = *cache.keys().max().unwrap(); - - if current_frame > max { - min + // When cache is full, remove old frames that are far from the requested frame + let frames_to_remove: Vec<_> = cache + .keys() + .filter(|&&k| { + // Keep frames within a window of the requested frame + let distance = if k <= requested_frame { + requested_frame - k } else { - max - } - }; - + k - requested_frame + }; + // Remove frames that are more than half the cache size away + distance > FRAME_CACHE_SIZE as u32 / 2 + }) + .copied() + .collect(); + + for frame in frames_to_remove { + println!("Removing old frame {} from cache (requested_frame: {})", frame, requested_frame); cache.remove(&frame); - } else { - cache.clear() + } + + // If we still need to remove frames, remove the ones furthest from the requested frame + if cache.len() >= FRAME_CACHE_SIZE { + let frame_to_remove = cache + .keys() + .max_by_key(|&&k| { + if k <= requested_frame { + requested_frame - k + } else { + k - requested_frame + } + }) + .copied() + .unwrap(); + println!("Removing distant frame {} from cache (requested_frame: {})", frame_to_remove, requested_frame); + cache.remove(&frame_to_remove); } } + println!( + "Inserting frame {} into cache (size: {})", + current_frame, + cache.len() + ); cache.insert(current_frame, cache_frame); } @@ -359,23 +463,51 @@ impl AsyncVideoDecoder { } }); - AsyncVideoDecoderHandle { sender: tx } + AsyncVideoDecoderHandle { + sender: tx, + last_valid_frame: Arc::new(Mutex::new(None)), + reached_end: Arc::new(Mutex::new(false)), + } } } #[derive(Clone)] pub struct AsyncVideoDecoderHandle { sender: mpsc::Sender, + last_valid_frame: Arc>>, + reached_end: Arc>, } impl AsyncVideoDecoderHandle { - pub async fn get_frame(&self, time: u32) -> Option>> { + pub async fn get_frame(&self, frame_number: u32) -> Option { + // If we've already reached the end of the video, just return the last valid frame + if *self.reached_end.lock().unwrap() { + return self.last_valid_frame.lock().unwrap().clone(); + } + let (tx, rx) = tokio::sync::oneshot::channel(); self.sender - .send(VideoDecoderMessage::GetFrame(time, tx)) - .unwrap(); - let res = rx.await.ok().flatten(); - res + .send(VideoDecoderMessage::GetFrame(frame_number, tx)) + .ok()?; + + // Wait for response with a timeout + match tokio::time::timeout(std::time::Duration::from_secs(5), rx).await { + Ok(Ok(frame)) => { + if let Some(frame) = &frame { + // Store this as the last valid frame + *self.last_valid_frame.lock().unwrap() = Some(frame.clone()); + } else { + // If we got no frame, we've reached the end + *self.reached_end.lock().unwrap() = true; + } + // If we got no frame but have a last valid frame, return that instead + frame.or_else(|| self.last_valid_frame.lock().unwrap().clone()) + } + _ => { + // On 
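Further down this hunk, AsyncVideoDecoderHandle::get_frame grows a five-second tokio timeout plus a last-valid-frame fallback, so a decoder that has run past the end of a short file can no longer hang the renderer. A reduced sketch of that pattern (Frame is a stand-in alias; the real handle also tracks a reached_end flag behind a second mutex):

use std::sync::{Arc, Mutex};
use std::time::Duration;
use tokio::sync::oneshot;

type Frame = Arc<Vec<u8>>;

async fn frame_or_last_valid(
    rx: oneshot::Receiver<Option<Frame>>,
    last_valid: &Mutex<Option<Frame>>,
) -> Option<Frame> {
    match tokio::time::timeout(Duration::from_secs(5), rx).await {
        Ok(Ok(Some(frame))) => {
            // Remember this frame so later failures have something to show.
            *last_valid.lock().unwrap() = Some(frame.clone());
            Some(frame)
        }
        // End-of-stream (Ok(Ok(None))), a dropped sender, or a timeout all
        // fall back to the most recent good frame instead of stalling.
        _ => last_valid.lock().unwrap().clone(),
    }
}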
timeout, return last valid frame if we have one + self.last_valid_frame.lock().unwrap().clone() + } + } } } diff --git a/crates/rendering/src/lib.rs b/crates/rendering/src/lib.rs index 511c6887..06bd5575 100644 --- a/crates/rendering/src/lib.rs +++ b/crates/rendering/src/lib.rs @@ -1,6 +1,13 @@ use anyhow::Result; use bytemuck::{Pod, Zeroable}; use cap_flags::FLAGS; +use cap_project::{ + AspectRatio, BackgroundSource, CameraXPosition, CameraYPosition, Content, Crop, + CursorAnimationStyle, CursorClickEvent, CursorData, CursorEvents, CursorMoveEvent, + ProjectConfiguration, RecordingMeta, ZoomSegment, FAST_SMOOTHING_SAMPLES, + FAST_VELOCITY_THRESHOLD, REGULAR_SMOOTHING_SAMPLES, REGULAR_VELOCITY_THRESHOLD, + SLOW_SMOOTHING_SAMPLES, SLOW_VELOCITY_THRESHOLD, XY, +}; use core::f64; use decoder::{AsyncVideoDecoder, AsyncVideoDecoderHandle}; use futures::future::OptionFuture; @@ -13,14 +20,6 @@ use tokio::sync::mpsc; use wgpu::util::DeviceExt; use wgpu::{CommandEncoder, COPY_BYTES_PER_ROW_ALIGNMENT}; -use cap_project::{ - AspectRatio, BackgroundSource, CameraXPosition, CameraYPosition, Content, Crop, - CursorAnimationStyle, CursorClickEvent, CursorData, CursorEvents, CursorMoveEvent, - ProjectConfiguration, RecordingMeta, ZoomSegment, FAST_SMOOTHING_SAMPLES, - FAST_VELOCITY_THRESHOLD, REGULAR_SMOOTHING_SAMPLES, REGULAR_VELOCITY_THRESHOLD, - SLOW_SMOOTHING_SAMPLES, SLOW_VELOCITY_THRESHOLD, XY, -}; - use image::GenericImageView; use std::path::Path; use std::time::Instant; @@ -118,7 +117,19 @@ impl RecordingSegmentDecoders { OptionFuture::from(self.camera.as_ref().map(|d| d.get_frame(frame_number))) ); - screen_frame.map(|f| (f, camera_frame.flatten())) + // Create black frames with the correct dimensions + let black_screen = vec![0; (1920 * 804 * 4) as usize]; + let black_camera = vec![0; (1920 * 1080 * 4) as usize]; + + // Return frames or black frames as needed + Some(( + screen_frame.unwrap_or_else(|| Arc::new(black_screen)), + self.camera.as_ref().map(|_| { + camera_frame + .flatten() + .unwrap_or_else(|| Arc::new(black_camera)) + }), + )) } } @@ -143,7 +154,7 @@ pub struct RenderSegment { pub async fn render_video_to_channel( options: RenderOptions, - project: ProjectConfiguration, + mut project: ProjectConfiguration, sender: mpsc::Sender, meta: &RecordingMeta, segments: Vec, @@ -155,76 +166,197 @@ pub async fn render_video_to_channel( let start_time = Instant::now(); - let duration = project - .timeline() - .map(|t| t.duration()) - .unwrap_or(recordings.duration()); + // Get the duration from the timeline if it exists, otherwise use the longest source duration + let duration = { + let mut max_duration = recordings.duration(); + println!("Initial screen recording duration: {}", max_duration); + + // Check camera duration if it exists + if let Some(camera_path) = meta.content.camera_path() { + if let Ok(camera_duration) = recordings.get_source_duration(&camera_path) { + println!("Camera recording duration: {}", camera_duration); + max_duration = max_duration.max(camera_duration); + println!("New max duration after camera check: {}", max_duration); + } + } - println!("export duration: {duration}"); - println!("export duration: {duration}"); + // If there's a timeline, ensure all segments extend to the max duration + if let Some(timeline) = &mut project.timeline { + println!("Found timeline with {} segments", timeline.segments.len()); + for (i, segment) in timeline.segments.iter_mut().enumerate() { + println!( + "Segment {} - current end: {}, max_duration: {}", + i, segment.end, max_duration + 
); + if segment.end < max_duration { + segment.end = max_duration; + println!("Extended segment {} to new end: {}", i, segment.end); + } + } + let final_duration = timeline.duration(); + println!( + "Final timeline duration after adjustments: {}", + final_duration + ); + final_duration + } else { + println!("No timeline found, using max_duration: {}", max_duration); + max_duration + } + }; - let mut frame_number = 0; + let total_frames = (30_f64 * duration).ceil() as u32; + println!( + "Final export duration: {} seconds ({} frames at 30fps)", + duration, total_frames + ); + // Send initial frame to communicate total frames + let initial_frame = RenderedFrame { + data: vec![], + width: 0, + height: 0, + padded_bytes_per_row: 0, + total_frames: Some(total_frames), + }; + sender.send(initial_frame).await?; + + let mut frame_number = 0; let background = Background::from(project.background.source.clone()); loop { - if frame_number as f64 > 30_f64 * duration { + if frame_number >= total_frames { + println!("Reached total frames: {frame_number}/{total_frames}"); break; - }; + } - let (time, segment_i) = if let Some(timeline) = project.timeline() { + println!("Processing frame {frame_number}/{total_frames}"); + + let (time, segment_i) = if let Some(timeline) = &project.timeline { + println!("Getting time from timeline for frame {}", frame_number); match timeline.get_recording_time(frame_number as f64 / 30_f64) { - Some(value) => value, + Some(value) => { + println!( + "Timeline returned time: {}, segment: {:?}", + value.0, value.1 + ); + value + } None => { - println!("no time"); + println!( + "Timeline returned None for frame {} (time: {})", + frame_number, + frame_number as f64 / 30_f64 + ); + println!( + "Timeline segments: {:?}", + timeline + .segments + .iter() + .map(|s| (s.start, s.end)) + .collect::>() + ); break; } } } else { - (frame_number as f64 / 30_f64, None) + let time = frame_number as f64 / 30_f64; + println!("No timeline, using direct time calculation: {}", time); + (time, None) }; - let segment = &segments[segment_i.unwrap_or(0) as usize]; - + let segment_index = segment_i.unwrap_or(0) as usize; + println!("Using segment {} for frame {}", segment_index, frame_number); + let segment = &segments[segment_index]; let uniforms = ProjectUniforms::new(&constants, &project, time as f32); - if let Some((screen_frame, camera_frame)) = - segment.decoders.get_frames((time * 30.0) as u32).await - { - let frame = produce_frame( - &constants, - &screen_frame, - &camera_frame, - background, - &uniforms, - time as f32, - ) - .await?; + println!("Getting frames for time: {} (frame {})", time, frame_number); + // Get frames or use last valid frames if past duration + let (screen_frame, camera_frame) = + match segment.decoders.get_frames((time * 30.0) as u32).await { + Some((screen, camera)) => { + println!( + "Successfully got frames for time {} (frame {})", + time, frame_number + ); + (screen, camera) + } + None => { + println!( + "No frames from decoder at time {} (frame {}), using last valid frames", + time, frame_number + ); + // Get the last valid frame from each decoder + let screen = segment + .decoders + .screen + .get_frame((time * 30.0) as u32) + .await + .unwrap_or_else(|| { + println!("Using empty frame for screen"); + Arc::new(vec![ + 0; + (constants.options.screen_size.x + * constants.options.screen_size.y + * 4) as usize + ]) + }); + + let camera = match &segment.decoders.camera { + Some(camera_decoder) => { + println!("Getting camera frame at time {}", time); + Some( + 
camera_decoder + .get_frame((time * 30.0) as u32) + .await + .unwrap_or_else(|| { + println!("Using empty frame for camera"); + Arc::new(match constants.options.camera_size { + Some(size) => vec![0; (size.x * size.y * 4) as usize], + None => vec![0; 0], + }) + }), + ) + } + None => None, + }; - sender.send(frame).await?; - } else { - println!("no decoder frames: {:?}", (time, segment_i)); - }; + (screen, camera) + } + }; + + println!("Producing frame {frame_number}"); + let frame = produce_frame( + &constants, + &screen_frame, + &camera_frame, + background, + &uniforms, + time as f32, + total_frames, + ) + .await?; + + println!("Sending frame {frame_number}"); + if let Err(e) = sender.send(frame).await { + println!("Failed to send frame: {e}"); + break; + } frame_number += 1; if frame_number % 60 == 0 { let elapsed = start_time.elapsed(); println!( - "Rendered {} frames in {:?} seconds", - frame_number, + "Rendered {frame_number}/{total_frames} frames in {:?} seconds", elapsed.as_secs_f32() ); } } - println!("Render loop exited"); - - let total_frames = frame_number; - + println!("Render loop exited at frame {frame_number}/{total_frames}"); let total_time = start_time.elapsed(); println!( - "Render complete. Processed {} frames in {:?} seconds", - total_frames, + "Render complete. Processed {frame_number} frames in {:?} seconds", total_time.as_secs_f32() ); @@ -712,6 +844,7 @@ pub struct RenderedFrame { pub width: u32, pub height: u32, pub padded_bytes_per_row: u32, + pub total_frames: Option, } pub async fn produce_frame( @@ -721,6 +854,7 @@ pub async fn produce_frame( background: Background, uniforms: &ProjectUniforms, time: f32, + total_frames: u32, ) -> Result { let mut encoder = constants.device.create_command_encoder( &(wgpu::CommandEncoderDescriptor { @@ -997,6 +1131,7 @@ pub async fn produce_frame( padded_bytes_per_row, width: uniforms.output_size.0, height: uniforms.output_size.1, + total_frames: Some(total_frames), }) } diff --git a/crates/rendering/src/project_recordings.rs b/crates/rendering/src/project_recordings.rs index 16c45a0a..b6205744 100644 --- a/crates/rendering/src/project_recordings.rs +++ b/crates/rendering/src/project_recordings.rs @@ -12,21 +12,25 @@ pub struct Video { } impl Video { - pub fn new(path: &PathBuf) -> Self { - let input = ffmpeg::format::input(path).unwrap(); - let stream = input.streams().best(ffmpeg::media::Type::Video).unwrap(); + pub fn new(path: &PathBuf) -> Result { + let input = + ffmpeg::format::input(path).map_err(|e| format!("Failed to open video: {}", e))?; + let stream = input + .streams() + .best(ffmpeg::media::Type::Video) + .ok_or_else(|| "No video stream found".to_string())?; let video_decoder = ffmpeg::codec::Context::from_parameters(stream.parameters()) - .unwrap() + .map_err(|e| format!("Failed to create decoder: {}", e))? 
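When one stream simply has no frame at the requested time, the renderer now substitutes an all-zero RGBA buffer instead of aborting the export. A minimal sketch of that fallback; note that the 1920×804 and 1920×1080 buffers hard-coded in get_frames earlier in this file bake in assumed dimensions, whereas the render-loop fallback above sizes from constants.options:

use std::sync::Arc;

// All-zero RGBA is black with zero alpha; whether it shows as black or as
// the project background depends on how the compositor treats alpha.
fn black_frame(width: u32, height: u32) -> Arc<Vec<u8>> {
    Arc::new(vec![0u8; (width as usize) * (height as usize) * 4])
}

fn frame_or_black(frame: Option<Arc<Vec<u8>>>, width: u32, height: u32) -> Arc<Vec<u8>> {
    frame.unwrap_or_else(|| black_frame(width, height))
}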
.decoder() .video() - .unwrap(); + .map_err(|e| format!("Failed to get video decoder: {}", e))?; - Video { + Ok(Video { width: video_decoder.width(), height: video_decoder.height(), duration: input.duration() as f64 / 1_000_000.0, - } + }) } } @@ -65,11 +69,12 @@ impl ProjectRecordings { pub fn new(meta: &RecordingMeta) -> Self { let segments = match &meta.content { crate::Content::SingleSegment { segment } => { - let display = Video::new(&meta.project_path.join(&segment.display.path)); - let camera = segment - .camera - .as_ref() - .map(|camera| Video::new(&meta.project_path.join(&camera.path))); + let display = Video::new(&meta.project_path.join(&segment.display.path)) + .expect("Failed to read display video"); + let camera = segment.camera.as_ref().map(|camera| { + Video::new(&meta.project_path.join(&camera.path)) + .expect("Failed to read camera video") + }); let audio = segment .audio .as_ref() @@ -85,11 +90,12 @@ impl ProjectRecordings { .segments .iter() .map(|s| { - let display = Video::new(&meta.project_path.join(&s.display.path)); - let camera = s - .camera - .as_ref() - .map(|camera| Video::new(&meta.project_path.join(&camera.path))); + let display = Video::new(&meta.project_path.join(&s.display.path)) + .expect("Failed to read display video"); + let camera = s.camera.as_ref().map(|camera| { + Video::new(&meta.project_path.join(&camera.path)) + .expect("Failed to read camera video") + }); let audio = s .audio .as_ref() @@ -110,6 +116,10 @@ impl ProjectRecordings { pub fn duration(&self) -> f64 { self.segments.iter().map(|s| s.duration()).sum() } + + pub fn get_source_duration(&self, path: &PathBuf) -> Result { + Video::new(path).map(|v| v.duration) + } } #[derive(Debug, Clone, Serialize, Type)]
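Video::new switching from .unwrap() to Result<Self, String> is what lets the camera-duration probing earlier in this patch degrade gracefully when a file is unreadable. The essential chain, reduced to a duration probe (ffmpeg here is the ffmpeg-next crate this file already uses, and the calls mirror the diff exactly):

use std::path::PathBuf;

fn probe_duration(path: &PathBuf) -> Result<f64, String> {
    let input =
        ffmpeg::format::input(path).map_err(|e| format!("Failed to open video: {}", e))?;
    // Confirm a video stream exists before trusting container metadata.
    input
        .streams()
        .best(ffmpeg::media::Type::Video)
        .ok_or_else(|| "No video stream found".to_string())?;
    // Input::duration() is in AV_TIME_BASE units (microseconds).
    Ok(input.duration() as f64 / 1_000_000.0)
}

One thing a follow-up could tidy: get_source_duration takes &self but never consults the cached segments, re-probing the file on every call, and the expect() calls in ProjectRecordings::new still panic on unreadable files, so the Result refactor currently stops at Video::new.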