diff --git a/apps/desktop/src-tauri/src/camera.rs b/apps/desktop/src-tauri/src/camera.rs index 9532807114..8ef4c53296 100644 --- a/apps/desktop/src-tauri/src/camera.rs +++ b/apps/desktop/src-tauri/src/camera.rs @@ -116,6 +116,16 @@ impl CameraPreviewManager { self.preview.is_some() } + pub fn notify_window_resized(&self, width: u32, height: u32) { + if let Some(preview) = &self.preview { + preview + .reconfigure + .send(ReconfigureEvent::WindowResized { width, height }) + .map_err(|err| error!("Error notifying camera preview of resize: {err}")) + .ok(); + } + } + /// Initialize the camera preview for a specific Tauri window pub async fn init_window( &mut self, @@ -169,6 +179,7 @@ impl CameraPreviewManager { #[derive(Clone)] enum ReconfigureEvent { State(CameraPreviewState), + WindowResized { width: u32, height: u32 }, Shutdown, } @@ -604,6 +615,10 @@ impl Renderer { self.reconfigure_gpu_surface(width, height); } } + Err(ReconfigureEvent::WindowResized { width, height }) => { + trace!("CameraPreview/ReconfigureEvent.WindowResized({width}x{height})"); + self.reconfigure_gpu_surface(width, height); + } Err(ReconfigureEvent::Shutdown) => return, } } diff --git a/apps/desktop/src-tauri/src/captions.rs b/apps/desktop/src-tauri/src/captions.rs index dadfb17afc..a31c56c7cd 100644 --- a/apps/desktop/src-tauri/src/captions.rs +++ b/apps/desktop/src-tauri/src/captions.rs @@ -1050,6 +1050,10 @@ pub async fn save_captions( serde_json::Number::from_f64(settings.word_transition_duration as f64).unwrap(), ), ); + settings_obj.insert( + "activeWordHighlight".to_string(), + serde_json::Value::Bool(settings.active_word_highlight), + ); json_obj.insert( "settings".to_string(), @@ -1200,6 +1204,12 @@ pub fn parse_captions_json(json: &str) -> Result Result { if let Ok(CapWindowId::Camera) = CapWindowId::from_str(label) { + tracing::warn!("Camera window CloseRequested event received!"); tokio::spawn(cleanup_camera_window(app.clone())); } } WindowEvent::Destroyed => { if let Ok(window_id) = CapWindowId::from_str(label) { + if matches!(window_id, CapWindowId::Camera) { + tracing::warn!("Camera window Destroyed event received!"); + } match window_id { CapWindowId::Main => { let app = app.clone(); diff --git a/apps/desktop/src-tauri/src/recording.rs b/apps/desktop/src-tauri/src/recording.rs index c4471b56e9..284d1f44e3 100644 --- a/apps/desktop/src-tauri/src/recording.rs +++ b/apps/desktop/src-tauri/src/recording.rs @@ -607,10 +607,29 @@ pub async fn start_recording( win.close().ok(); } } + #[cfg(windows)] + let had_camera_window = CapWindowId::Camera.get(&app).is_some(); + #[cfg(windows)] + if had_camera_window { + tracing::info!( + "Closing camera window BEFORE InProgressRecording show (will recreate after)" + ); + if let Some(cam_win) = CapWindowId::Camera.get(&app) { + cam_win.close().ok(); + } + } + let _ = ShowCapWindow::InProgressRecording { countdown } .show(&app) .await; + #[cfg(windows)] + if had_camera_window { + tracing::info!("Recreating camera window after InProgressRecording"); + tokio::time::sleep(std::time::Duration::from_millis(150)).await; + ShowCapWindow::Camera.show(&app).await.ok(); + } + if let Some(window) = CapWindowId::Main.get(&app) { let _ = general_settings .map(|v| v.main_window_recording_start_behaviour) diff --git a/apps/desktop/src-tauri/src/target_select_overlay.rs b/apps/desktop/src-tauri/src/target_select_overlay.rs index e41efe5e91..7bb4e31cde 100644 --- a/apps/desktop/src-tauri/src/target_select_overlay.rs +++ b/apps/desktop/src-tauri/src/target_select_overlay.rs @@ -9,7 +9,7 @@ 
use base64::prelude::*; use cap_recording::screen_capture::ScreenCaptureTarget; use crate::{ - general_settings, + App, ArcLock, general_settings, window_exclusion::WindowExclusion, windows::{CapWindowId, ShowCapWindow}, }; @@ -163,10 +163,13 @@ pub async fn update_camera_overlay_bounds( .get_webview_window("camera") .ok_or("Camera window not found")?; + let width_u32 = width as u32; + let height_u32 = height as u32; + window .set_size(tauri::Size::Physical(tauri::PhysicalSize { - width: width as u32, - height: height as u32, + width: width_u32, + height: height_u32, })) .map_err(|e| e.to_string())?; window @@ -176,6 +179,16 @@ pub async fn update_camera_overlay_bounds( })) .map_err(|e| e.to_string())?; + let scale_factor = window.scale_factor().unwrap_or(1.0); + let logical_width = (width / scale_factor) as u32; + let logical_height = (height / scale_factor) as u32; + + let state = app.state::<ArcLock<App>>(); + let app_state = state.read().await; + app_state + .camera_preview + .notify_window_resized(logical_width, logical_height); + Ok(()) } diff --git a/apps/desktop/src/routes/camera.tsx b/apps/desktop/src/routes/camera.tsx index ef950cbce6..760a245ee9 100644 --- a/apps/desktop/src/routes/camera.tsx +++ b/apps/desktop/src/routes/camera.tsx @@ -589,16 +589,6 @@ function LegacyCameraPreviewPage(props: { disconnected: Accessor }) { let cameraCanvasRef: HTMLCanvasElement | undefined; - createEffect( - on( - () => rawOptions.cameraLabel, - (label) => { - if (label === null) getCurrentWindow().close(); - }, - { defer: true }, - ), - ); - onMount(() => getCurrentWindow().show()); return ( @@ -700,6 +690,7 @@ function Canvas(props: { latestFrame: Accessor<{ width: number; data: ImageData } | null | undefined>; state: CameraWindowState; ref: HTMLCanvasElement | undefined; + containerSize?: { width: number; height: number }; }) { const style = () => { const frame = props.latestFrame(); const aspectRatio = frame.data.width / frame.data.height; - // Use state.size directly for immediate feedback - const base = props.state.size; + // Use container size if available (for external resize), otherwise use state.size + const base = props.containerSize + ?
Math.min(props.containerSize.width, props.containerSize.height) + : props.state.size; // Replicate window size logic synchronously for the canvas const winWidth = @@ -741,7 +734,7 @@ function Canvas(props: { else return { width: base, - height: base * aspectRatio, + height: base / aspectRatio, }; })(); diff --git a/apps/desktop/src/routes/editor/CaptionsTab.tsx b/apps/desktop/src/routes/editor/CaptionsTab.tsx index 3c1a0689d1..356bfa493b 100644 --- a/apps/desktop/src/routes/editor/CaptionsTab.tsx +++ b/apps/desktop/src/routes/editor/CaptionsTab.tsx @@ -150,7 +150,7 @@ function RgbInput(props: { value: string; onChange: (value: string) => void }) { } export function CaptionsTab() { - const { project, setProject, editorInstance, editorState } = + const { project, setProject, editorInstance, editorState, setEditorState } = useEditorContext(); const getSetting = ( @@ -172,12 +172,18 @@ export function CaptionsTab() { const [selectedLanguage, setSelectedLanguage] = createSignal("auto"); const [downloadedModels, setDownloadedModels] = createSignal([]); - const [isDownloading, setIsDownloading] = createSignal(false); - const [downloadProgress, setDownloadProgress] = createSignal(0); - const [downloadingModel, setDownloadingModel] = createSignal( - null, - ); - const [isGenerating, setIsGenerating] = createSignal(false); + const isDownloading = () => editorState.captions.isDownloading; + const setIsDownloading = (value: boolean) => + setEditorState("captions", "isDownloading", value); + const downloadProgress = () => editorState.captions.downloadProgress; + const setDownloadProgress = (value: number) => + setEditorState("captions", "downloadProgress", value); + const downloadingModel = () => editorState.captions.downloadingModel; + const setDownloadingModel = (value: string | null) => + setEditorState("captions", "downloadingModel", value); + const isGenerating = () => editorState.captions.isGenerating; + const setIsGenerating = (value: boolean) => + setEditorState("captions", "isGenerating", value); const [hasAudio, setHasAudio] = createSignal(false); createEffect( @@ -662,6 +668,27 @@ export function CaptionsTab() { /> +
+
+ + Active Word Highlight + + + updateCaptionSetting("activeWordHighlight", checked) + } + disabled={!hasCaptions()} + /> +
+

+ This is the first version of captions in Cap. Active word
highlighting may be inaccurate in some situations; a fix
will ship in an upcoming release.

+
+
Font Color setExportState({ type: "idle" })} + onClick={() => { + setExportState({ type: "idle" }); + handleBack(); + }} > Back to Editor diff --git a/apps/desktop/src/routes/editor/context.ts b/apps/desktop/src/routes/editor/context.ts index 80295b0ff9..1c6bec435b 100644 --- a/apps/desktop/src/routes/editor/context.ts +++ b/apps/desktop/src/routes/editor/context.ts @@ -612,6 +612,12 @@ export const [EditorContextProvider, useEditorContext] = createContextProvider( previewTime: null as number | null, playbackTime: 0, playing: false, + captions: { + isGenerating: false, + isDownloading: false, + downloadProgress: 0, + downloadingModel: null as string | null, + }, timeline: { interactMode: "seek" as "seek" | "split", selection: null as diff --git a/apps/desktop/src/routes/screenshot-editor/AnnotationLayer.tsx b/apps/desktop/src/routes/screenshot-editor/AnnotationLayer.tsx index 93f241b911..248b7d2734 100644 --- a/apps/desktop/src/routes/screenshot-editor/AnnotationLayer.tsx +++ b/apps/desktop/src/routes/screenshot-editor/AnnotationLayer.tsx @@ -86,6 +86,7 @@ export function AnnotationLayer(props: { createEffect(() => { const rect = props.imageRect; if (rect.width <= 0 || rect.height <= 0) return; + const masksToRemove: string[] = []; for (const ann of annotations) { if (ann.type !== "mask") continue; const left = clampValue( @@ -110,6 +111,10 @@ export function AnnotationLayer(props: { ); const width = Math.max(0, right - left); const height = Math.max(0, bottom - top); + if (width < 5 || height < 5) { + masksToRemove.push(ann.id); + continue; + } if ( left !== Math.min(ann.x, ann.x + ann.width) || top !== Math.min(ann.y, ann.y + ann.height) || @@ -124,6 +129,11 @@ export function AnnotationLayer(props: { }); } } + if (masksToRemove.length > 0) { + setAnnotations((prev) => + prev.filter((a) => !masksToRemove.includes(a.id)), + ); + } }); // Helper to get coordinates in SVG space diff --git a/apps/desktop/src/routes/screenshot-editor/Editor.tsx b/apps/desktop/src/routes/screenshot-editor/Editor.tsx index b49e83213e..d59ca08a50 100644 --- a/apps/desktop/src/routes/screenshot-editor/Editor.tsx +++ b/apps/desktop/src/routes/screenshot-editor/Editor.tsx @@ -173,7 +173,7 @@ function Dialogs() { contentClass="" open={dialog().open} onOpenChange={(o) => { - if (!o) setDialog((d) => ({ ...d, open: false })); + if (!o) setDialog({ open: false }); }} > ({ ...d, open: false })); + setDialog({ open: false }); }} > Save diff --git a/apps/desktop/src/routes/screenshot-editor/Header.tsx b/apps/desktop/src/routes/screenshot-editor/Header.tsx index d28ce0a407..081c9a2c32 100644 --- a/apps/desktop/src/routes/screenshot-editor/Header.tsx +++ b/apps/desktop/src/routes/screenshot-editor/Header.tsx @@ -5,7 +5,7 @@ import { remove } from "@tauri-apps/plugin-fs"; import { revealItemInDir } from "@tauri-apps/plugin-opener"; import { type as ostype } from "@tauri-apps/plugin-os"; import { cx } from "cva"; -import { createEffect, onCleanup, Suspense } from "solid-js"; +import { createEffect, createMemo, onCleanup, Show, Suspense } from "solid-js"; import CaptionControlsWindows11 from "~/components/titlebar/controls/CaptionControlsWindows11"; import IconCapCrop from "~icons/cap/crop"; import IconCapTrash from "~icons/cap/trash"; @@ -21,6 +21,7 @@ import { BorderPopover } from "./popovers/BorderPopover"; import { PaddingPopover } from "./popovers/PaddingPopover"; import { RoundingPopover } from "./popovers/RoundingPopover"; import { ShadowPopover } from "./popovers/ShadowPopover"; + import { DropdownItem, EditorButton, 
@@ -32,11 +33,34 @@ import { useScreenshotExport } from "./useScreenshotExport"; export function Header() { const ctx = useScreenshotEditorContext(); - const { setDialog, project, latestFrame } = ctx; + const { setDialog, project, originalImageSize } = ctx; const path = () => ctx.editorInstance()?.path ?? ""; const { exportImage, isExporting } = useScreenshotExport(); + const showStylingControls = createMemo(() => { + const source = project.background.source; + const sourceType = source.type; + + if (sourceType === "wallpaper") { + return source.path !== null && source.path !== ""; + } + if (sourceType === "image") { + return source.path !== null && source.path !== ""; + } + if (sourceType === "gradient") { + return true; + } + if (sourceType === "color") { + const alpha = source.alpha ?? 255; + if (alpha === 0) return false; + const value = source.value; + const isWhite = value[0] === 255 && value[1] === 255 && value[2] === 255; + return !(isWhite && alpha === 255); + } + return false; + }); + createEffect(() => { const handleKeyDown = (e: KeyboardEvent) => { const target = e.target as HTMLElement | null; @@ -65,17 +89,17 @@ export function Header() { }); const cropDialogHandler = () => { - const frame = latestFrame(); - if (!frame?.bitmap) return; + const imgSize = originalImageSize(); + if (!imgSize) return; setDialog({ open: true, type: "crop", - originalSize: { x: frame.width, y: frame.height }, + originalSize: { x: imgSize.width, y: imgSize.height }, currentCrop: project.background.crop, }); }; - const isCropDisabled = () => !latestFrame()?.bitmap; + const isCropDisabled = () => !originalImageSize(); return (
- - - - + + + + + +
diff --git a/apps/desktop/src/routes/screenshot-editor/LayersPanel.tsx b/apps/desktop/src/routes/screenshot-editor/LayersPanel.tsx index 3ae2db4bb8..93d949717a 100644 --- a/apps/desktop/src/routes/screenshot-editor/LayersPanel.tsx +++ b/apps/desktop/src/routes/screenshot-editor/LayersPanel.tsx @@ -35,7 +35,6 @@ export function LayersPanel() { setLayersPanelOpen, projectHistory, setActiveTool, - setFocusAnnotationId, } = useScreenshotEditorContext(); const [dragState, setDragState] = createSignal<{ @@ -176,7 +175,6 @@ export function LayersPanel() { if ((e.target as HTMLElement).closest("[data-grip-handle]")) return; setSelectedAnnotationId(ann.id); setActiveTool("select"); - setFocusAnnotationId(ann.id); }; const handleDelete = (id: string, e: MouseEvent) => { diff --git a/apps/desktop/src/routes/screenshot-editor/Preview.tsx b/apps/desktop/src/routes/screenshot-editor/Preview.tsx index 0f3527ff89..d7da75e2b1 100644 --- a/apps/desktop/src/routes/screenshot-editor/Preview.tsx +++ b/apps/desktop/src/routes/screenshot-editor/Preview.tsx @@ -9,7 +9,6 @@ import { } from "solid-js"; import IconCapZoomIn from "~icons/cap/zoom-in"; import IconCapZoomOut from "~icons/cap/zoom-out"; -import { ASPECT_RATIOS } from "../editor/projectConfig"; import { EditorButton, Slider } from "../editor/ui"; import { AnnotationLayer } from "./AnnotationLayer"; import { useScreenshotEditorContext } from "./context"; @@ -24,13 +23,8 @@ const gridStyle = { }; export function Preview(props: { zoom: number; setZoom: (z: number) => void }) { - const { - project, - latestFrame, - annotations, - focusAnnotationId, - setFocusAnnotationId, - } = useScreenshotEditorContext(); + const { latestFrame, annotations, focusAnnotationId, setFocusAnnotationId } = + useScreenshotEditorContext(); let canvasRef: HTMLCanvasElement | undefined; const [canvasContainerRef, setCanvasContainerRef] = @@ -38,6 +32,13 @@ export function Preview(props: { zoom: number; setZoom: (z: number) => void }) { const containerBounds = createElementBounds(canvasContainerRef); const [pan, setPan] = createSignal({ x: 0, y: 0 }); + const [isDragging, setIsDragging] = createSignal(false); + const [dragStart, setDragStart] = createSignal({ + x: 0, + y: 0, + panX: 0, + panY: 0, + }); const [previousBitmap, setPreviousBitmap] = createSignal( null, @@ -114,39 +115,61 @@ export function Preview(props: { zoom: number; setZoom: (z: number) => void }) { } }; + const handleMouseDown = (e: MouseEvent) => { + if (e.button !== 0) return; + e.preventDefault(); + setIsDragging(true); + setDragStart({ + x: e.clientX, + y: e.clientY, + panX: pan().x, + panY: pan().y, + }); + }; + + const handleMiddleMouseDown = (e: MouseEvent) => { + if (e.button !== 1) return; + e.preventDefault(); + setIsDragging(true); + setDragStart({ + x: e.clientX, + y: e.clientY, + panX: pan().x, + panY: pan().y, + }); + }; + + const handleMouseMove = (e: MouseEvent) => { + if (!isDragging()) return; + const dx = e.clientX - dragStart().x; + const dy = e.clientY - dragStart().y; + setPan({ + x: dragStart().panX + dx, + y: dragStart().panY + dy, + }); + }; + + const handleMouseUp = () => { + setIsDragging(false); + }; + + createEffect(() => { + if (isDragging()) { + window.addEventListener("mousemove", handleMouseMove); + window.addEventListener("mouseup", handleMouseUp); + } + onCleanup(() => { + window.removeEventListener("mousemove", handleMouseMove); + window.removeEventListener("mouseup", handleMouseUp); + }); + }); + createEffect(() => { const frame = latestFrame(); if (frame?.bitmap && canvasRef) { 
const ctx = canvasRef.getContext("2d"); if (ctx) { ctx.drawImage(frame.bitmap, 0, 0); - const crop = project.background.crop; - if (crop) { - const width = canvasRef.width; - const height = canvasRef.height; - const cropX = Math.max(0, Math.round(crop.position.x)); - const cropY = Math.max(0, Math.round(crop.position.y)); - const cropW = Math.max( - 0, - Math.min(Math.round(crop.size.x), width - cropX), - ); - const cropH = Math.max( - 0, - Math.min(Math.round(crop.size.y), height - cropY), - ); - const topH = Math.max(0, cropY); - const bottomY = cropY + cropH; - const bottomH = Math.max(0, height - bottomY); - const leftW = Math.max(0, cropX); - const rightX = cropX + cropW; - const rightW = Math.max(0, width - rightX); - ctx.fillStyle = "white"; - if (topH > 0) ctx.fillRect(0, 0, width, topH); - if (bottomH > 0) ctx.fillRect(0, bottomY, width, bottomH); - if (cropH > 0 && leftW > 0) ctx.fillRect(0, cropY, leftW, cropH); - if (cropH > 0 && rightW > 0) - ctx.fillRect(rightX, cropY, rightW, cropH); - } } } }); @@ -159,6 +182,7 @@ export function Preview(props: { zoom: number; setZoom: (z: number) => void }) { class="flex-1 relative flex items-center justify-center overflow-hidden outline-none" style={gridStyle} onWheel={handleWheel} + onMouseDown={handleMiddleMouseDown} >
void }) { const frameHeight = () => frame().height; const imageRect = createMemo(() => { - const crop = project.background.crop; - if (crop) { - return { - x: crop.position.x, - y: crop.position.y, - width: crop.size.x, - height: crop.size.y, - }; - } return { x: 0, y: 0, @@ -224,72 +239,11 @@ export function Preview(props: { zoom: number; setZoom: (z: number) => void }) { }); const bounds = createMemo(() => { - const crop = project.background.crop; - const workspacePadding = crop - ? Math.min( - 500, - Math.max( - 100, - Math.round(Math.max(crop.size.x, crop.size.y) * 0.5), - ), - ) - : 0; - let minX = crop ? crop.position.x - workspacePadding : 0; - let minY = crop ? crop.position.y - workspacePadding : 0; - let maxX = crop - ? crop.position.x + crop.size.x + workspacePadding - : frameWidth(); - let maxY = crop - ? crop.position.y + crop.size.y + workspacePadding - : frameHeight(); - - for (const ann of annotations) { - const ax1 = ann.x; - const ay1 = ann.y; - const ax2 = ann.x + ann.width; - const ay2 = ann.y + ann.height; - - const left = Math.min(ax1, ax2); - const right = Math.max(ax1, ax2); - const top = Math.min(ay1, ay2); - const bottom = Math.max(ay1, ay2); - - minX = Math.min(minX, left); - maxX = Math.max(maxX, right); - minY = Math.min(minY, top); - maxY = Math.max(maxY, bottom); - } - - let x = minX; - let y = minY; - let width = maxX - minX; - let height = maxY - minY; - - if (project.aspectRatio) { - const ratioConf = ASPECT_RATIOS[project.aspectRatio]; - if (ratioConf) { - const targetRatio = ratioConf.ratio[0] / ratioConf.ratio[1]; - const currentRatio = width / height; - - if (currentRatio > targetRatio) { - const newHeight = width / targetRatio; - const padY = (newHeight - height) / 2; - y -= padY; - height = newHeight; - } else { - const newWidth = height * targetRatio; - const padX = (newWidth - width) / 2; - x -= padX; - width = newWidth; - } - } - } - return { - x, - y, - width, - height, + x: 0, + y: 0, + width: frameWidth(), + height: frameHeight(), }; }); @@ -527,6 +481,13 @@ export function Preview(props: { zoom: number; setZoom: (z: number) => void }) { return (
+
void }) { position: "relative", transform: `translate(${pan().x}px, ${pan().y}px)`, "will-change": "transform", + "z-index": 1, + cursor: "default", + overflow: "hidden", + "box-shadow": + "0 4px 20px rgba(0, 0, 0, 0.15), 0 2px 8px rgba(0, 0, 0, 0.1)", + "border-radius": "4px", }} class="block" > diff --git a/apps/desktop/src/routes/screenshot-editor/context.tsx b/apps/desktop/src/routes/screenshot-editor/context.tsx index 480e888fd6..e9cddf9493 100644 --- a/apps/desktop/src/routes/screenshot-editor/context.tsx +++ b/apps/desktop/src/routes/screenshot-editor/context.tsx @@ -134,6 +134,10 @@ function createScreenshotEditorContext() { }); const [latestFrame, setLatestFrame] = createLazySignal(); + const [originalImageSize, setOriginalImageSize] = createSignal<{ + width: number; + height: number; + } | null>(null); const [isRenderReady, setIsRenderReady] = createSignal(false); let wsRef: WebSocket | null = null; @@ -147,43 +151,71 @@ function createScreenshotEditorContext() { } } - let hasReceivedWebSocketFrame = false; + const hasReceivedWebSocketFrame = { value: false }; if (instance.path) { - const img = new Image(); - img.crossOrigin = "anonymous"; - img.src = convertFileSrc(instance.path); - img.onload = async () => { - if (hasReceivedWebSocketFrame) { - return; - } - try { - const bitmap = await createImageBitmap(img); - if (hasReceivedWebSocketFrame) { - bitmap.close(); - return; - } - const existing = latestFrame(); - if (existing?.bitmap) { - existing.bitmap.close(); - } - setLatestFrame({ + const loadImage = (imagePath: string) => { + const img = new Image(); + img.crossOrigin = "anonymous"; + img.src = convertFileSrc(imagePath); + img.onload = async () => { + setOriginalImageSize({ width: img.naturalWidth, height: img.naturalHeight, - bitmap, }); - setIsRenderReady(true); - } catch (e: unknown) { - console.error("Failed to create ImageBitmap from fallback image:", e); - } - }; - img.onerror = (event) => { - console.error("Failed to load screenshot image:", { - path: instance.path, - src: img.src, - event, - }); + if (hasReceivedWebSocketFrame.value) { + return; + } + try { + const bitmap = await createImageBitmap(img); + if (hasReceivedWebSocketFrame.value) { + bitmap.close(); + return; + } + const existing = latestFrame(); + if (existing?.bitmap) { + existing.bitmap.close(); + } + setLatestFrame({ + width: img.naturalWidth, + height: img.naturalHeight, + bitmap, + }); + setIsRenderReady(true); + } catch (e: unknown) { + console.error( + "Failed to create ImageBitmap from fallback image:", + e, + ); + } + }; + return img; }; + + const pathStr = instance.path; + const isCapDir = pathStr.endsWith(".cap"); + + if (isCapDir) { + const originalPath = `${pathStr}/original.png`; + const img = loadImage(originalPath); + img.onerror = () => { + const fallbackImg = loadImage(pathStr); + fallbackImg.onerror = (event) => { + console.error("Failed to load screenshot image:", { + path: instance.path, + event, + }); + }; + }; + } else { + const img = loadImage(pathStr); + img.onerror = (event) => { + console.error("Failed to load screenshot image:", { + path: instance.path, + event, + }); + }; + } } const ws = new WebSocket(instance.framesSocketUrl); @@ -201,7 +233,7 @@ function createScreenshotEditorContext() { if (!width || !height) return; - hasReceivedWebSocketFrame = true; + hasReceivedWebSocketFrame.value = true; setIsRenderReady(true); const expectedRowBytes = width * 4; @@ -423,6 +455,7 @@ function createScreenshotEditorContext() { dialog, setDialog, latestFrame, + originalImageSize, 
isRenderReady, editorInstance, }; diff --git a/apps/desktop/src/routes/target-select-overlay.tsx b/apps/desktop/src/routes/target-select-overlay.tsx index 638974c369..f341b96b69 100644 --- a/apps/desktop/src/routes/target-select-overlay.tsx +++ b/apps/desktop/src/routes/target-select-overlay.tsx @@ -536,23 +536,34 @@ function Inner() { const originalLogicalSize = original.size.toLogical(scaleFactor); const padding = 16; + const TOOLBAR_HEIGHT = 56; + const originalContentWidth = originalLogicalSize.width; + const originalContentHeight = Math.max( + 0, + originalLogicalSize.height - TOOLBAR_HEIGHT, + ); + const selectionMinDim = Math.min(bounds.width, bounds.height); - const targetMaxDim = Math.max( - 150, + const targetContentMaxDim = Math.max( + 100, Math.min( - Math.max(originalLogicalSize.width, originalLogicalSize.height), - selectionMinDim * 0.5, + Math.max(originalContentWidth, originalContentHeight), + selectionMinDim * 0.5 - TOOLBAR_HEIGHT, ), ); - const originalMaxDim = Math.max( - originalLogicalSize.width, - originalLogicalSize.height, + const originalContentMaxDim = Math.max( + originalContentWidth, + originalContentHeight, ); - const scale = targetMaxDim / originalMaxDim; + const scale = + originalContentMaxDim > 0 + ? targetContentMaxDim / originalContentMaxDim + : 1; - const newWidth = Math.round(originalLogicalSize.width * scale); - const newHeight = Math.round(originalLogicalSize.height * scale); + const newWidth = Math.round(originalContentWidth * scale); + const newHeight = + Math.round(originalContentHeight * scale) + TOOLBAR_HEIGHT; if ( bounds.width > newWidth + padding * 2 && @@ -580,6 +591,12 @@ function Inner() { if (original && win) { await win.setPosition(original.position); await win.setSize(original.size); + await commands.updateCameraOverlayBounds( + original.position.x, + original.position.y, + original.size.width, + original.size.height, + ); setOriginalCameraBounds(null); setTargetState(null); lastApplied = null; diff --git a/apps/desktop/src/store/captions.ts b/apps/desktop/src/store/captions.ts index 7f951f5332..866d7718c2 100644 --- a/apps/desktop/src/store/captions.ts +++ b/apps/desktop/src/store/captions.ts @@ -29,6 +29,7 @@ export const defaultCaptionSettings: CaptionSettings = { fadeDuration: 0.2, lingerDuration: 0.4, wordTransitionDuration: 0.25, + activeWordHighlight: false, }; function createCaptionsStore() { @@ -145,6 +146,7 @@ function createCaptionsStore() { fadeDuration: state.settings.fadeDuration, lingerDuration: state.settings.lingerDuration, wordTransitionDuration: state.settings.wordTransitionDuration, + activeWordHighlight: state.settings.activeWordHighlight, }, }; diff --git a/apps/desktop/src/utils/tauri.ts b/apps/desktop/src/utils/tauri.ts index 0d003f487b..9a596090b1 100644 --- a/apps/desktop/src/utils/tauri.ts +++ b/apps/desktop/src/utils/tauri.ts @@ -369,6 +369,7 @@ uploadProgressEvent: "upload-progress-event" /** user-defined types **/ +export type AllGpusInfo = { gpus: GpuInfoDiag[]; primaryGpuIndex: number | null; isMultiGpuSystem: boolean; hasDiscreteGpu: boolean } export type Annotation = { id: string; type: AnnotationType; x: number; y: number; width: number; height: number; strokeColor: string; strokeWidth: number; fillColor: string; opacity: number; rotation: number; text: string | null; maskType?: MaskType | null; maskLevel?: number | null } export type AnnotationType = "arrow" | "circle" | "rectangle" | "text" | "mask" export type AppTheme = "system" | "light" | "dark" @@ -392,7 +393,7 @@ export type CameraXPosition = 
"left" | "center" | "right" export type CameraYPosition = "top" | "bottom" export type CaptionData = { segments: CaptionSegment[]; settings: CaptionSettings | null } export type CaptionSegment = { id: string; start: number; end: number; text: string; words?: CaptionWord[] } -export type CaptionSettings = { enabled: boolean; font: string; size: number; color: string; backgroundColor: string; backgroundOpacity: number; position?: string; italic: boolean; fontWeight?: number; outline: boolean; outlineColor: string; exportWithSubtitles: boolean; highlightColor?: string; fadeDuration?: number; lingerDuration?: number; wordTransitionDuration?: number } +export type CaptionSettings = { enabled: boolean; font: string; size: number; color: string; backgroundColor: string; backgroundOpacity: number; position?: string; italic: boolean; fontWeight?: number; outline: boolean; outlineColor: string; exportWithSubtitles: boolean; highlightColor?: string; fadeDuration?: number; lingerDuration?: number; wordTransitionDuration?: number; activeWordHighlight?: boolean } export type CaptionWord = { text: string; start: number; end: number } export type CaptionsData = { segments: CaptionSegment[]; settings: CaptionSettings } export type CaptureDisplay = { id: DisplayId; name: string; refresh_rate: number } @@ -437,6 +438,7 @@ quality: number | null; * Whether to prioritize speed over quality (default: false) */ fast: boolean | null } +export type GpuInfoDiag = { vendor: string; description: string; dedicatedVideoMemoryMb: number; adapterIndex: number; isSoftwareAdapter: boolean; isBasicRenderDriver: boolean; supportsHardwareEncoding: boolean } export type HapticPattern = "alignment" | "levelChange" | "generic" export type HapticPerformanceTime = "default" | "now" | "drawCompleted" export type Hotkey = { code: string; meta: boolean; ctrl: boolean; alt: boolean; shift: boolean } @@ -449,7 +451,6 @@ export type JsonValue = [T] export type LogicalBounds = { position: LogicalPosition; size: LogicalSize } export type LogicalPosition = { x: number; y: number } export type LogicalSize = { width: number; height: number } -export type MacOSVersionInfo = { displayName: string } export type MainWindowRecordingStartBehaviour = "close" | "minimise" export type MaskKeyframes = { position?: MaskVectorKeyframe[]; size?: MaskVectorKeyframe[]; intensity?: MaskScalarKeyframe[] } export type MaskKind = "sensitive" | "highlight" @@ -492,6 +493,7 @@ export type RecordingStatus = "pending" | "recording" export type RecordingStopped = null export type RecordingTargetMode = "display" | "window" | "area" export type RenderFrameEvent = { frame_number: number; fps: number; resolution_base: XY } +export type RenderingStatus = { isUsingSoftwareRendering: boolean; isUsingBasicRenderDriver: boolean; hardwareEncodingAvailable: boolean; warningMessage: string | null } export type RequestOpenRecordingPicker = { target_mode: RecordingTargetMode | null } export type RequestOpenSettings = { page: string } export type RequestScreenCapturePrewarm = { force?: boolean } @@ -512,7 +514,7 @@ export type StartRecordingInputs = { capture_target: ScreenCaptureTarget; captur export type StereoMode = "stereo" | "monoL" | "monoR" export type StudioRecordingMeta = { segment: SingleSegment } | { inner: MultipleSegments } export type StudioRecordingStatus = { status: "InProgress" } | { status: "NeedsRemux" } | { status: "Failed"; error: string } | { status: "Complete" } -export type SystemDiagnostics = { macosVersion: MacOSVersionInfo | null; availableEncoders: 
string[]; screenCaptureSupported: boolean } +export type SystemDiagnostics = { windowsVersion: WindowsVersionInfo | null; gpuInfo: GpuInfoDiag | null; allGpus: AllGpusInfo | null; renderingStatus: RenderingStatus; availableEncoders: string[]; graphicsCaptureSupported: boolean; d3D11VideoProcessorAvailable: boolean } export type TargetUnderCursor = { display_id: DisplayId | null; window: WindowUnderCursor | null } export type TextSegment = { start: number; end: number; enabled?: boolean; content?: string; center?: XY; size?: XY; fontFamily?: string; fontSize?: number; fontWeight?: number; italic?: boolean; color?: string; fadeDuration?: number } export type TimelineConfiguration = { segments: TimelineSegment[]; zoomSegments: ZoomSegment[]; sceneSegments?: SceneSegment[]; maskSegments?: MaskSegment[]; textSegments?: TextSegment[] } @@ -529,6 +531,7 @@ export type VideoUploadInfo = { id: string; link: string; config: S3UploadMeta } export type WindowExclusion = { bundleIdentifier?: string | null; ownerName?: string | null; windowTitle?: string | null } export type WindowId = string export type WindowUnderCursor = { id: WindowId; app_name: string; bounds: LogicalBounds } +export type WindowsVersionInfo = { major: number; minor: number; build: number; displayName: string; meetsRequirements: boolean; isWindows11: boolean } export type XY = { x: T; y: T } export type ZoomMode = "auto" | { manual: { x: number; y: number } } export type ZoomSegment = { start: number; end: number; amount: number; mode: ZoomMode } diff --git a/crates/editor/src/audio.rs b/crates/editor/src/audio.rs index e8053606d7..20c53de315 100644 --- a/crates/editor/src/audio.rs +++ b/crates/editor/src/audio.rs @@ -7,6 +7,7 @@ use cap_project::{AudioConfiguration, ClipOffsets, ProjectConfiguration, Timelin use ffmpeg::{ ChannelLayout, Dictionary, format as avformat, frame::Audio as FFAudio, software::resampling, }; +#[cfg(not(target_os = "windows"))] use ringbuf::{ HeapRb, traits::{Consumer, Observer, Producer}, }; @@ -245,15 +246,19 @@ impl AudioRenderer { } } +#[cfg(not(target_os = "windows"))] pub struct AudioPlaybackBuffer { frame_buffer: AudioRenderer, resampler: AudioResampler, resampled_buffer: HeapRb, }
+#[cfg(not(target_os = "windows"))] impl AudioPlaybackBuffer { pub const PLAYBACK_SAMPLES_COUNT: u32 = 512; + pub const WIRELESS_PLAYBACK_SAMPLES_COUNT: u32 = 1024; + const PROCESSING_SAMPLES_COUNT: u32 = 1024; pub fn new(data: Vec, output_info: AudioInfo) -> Self { @@ -266,7 +271,6 @@ impl AudioPlaybackBuffer { let resampler = AudioResampler::new(output_info).unwrap(); - // Up to 1 second of pre-rendered audio let capacity = (output_info.sample_rate as usize) * output_info.channels * output_info.sample_format.bytes(); @@ -369,8 +373,6 @@ impl AudioPlaybackBuffer { project: &ProjectConfiguration, min_headroom_samples: usize, ) { - self.prefill(project, min_headroom_samples.max(playback_buffer.len())); - let filled = self.resampled_buffer.pop_slice(playback_buffer); playback_buffer[filled..].fill(T::EQUILIBRIUM); @@ -416,6 +418,7 @@ impl AudioResampler { }) } + #[cfg(not(target_os = "windows"))] pub fn reset(&mut self) { *self = Self::new(self.output).unwrap(); } @@ -440,3 +443,109 @@ impl AudioResampler { Some(self.current_frame_data()) } } + +#[cfg(target_os = "windows")] +pub struct PrerenderedAudioBuffer { + samples: Vec, + read_position: usize, + sample_rate: u32, + channels: usize, +} + +#[cfg(target_os = "windows")] +impl PrerenderedAudioBuffer { + pub fn new( + segments: Vec, + project: &ProjectConfiguration, + output_info: AudioInfo, + duration_secs: f64, + ) -> Self { + info!( + duration_secs = duration_secs, + sample_rate = output_info.sample_rate, + channels = output_info.channels, + "Pre-rendering audio for Windows playback" + ); + + let mut renderer = AudioRenderer::new(segments); + let mut resampler = AudioResampler::new(output_info).unwrap(); + + let total_source_samples = (duration_secs * AudioData::SAMPLE_RATE as f64) as usize; + let estimated_output_samples = + (duration_secs * output_info.sample_rate as f64) as usize * output_info.channels; + + let mut samples: Vec = Vec::with_capacity(estimated_output_samples + 10000); + let bytes_per_sample = output_info.sample_size(); + let chunk_size = 1024usize; + + renderer.set_playhead(0.0, project); + + let mut rendered_source_samples = 0usize; + while rendered_source_samples < total_source_samples { + let frame_opt = renderer.render_frame(chunk_size, project); + + let resampled = match frame_opt { + Some(frame) => resampler.queue_and_process_frame(&frame), + None => match resampler.flush_frame() { + Some(data) => data, + None => break, + }, + }; + + if !resampled.is_empty() { + for chunk in resampled.chunks(bytes_per_sample) { + samples.push(T::from_bytes(chunk)); + } + } + + rendered_source_samples += chunk_size; + } + + while let Some(flushed) = resampler.flush_frame() { + if flushed.is_empty() { + break; + } + for chunk in flushed.chunks(bytes_per_sample) { + samples.push(T::from_bytes(chunk)); + } + } + + info!( + total_samples = samples.len(), + memory_mb = (samples.len() * std::mem::size_of::()) / (1024 * 1024), + "Audio pre-rendering complete" + ); + + Self { + samples, + read_position: 0, + sample_rate: output_info.sample_rate, + channels: output_info.channels, + } + } + + pub fn set_playhead(&mut self, playhead_secs: f64) { + let sample_position = (playhead_secs * self.sample_rate as f64) as usize * self.channels; + self.read_position = sample_position.min(self.samples.len()); + } + + #[allow(dead_code)] + pub fn current_playhead_secs(&self) -> f64 { + (self.read_position / self.channels) as f64 / self.sample_rate as f64 + } + + pub fn fill(&mut self, buffer: &mut [T]) { + let available = 
self.samples.len().saturating_sub(self.read_position); + let to_copy = buffer.len().min(available); + + if to_copy > 0 { + buffer[..to_copy] + .copy_from_slice(&self.samples[self.read_position..self.read_position + to_copy]); + self.read_position += to_copy; + } + + if to_copy < buffer.len() { + buffer[to_copy..].fill(T::EQUILIBRIUM); + } + } +} diff --git a/crates/editor/src/editor_instance.rs b/crates/editor/src/editor_instance.rs index a33a5c844a..d9fdc7c74b 100644 --- a/crates/editor/src/editor_instance.rs +++ b/crates/editor/src/editor_instance.rs @@ -401,21 +401,13 @@ impl EditorInstance { if cancel_token.is_cancelled() || *playback_rx.borrow() { return; } - if decoders + let _ = decoders .get_frames( prefetch_segment_time as f32, !hide_camera, prefetch_clip_offsets, ) - .await - .is_none() - { - tracing::warn!( - prefetch_segment_time, - hide_camera, - "prefetch get_frames returned None" - ); - } + .await; }); } } diff --git a/crates/editor/src/playback.rs b/crates/editor/src/playback.rs index 824e86e91f..33352d737c 100644 --- a/crates/editor/src/playback.rs +++ b/crates/editor/src/playback.rs @@ -1,12 +1,14 @@ -use cap_audio::{ - FromSampleBytes, LatencyCorrectionConfig, LatencyCorrector, default_output_latency_hint, -}; +use cap_audio::FromSampleBytes; +#[cfg(not(target_os = "windows"))] +use cap_audio::{LatencyCorrectionConfig, LatencyCorrector, default_output_latency_hint}; use cap_media::MediaError; use cap_media_info::AudioInfo; use cap_project::{ProjectConfiguration, XY}; use cap_rendering::{DecodedSegmentFrames, ProjectUniforms, RenderVideoConstants}; +#[cfg(not(target_os = "windows"))] +use cpal::{BufferSize, SupportedBufferSize}; use cpal::{ - BufferSize, SampleFormat, SupportedBufferSize, + SampleFormat, traits::{DeviceTrait, HostTrait, StreamTrait}, }; use futures::stream::{FuturesUnordered, StreamExt}; @@ -23,11 +25,10 @@ use tokio::{ }; use tracing::{error, info, warn}; +#[cfg(not(target_os = "windows"))] +use crate::audio::AudioPlaybackBuffer; use crate::{ - audio::{AudioPlaybackBuffer, AudioSegment}, - editor, - editor_instance::SegmentMedia, - segments::get_audio_segments, + audio::AudioSegment, editor, editor_instance::SegmentMedia, segments::get_audio_segments, }; const PREFETCH_BUFFER_SIZE: usize = 60; @@ -343,6 +344,8 @@ impl Playback { project: self.project.clone(), fps, playhead_rx: audio_playhead_rx, + #[cfg(target_os = "windows")] + duration_secs: duration, } .spawn(); @@ -693,6 +696,8 @@ struct AudioPlayback { project: watch::Receiver, fps: u32, playhead_rx: watch::Receiver, + #[cfg(target_os = "windows")] + duration_secs: f64, } impl AudioPlayback { @@ -724,12 +729,45 @@ impl AudioPlayback { } }; + #[cfg(target_os = "windows")] + let duration_secs = self.duration_secs; + let result = match supported_config.sample_format() { + #[cfg(target_os = "windows")] + SampleFormat::I16 => { + self.create_stream_prerendered::(device, supported_config, duration_secs) + } + #[cfg(target_os = "windows")] + SampleFormat::I32 => { + self.create_stream_prerendered::(device, supported_config, duration_secs) + } + #[cfg(target_os = "windows")] + SampleFormat::F32 => { + self.create_stream_prerendered::(device, supported_config, duration_secs) + } + #[cfg(target_os = "windows")] + SampleFormat::I64 => { + self.create_stream_prerendered::(device, supported_config, duration_secs) + } + #[cfg(target_os = "windows")] + SampleFormat::U8 => { + self.create_stream_prerendered::(device, supported_config, duration_secs) + } + #[cfg(target_os = "windows")] + SampleFormat::F64 => { + 
self.create_stream_prerendered::(device, supported_config, duration_secs) + } + #[cfg(not(target_os = "windows"))] SampleFormat::I16 => self.create_stream::(device, supported_config), + #[cfg(not(target_os = "windows"))] SampleFormat::I32 => self.create_stream::(device, supported_config), + #[cfg(not(target_os = "windows"))] SampleFormat::F32 => self.create_stream::(device, supported_config), + #[cfg(not(target_os = "windows"))] SampleFormat::I64 => self.create_stream::(device, supported_config), + #[cfg(not(target_os = "windows"))] SampleFormat::U8 => self.create_stream::(device, supported_config), + #[cfg(not(target_os = "windows"))] SampleFormat::F64 => self.create_stream::(device, supported_config), format => { error!( @@ -764,6 +802,7 @@ impl AudioPlayback { }); } + #[cfg(not(target_os = "windows"))] fn create_stream( self, device: cpal::Device, @@ -779,6 +818,7 @@ impl AudioPlayback { segments, fps, playhead_rx, + .. } = self; let mut base_output_info = AudioInfo::from_stream_config(&supported_config); @@ -862,9 +902,14 @@ impl AudioPlayback { let buffer_size = base_output_info.buffer_size; let channels = base_output_info.channels; + #[cfg(target_os = "windows")] + let headroom_multiplier = 4usize; + #[cfg(not(target_os = "windows"))] + let headroom_multiplier = 2usize; + let headroom_samples = (buffer_size as usize) .saturating_mul(channels) - .saturating_mul(2) + .saturating_mul(headroom_multiplier) .max(channels * AudioPlaybackBuffer::::PLAYBACK_SAMPLES_COUNT as usize); let mut audio_renderer = AudioPlaybackBuffer::new(segments.clone(), base_output_info); @@ -901,6 +946,7 @@ impl AudioPlayback { let static_latency_hint = default_output_latency_hint(sample_rate, buffer_size).or(initial_latency_hint); let latency_config = LatencyCorrectionConfig::default(); + #[allow(unused_mut)] let mut latency_corrector = LatencyCorrector::new(static_latency_hint, latency_config); let initial_compensation_secs = latency_corrector.initial_compensation_secs(); let device_sample_rate = sample_rate; @@ -909,7 +955,13 @@ impl AudioPlayback { let project_snapshot = project.borrow(); audio_renderer .set_playhead(playhead + initial_compensation_secs, &project_snapshot); - audio_renderer.prefill(&project_snapshot, headroom_samples); + + #[cfg(target_os = "windows")] + let initial_prefill = headroom_samples * 4; + #[cfg(not(target_os = "windows"))] + let initial_prefill = headroom_samples; + + audio_renderer.prefill(&project_snapshot, initial_prefill); } if let Some(hint) = static_latency_hint @@ -935,27 +987,81 @@ impl AudioPlayback { let headroom_for_stream = headroom_samples; let mut playhead_rx_for_stream = playhead_rx.clone(); let mut last_video_playhead = playhead; + + #[cfg(target_os = "windows")] + const FIXED_LATENCY_SECS: f64 = 0.08; + #[cfg(target_os = "windows")] + const SYNC_THRESHOLD_SECS: f64 = 0.20; + #[cfg(target_os = "windows")] + const HARD_SEEK_THRESHOLD_SECS: f64 = 0.5; + #[cfg(target_os = "windows")] + const MIN_SYNC_INTERVAL_CALLBACKS: u32 = 50; + + #[cfg(not(target_os = "windows"))] const SYNC_THRESHOLD_SECS: f64 = 0.12; + #[cfg(target_os = "windows")] + let mut callbacks_since_last_sync: u32 = MIN_SYNC_INTERVAL_CALLBACKS; + let stream_result = device.build_output_stream( &config, move |buffer: &mut [T], info| { + #[cfg(not(target_os = "windows"))] let latency_secs = latency_corrector.update_from_callback(info); + #[cfg(target_os = "windows")] + let _ = (info, &latency_corrector); let project = project_for_stream.borrow(); + #[cfg(target_os = "windows")] + { + 
callbacks_since_last_sync = callbacks_since_last_sync.saturating_add(1); + } + if playhead_rx_for_stream.has_changed().unwrap_or(false) { let video_playhead = *playhead_rx_for_stream.borrow_and_update(); - let audio_playhead = audio_renderer - .current_audible_playhead(device_sample_rate, latency_secs); - let drift = (video_playhead - audio_playhead).abs(); - if drift > SYNC_THRESHOLD_SECS - || (video_playhead - last_video_playhead).abs() > SYNC_THRESHOLD_SECS + #[cfg(target_os = "windows")] { - audio_renderer - .set_playhead(video_playhead + initial_compensation_secs, &project); + let jump = (video_playhead - last_video_playhead).abs(); + let audio_playhead = audio_renderer + .current_audible_playhead(device_sample_rate, FIXED_LATENCY_SECS); + let drift = (video_playhead - audio_playhead).abs(); + + if jump > HARD_SEEK_THRESHOLD_SECS { + audio_renderer.set_playhead( + video_playhead + initial_compensation_secs, + &project, + ); + callbacks_since_last_sync = 0; + } else if drift > SYNC_THRESHOLD_SECS + && callbacks_since_last_sync >= MIN_SYNC_INTERVAL_CALLBACKS + { + audio_renderer.set_playhead_smooth( + video_playhead + initial_compensation_secs, + &project, + ); + callbacks_since_last_sync = 0; + } } + + #[cfg(not(target_os = "windows"))] + { + let audio_playhead = audio_renderer + .current_audible_playhead(device_sample_rate, latency_secs); + let drift = (video_playhead - audio_playhead).abs(); + + if drift > SYNC_THRESHOLD_SECS + || (video_playhead - last_video_playhead).abs() + > SYNC_THRESHOLD_SECS + { + audio_renderer.set_playhead( + video_playhead + initial_compensation_secs, + &project, + ); + } + } + last_video_playhead = video_playhead; } @@ -987,4 +1093,81 @@ impl AudioPlayback { MediaError::TaskLaunch("Failed to build audio output stream".to_string()) })) } + + #[cfg(target_os = "windows")] + fn create_stream_prerendered( + self, + device: cpal::Device, + supported_config: cpal::SupportedStreamConfig, + duration_secs: f64, + ) -> Result<(watch::Receiver, cpal::Stream), MediaError> + where + T: FromSampleBytes + cpal::Sample, + { + use crate::audio::PrerenderedAudioBuffer; + + let AudioPlayback { + stop_rx, + start_frame_number, + project, + segments, + fps, + playhead_rx, + .. 
+ } = self; + + let mut output_info = AudioInfo::from_stream_config(&supported_config); + output_info.sample_format = output_info.sample_format.packed(); + + let config = supported_config.config(); + let sample_rate = output_info.sample_rate; + + let playhead = f64::from(start_frame_number) / f64::from(fps); + + info!( + duration_secs = duration_secs, + start_playhead = playhead, + sample_rate = sample_rate, + "Creating pre-rendered audio stream for Windows" + ); + + let project_snapshot = project.borrow().clone(); + let mut audio_buffer = PrerenderedAudioBuffer::::new( + segments, + &project_snapshot, + output_info, + duration_secs, + ); + + audio_buffer.set_playhead(playhead); + + let mut playhead_rx_for_stream = playhead_rx.clone(); + let mut last_video_playhead = playhead; + + let stream = device + .build_output_stream( + &config, + move |buffer: &mut [T], _info| { + if playhead_rx_for_stream.has_changed().unwrap_or(false) { + let video_playhead = *playhead_rx_for_stream.borrow_and_update(); + let jump = (video_playhead - last_video_playhead).abs(); + + if jump > 0.1 { + audio_buffer.set_playhead(video_playhead); + } + + last_video_playhead = video_playhead; + } + + audio_buffer.fill(buffer); + }, + |err| eprintln!("Audio stream error: {err}"), + None, + ) + .map_err(|e| MediaError::TaskLaunch(format!("Failed to build audio stream: {e}")))?; + + info!("Pre-rendered audio stream created successfully"); + + Ok((stop_rx, stream)) + } } diff --git a/crates/project/src/configuration.rs b/crates/project/src/configuration.rs index ed9b166f70..7de49265b8 100644 --- a/crates/project/src/configuration.rs +++ b/crates/project/src/configuration.rs @@ -54,8 +54,9 @@ fn default_alpha() -> u8 { impl Default for BackgroundSource { fn default() -> Self { - BackgroundSource::Wallpaper { - path: Some("sequoia-dark".to_string()), + BackgroundSource::Color { + value: [255, 255, 255], + alpha: 255, } } } @@ -816,6 +817,11 @@ pub struct CaptionSettings { default = "CaptionSettings::default_word_transition_duration" )] pub word_transition_duration: f32, + #[serde( + alias = "activeWordHighlight", + default = "CaptionSettings::default_active_word_highlight" + )] + pub active_word_highlight: bool, } impl CaptionSettings { @@ -838,6 +844,10 @@ impl CaptionSettings { fn default_word_transition_duration() -> f32 { 0.25 } + + fn default_active_word_highlight() -> bool { + false + } } impl Default for CaptionSettings { @@ -859,6 +869,7 @@ impl Default for CaptionSettings { fade_duration: Self::default_fade_duration(), linger_duration: Self::default_linger_duration(), word_transition_duration: Self::default_word_transition_duration(), + active_word_highlight: Self::default_active_word_highlight(), } } } diff --git a/crates/recording/src/output_pipeline/win.rs b/crates/recording/src/output_pipeline/win.rs index 48eb9e4f60..b60825a420 100644 --- a/crates/recording/src/output_pipeline/win.rs +++ b/crates/recording/src/output_pipeline/win.rs @@ -19,7 +19,10 @@ use windows::{ Graphics::SizeInt32, Win32::Graphics::{ Direct3D11::ID3D11Device, - Dxgi::Common::{DXGI_FORMAT, DXGI_FORMAT_NV12, DXGI_FORMAT_YUY2}, + Dxgi::Common::{ + DXGI_FORMAT, DXGI_FORMAT_B8G8R8A8_UNORM, DXGI_FORMAT_NV12, DXGI_FORMAT_R8G8B8A8_UNORM, + DXGI_FORMAT_YUY2, + }, }, }; @@ -462,6 +465,10 @@ impl NativeCameraFrame { cap_camera_windows::PixelFormat::YUYV422 | cap_camera_windows::PixelFormat::UYVY422 => { DXGI_FORMAT_YUY2 } + cap_camera_windows::PixelFormat::ARGB | cap_camera_windows::PixelFormat::RGB32 => { + DXGI_FORMAT_B8G8R8A8_UNORM + } + 
cap_camera_windows::PixelFormat::RGB24 => DXGI_FORMAT_R8G8B8A8_UNORM, _ => DXGI_FORMAT_NV12, } } @@ -940,10 +947,22 @@ fn convert_uyvy_to_yuyv_scalar(src: &[u8], dst: &mut [u8], len: usize) { pub fn camera_frame_to_ffmpeg(frame: &NativeCameraFrame) -> anyhow::Result { use cap_mediafoundation_utils::IMFMediaBufferExt; + if frame.pixel_format == cap_camera_windows::PixelFormat::MJPEG { + return decode_mjpeg_frame(frame); + } + let ffmpeg_format = match frame.pixel_format { cap_camera_windows::PixelFormat::NV12 => ffmpeg::format::Pixel::NV12, cap_camera_windows::PixelFormat::YUYV422 => ffmpeg::format::Pixel::YUYV422, cap_camera_windows::PixelFormat::UYVY422 => ffmpeg::format::Pixel::UYVY422, + cap_camera_windows::PixelFormat::ARGB | cap_camera_windows::PixelFormat::RGB32 => { + ffmpeg::format::Pixel::BGRA + } + cap_camera_windows::PixelFormat::RGB24 => ffmpeg::format::Pixel::BGR24, + cap_camera_windows::PixelFormat::BGR24 => ffmpeg::format::Pixel::BGR24, + cap_camera_windows::PixelFormat::YUV420P => ffmpeg::format::Pixel::YUV420P, + cap_camera_windows::PixelFormat::YV12 => ffmpeg::format::Pixel::YUV420P, + cap_camera_windows::PixelFormat::NV21 => ffmpeg::format::Pixel::NV12, other => anyhow::bail!("Unsupported camera pixel format: {:?}", other), }; @@ -970,8 +989,8 @@ pub fn camera_frame_to_ffmpeg(frame: &NativeCameraFrame) -> anyhow::Result { + match frame.pixel_format { + cap_camera_windows::PixelFormat::NV12 => { let y_size = (frame.width * frame.height) as usize; let uv_size = y_size / 2; if final_data.len() >= y_size + uv_size { @@ -979,18 +998,151 @@ pub fn camera_frame_to_ffmpeg(frame: &NativeCameraFrame) -> anyhow::Result { + cap_camera_windows::PixelFormat::NV21 => { + let y_size = (frame.width * frame.height) as usize; + let uv_size = y_size / 2; + if final_data.len() >= y_size + uv_size { + ffmpeg_frame.data_mut(0)[..y_size].copy_from_slice(&final_data[..y_size]); + let uv_data = &final_data[y_size..y_size + uv_size]; + let dest = ffmpeg_frame.data_mut(1); + for i in (0..uv_size).step_by(2) { + if i + 1 < uv_data.len() && i + 1 < dest.len() { + dest[i] = uv_data[i + 1]; + dest[i + 1] = uv_data[i]; + } + } + } + } + cap_camera_windows::PixelFormat::YUYV422 | cap_camera_windows::PixelFormat::UYVY422 => { let size = (frame.width * frame.height * 2) as usize; if final_data.len() >= size { ffmpeg_frame.data_mut(0)[..size].copy_from_slice(&final_data[..size]); } } + cap_camera_windows::PixelFormat::ARGB | cap_camera_windows::PixelFormat::RGB32 => { + let size = (frame.width * frame.height * 4) as usize; + if final_data.len() >= size { + ffmpeg_frame.data_mut(0)[..size].copy_from_slice(&final_data[..size]); + } + } + cap_camera_windows::PixelFormat::RGB24 | cap_camera_windows::PixelFormat::BGR24 => { + let size = (frame.width * frame.height * 3) as usize; + if final_data.len() >= size { + ffmpeg_frame.data_mut(0)[..size].copy_from_slice(&final_data[..size]); + } + } + cap_camera_windows::PixelFormat::YUV420P => { + let y_size = (frame.width * frame.height) as usize; + let uv_size = y_size / 4; + if final_data.len() >= y_size + uv_size * 2 { + let stride_y = ffmpeg_frame.stride(0); + let stride_u = ffmpeg_frame.stride(1); + let stride_v = ffmpeg_frame.stride(2); + copy_plane( + &final_data[..y_size], + ffmpeg_frame.data_mut(0), + frame.width as usize, + frame.height as usize, + stride_y, + ); + copy_plane( + &final_data[y_size..y_size + uv_size], + ffmpeg_frame.data_mut(1), + (frame.width / 2) as usize, + (frame.height / 2) as usize, + stride_u, + ); + copy_plane( + &final_data[y_size + 
uv_size..], + ffmpeg_frame.data_mut(2), + (frame.width / 2) as usize, + (frame.height / 2) as usize, + stride_v, + ); + } + } + cap_camera_windows::PixelFormat::YV12 => { + let y_size = (frame.width * frame.height) as usize; + let uv_size = y_size / 4; + if final_data.len() >= y_size + uv_size * 2 { + let stride_y = ffmpeg_frame.stride(0); + let stride_u = ffmpeg_frame.stride(1); + let stride_v = ffmpeg_frame.stride(2); + copy_plane( + &final_data[..y_size], + ffmpeg_frame.data_mut(0), + frame.width as usize, + frame.height as usize, + stride_y, + ); + copy_plane( + &final_data[y_size + uv_size..], + ffmpeg_frame.data_mut(1), + (frame.width / 2) as usize, + (frame.height / 2) as usize, + stride_u, + ); + copy_plane( + &final_data[y_size..y_size + uv_size], + ffmpeg_frame.data_mut(2), + (frame.width / 2) as usize, + (frame.height / 2) as usize, + stride_v, + ); + } + } _ => {} } Ok(ffmpeg_frame) } +fn copy_plane(src: &[u8], dst: &mut [u8], width: usize, height: usize, stride: usize) { + for row in 0..height { + let src_start = row * width; + let dst_start = row * stride; + let copy_len = width.min(src.len().saturating_sub(src_start)); + if copy_len > 0 && dst_start + copy_len <= dst.len() { + dst[dst_start..dst_start + copy_len] + .copy_from_slice(&src[src_start..src_start + copy_len]); + } + } +} + +fn decode_mjpeg_frame(frame: &NativeCameraFrame) -> anyhow::Result { + use cap_mediafoundation_utils::IMFMediaBufferExt; + + let buffer_guard = frame + .buffer + .lock() + .map_err(|_| anyhow!("Failed to lock camera buffer"))?; + let lock = buffer_guard + .lock() + .map_err(|e| anyhow!("Failed to lock MF buffer: {:?}", e))?; + let data = &*lock; + + let codec = ffmpeg::codec::decoder::find(ffmpeg::codec::Id::MJPEG) + .ok_or_else(|| anyhow!("MJPEG codec not found"))?; + + let decoder_context = ffmpeg::codec::context::Context::new_with_codec(codec); + let mut decoder = decoder_context + .decoder() + .video() + .map_err(|e| anyhow!("Failed to create MJPEG decoder: {e}"))?; + + let packet = ffmpeg::Packet::copy(data); + decoder + .send_packet(&packet) + .map_err(|e| anyhow!("Failed to send MJPEG packet: {e}"))?; + + let mut decoded_frame = ffmpeg::frame::Video::empty(); + decoder + .receive_frame(&mut decoded_frame) + .map_err(|e| anyhow!("Failed to decode MJPEG frame: {e}"))?; + + Ok(decoded_frame) +} + pub fn upload_mf_buffer_to_texture( device: &ID3D11Device, frame: &NativeCameraFrame, @@ -1006,6 +1158,8 @@ pub fn upload_mf_buffer_to_texture( let bytes_per_pixel: u32 = match frame.pixel_format { cap_camera_windows::PixelFormat::NV12 => 1, cap_camera_windows::PixelFormat::YUYV422 | cap_camera_windows::PixelFormat::UYVY422 => 2, + cap_camera_windows::PixelFormat::ARGB | cap_camera_windows::PixelFormat::RGB32 => 4, + cap_camera_windows::PixelFormat::RGB24 => 3, _ => 2, }; diff --git a/crates/rendering/src/decoder/media_foundation.rs b/crates/rendering/src/decoder/media_foundation.rs index 18a142b734..ee8d46e082 100644 --- a/crates/rendering/src/decoder/media_foundation.rs +++ b/crates/rendering/src/decoder/media_foundation.rs @@ -5,7 +5,7 @@ use std::{ time::{Duration, Instant}, }; use tokio::sync::oneshot; -use tracing::{debug, info, warn}; +use tracing::{info, warn}; use windows::Win32::{Foundation::HANDLE, Graphics::Direct3D11::ID3D11Texture2D}; use super::{DecodedFrame, DecoderInitResult, DecoderType, FRAME_CACHE_SIZE, VideoDecoderMessage}; @@ -202,11 +202,7 @@ impl MFDecoder { let requested_frame = (requested_time * fps as f32).floor() as u32; if let Some(cached) = cache.get(&requested_frame) 
diff --git a/crates/rendering/src/decoder/media_foundation.rs b/crates/rendering/src/decoder/media_foundation.rs
index 18a142b734..ee8d46e082 100644
--- a/crates/rendering/src/decoder/media_foundation.rs
+++ b/crates/rendering/src/decoder/media_foundation.rs
@@ -5,7 +5,7 @@ use std::{
     time::{Duration, Instant},
 };
 use tokio::sync::oneshot;
-use tracing::{debug, info, warn};
+use tracing::{info, warn};
 use windows::Win32::{Foundation::HANDLE, Graphics::Direct3D11::ID3D11Texture2D};

 use super::{DecodedFrame, DecoderInitResult, DecoderType, FRAME_CACHE_SIZE, VideoDecoderMessage};
@@ -202,11 +198,7 @@ impl MFDecoder {
            let requested_frame = (requested_time * fps as f32).floor() as u32;

            if let Some(cached) = cache.get(&requested_frame) {
-                if sender.send(cached.to_decoded_frame()).is_err() {
-                    warn!(
-                        "Failed to send cached frame {requested_frame}: receiver dropped"
-                    );
-                }
+                let _ = sender.send(cached.to_decoded_frame());
                 continue;
             }
@@ -222,7 +218,6 @@ impl MFDecoder {
                .unwrap_or(true);

            if needs_seek {
-                debug!("MediaFoundation seeking to frame {requested_frame}");
                let time_100ns = frame_to_100ns(requested_frame, fps);
                if let Err(e) = decoder.seek(time_100ns) {
                    warn!("MediaFoundation seek failed: {e}");
@@ -248,16 +243,6 @@ impl MFDecoder {
                    ) {
                        Ok(data) => {
                            health.record_success(decode_time);
-                            debug!(
-                                frame = frame_number,
-                                data_len = data.data.len(),
-                                y_stride = data.y_stride,
-                                uv_stride = data.uv_stride,
-                                width = mf_frame.width,
-                                height = mf_frame.height,
-                                decode_ms = decode_time.as_millis(),
-                                "read_texture_to_cpu succeeded"
-                            );
                            Some(Arc::new(data))
                        }
                        Err(e) => {
@@ -309,12 +294,8 @@ impl MFDecoder {

                        if let Some(frame) = frame_to_send
                            && let Some(s) = sender.take()
-                            && s.send(frame.to_decoded_frame()).is_err()
                        {
-                            warn!(
-                                "Failed to send frame {}: receiver dropped",
-                                frame.number
-                            );
+                            let _ = s.send(frame.to_decoded_frame());
                        }
                        break;
                    }
@@ -324,7 +305,6 @@ impl MFDecoder {
                        }
                    }
                    Ok(None) => {
-                        debug!("MediaFoundation end of stream");
                        break;
                    }
                    Err(e) => {
@@ -342,21 +322,14 @@ impl MFDecoder {
                if let Some(frame) = last_valid_frame
                    .or_else(|| cache.values().max_by_key(|f| f.number).cloned())
                {
-                    if s.send(frame.to_decoded_frame()).is_err() {
-                        warn!("Failed to send fallback frame: receiver dropped");
-                    }
+                    let _ = s.send(frame.to_decoded_frame());
                } else {
-                    debug!(
-                        "No frames available for request {requested_frame}, sending black frame"
-                    );
                    let black_frame = DecodedFrame::new(
                        vec![0u8; (video_width * video_height * 4) as usize],
                        video_width,
                        video_height,
                    );
-                    if s.send(black_frame).is_err() {
-                        warn!("Failed to send black frame: receiver dropped");
-                    }
+                    let _ = s.send(black_frame);
                }
            }
        }
diff --git a/crates/rendering/src/decoder/mod.rs b/crates/rendering/src/decoder/mod.rs
index 81b92997a3..0a5b4a2892 100644
--- a/crates/rendering/src/decoder/mod.rs
+++ b/crates/rendering/src/decoder/mod.rs
@@ -6,9 +6,9 @@ use std::{
     time::Duration,
 };
 use tokio::sync::oneshot;
+use tracing::info;
 #[cfg(target_os = "windows")]
 use tracing::warn;
-use tracing::{debug, info};

 #[cfg(target_os = "macos")]
 mod avassetreader;
@@ -467,19 +467,12 @@ impl AsyncVideoDecoderHandle {
            .send(VideoDecoderMessage::GetFrame(adjusted_time, tx))
            .is_err()
        {
-            debug!("Decoder channel closed, receiver dropped");
            return None;
        }

        match tokio::time::timeout(std::time::Duration::from_millis(500), rx).await {
            Ok(result) => result.ok(),
-            Err(_) => {
-                debug!(
-                    adjusted_time = adjusted_time,
-                    "get_frame timed out after 500ms"
-                );
-                None
-            }
+            Err(_) => None,
        }
    }
@@ -555,8 +548,8 @@ pub async fn spawn_decoder(
    let (ready_tx, ready_rx) = oneshot::channel::<Result<DecoderInitResult, String>>();
    let (tx, rx) = mpsc::channel();

-    match media_foundation::MFDecoder::spawn(name, path.clone(), fps, rx, ready_tx) {
-        Ok(()) => match tokio::time::timeout(timeout_duration, ready_rx).await {
+    if let Ok(()) = media_foundation::MFDecoder::spawn(name, path.clone(), fps, rx, ready_tx) {
+        match tokio::time::timeout(timeout_duration, ready_rx).await {
            Ok(Ok(Ok(init_result))) => {
                info!(
                    "Video '{}' using {} decoder ({}x{})",
@@ -592,12 +585,6 @@
                    name
                );
            }
-            },
-            Err(mf_err) => {
-                debug!(
-                    "MediaFoundation decoder spawn failed for '{}': {}, falling back to FFmpeg",
-                    name, mf_err
-                );
            }
        }
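The spawn path above is an init handshake: the MediaFoundation decoder thread reports its init result over a oneshot channel, and the caller bounds the wait with tokio::time::timeout, falling through to the FFmpeg decoder on error or timeout. A reduced sketch of the pattern (the payload type, timeout value, and names here are assumptions for illustration, not the crate's API):

use std::time::Duration;
use tokio::sync::oneshot;

// Returns true only if the decoder thread reported a successful init in time.
// timeout() wraps the receiver, so three Result layers stack up:
// timeout -> oneshot recv -> the decoder's own init result.
async fn wait_for_init(ready_rx: oneshot::Receiver<Result<(), String>>) -> bool {
    match tokio::time::timeout(Duration::from_secs(5), ready_rx).await {
        Ok(Ok(Ok(()))) => true,  // init succeeded
        Ok(Ok(Err(_))) => false, // decoder reported an init error
        Ok(Err(_)) => false,     // sender dropped without reporting
        Err(_) => false,         // timed out; fall back to the other decoder
    }
}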
diff --git a/crates/rendering/src/layers/background.rs b/crates/rendering/src/layers/background.rs
index 03cd572fd8..45fd94e2fe 100644
--- a/crates/rendering/src/layers/background.rs
+++ b/crates/rendering/src/layers/background.rs
@@ -64,16 +64,8 @@ impl From<BackgroundSource> for Background {
                .replace("localhost//", "/");
            if std::path::Path::new(&clean_path).exists() {
-                tracing::debug!("Background image path resolved: {}", clean_path);
                return Background::Image { path: clean_path };
            }
-            tracing::warn!(
-                "Background image path does not exist: {} (original: {})",
-                clean_path,
-                path
-            );
-        } else {
-            tracing::debug!("Background path is empty or None");
        }

        Background::Color([1.0, 1.0, 1.0, 1.0])
    }
diff --git a/crates/rendering/src/layers/captions.rs b/crates/rendering/src/layers/captions.rs
index 7fb449c9d0..9f2c3f4fb4 100644
--- a/crates/rendering/src/layers/captions.rs
+++ b/crates/rendering/src/layers/captions.rs
@@ -553,7 +553,9 @@ impl CaptionsLayer {
        let base_alpha = (fade_opacity * BASE_TEXT_OPACITY).clamp(0.0, 1.0);
        let highlight_alpha = fade_opacity.clamp(0.0, 1.0);

-        if !caption_words.is_empty() {
+        let active_word_highlight_enabled = caption_data.settings.active_word_highlight;
+
+        if !caption_words.is_empty() && active_word_highlight_enabled {
            let mut rich_text: Vec<(&str, Attrs)> = Vec::new();
            let full_text = caption_text.as_str();
            let mut last_end = 0usize;
@@ -636,10 +638,10 @@ impl CaptionsLayer {
            );
        } else {
            let color = Color::rgba(
-                (base_color[0] * 255.0) as u8,
-                (base_color[1] * 255.0) as u8,
-                (base_color[2] * 255.0) as u8,
-                (base_alpha * 255.0) as u8,
+                (highlight_color_rgb[0] * 255.0) as u8,
+                (highlight_color_rgb[1] * 255.0) as u8,
+                (highlight_color_rgb[2] * 255.0) as u8,
+                (highlight_alpha * 255.0) as u8,
            );
            let attrs = Attrs::new().family(font_family).weight(weight).color(color);
            updated_buffer.set_text(
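Both captions branches quantize normalized float channels with `(c * 255.0) as u8`. A tiny sketch of that conversion with an explicit clamp (the clamp is an addition of this sketch, not something the patch does; the alpha values above are already clamped upstream, so the patch can cast directly):

// Convert a normalized RGB triple plus alpha into 8-bit RGBA channels.
// Clamping first avoids wrap-around if a channel drifts outside [0.0, 1.0].
fn to_rgba8(rgb: [f32; 3], alpha: f32) -> [u8; 4] {
    let q = |v: f32| (v.clamp(0.0, 1.0) * 255.0) as u8;
    [q(rgb[0]), q(rgb[1]), q(rgb[2]), q(alpha)]
}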
diff --git a/crates/rendering/src/layers/display.rs b/crates/rendering/src/layers/display.rs
index c799f8dfad..898ad16d09 100644
--- a/crates/rendering/src/layers/display.rs
+++ b/crates/rendering/src/layers/display.rs
@@ -162,25 +162,10 @@ impl DisplayLayer {
                });
                true
            } else {
-                tracing::debug!(
-                    width = frame_size.x,
-                    height = frame_size.y,
-                    y_stride,
-                    "NV12 conversion succeeded but output texture is None, skipping copy"
-                );
                false
            }
        }
-        Err(e) => {
-            tracing::debug!(
-                error = ?e,
-                width = frame_size.x,
-                height = frame_size.y,
-                y_stride,
-                "NV12 to RGBA conversion failed"
-            );
-            false
-        }
+        Err(_) => false,
    }
 } else {
    false
@@ -214,10 +199,7 @@ impl DisplayLayer {
                false
            }
        }
-        Err(e) => {
-            tracing::debug!(error = ?e, "CPU NV12 conversion failed");
-            false
-        }
+        Err(_) => false,
    }
 } else {
    false
@@ -227,53 +209,28 @@ impl DisplayLayer {
 {
    let mut d3d11_succeeded = false;

-    let has_y_handle = screen_frame.d3d11_y_handle().is_some();
-    let has_uv_handle = screen_frame.d3d11_uv_handle().is_some();
-    let has_y_plane = screen_frame.y_plane().is_some();
-    let has_uv_plane = screen_frame.uv_plane().is_some();
-
-    tracing::debug!(
-        has_y_handle,
-        has_uv_handle,
-        has_y_plane,
-        has_uv_plane,
-        data_len = screen_frame.data().len(),
-        y_stride = screen_frame.y_stride(),
-        uv_stride = screen_frame.uv_stride(),
-        actual_width,
-        actual_height,
-        frame_size_x = frame_size.x,
-        frame_size_y = frame_size.y,
-        "Windows NV12 frame info"
-    );
-
    if let (Some(y_handle), Some(uv_handle)) = (
        screen_frame.d3d11_y_handle(),
        screen_frame.d3d11_uv_handle(),
-    ) {
-        tracing::trace!("Using D3D11 zero-copy path for NV12 conversion");
-        match self.yuv_converter.convert_nv12_from_d3d11_shared_handles(
+    ) && self
+        .yuv_converter
+        .convert_nv12_from_d3d11_shared_handles(
            device,
            queue,
            y_handle,
            uv_handle,
            actual_width,
            actual_height,
-        ) {
-            Ok(_) => {
-                if self.yuv_converter.output_texture().is_some() {
-                    self.pending_copy = Some(PendingTextureCopy {
-                        width: actual_width,
-                        height: actual_height,
-                        dst_texture_index: next_texture,
-                    });
-                    d3d11_succeeded = true;
-                }
-            }
-            Err(e) => {
-                tracing::debug!(error = ?e, "D3D11 zero-copy conversion failed, falling back to CPU path");
-            }
-        }
+        )
+        .is_ok()
+        && self.yuv_converter.output_texture().is_some()
+    {
+        self.pending_copy = Some(PendingTextureCopy {
+            width: actual_width,
+            height: actual_height,
+            dst_texture_index: next_texture,
+        });
+        d3d11_succeeded = true;
+    }

    if d3d11_succeeded {
@@ -284,17 +241,6 @@ impl DisplayLayer {
        let y_stride = screen_frame.y_stride();
        let uv_stride = screen_frame.uv_stride();

-        tracing::debug!(
-            y_data_len = y_data.len(),
-            uv_data_len = uv_data.len(),
-            y_stride,
-            uv_stride,
-            actual_width,
-            actual_height,
-            prefer_cpu = self.prefer_cpu_conversion,
-            "Attempting NV12 conversion"
-        );
-
        let convert_result = if self.prefer_cpu_conversion {
            self.yuv_converter.convert_nv12_cpu(
                device,
@@ -321,7 +267,6 @@ impl DisplayLayer {

        match convert_result {
            Ok(_) => {
-                tracing::debug!("NV12 conversion succeeded");
                if self.yuv_converter.output_texture().is_some() {
                    self.pending_copy = Some(PendingTextureCopy {
                        width: actual_width,
@@ -330,21 +275,12 @@ impl DisplayLayer {
                    });
                    true
                } else {
-                    tracing::warn!(
-                        "NV12 conversion succeeded but output texture is None"
-                    );
                    false
                }
            }
-            Err(e) => {
-                tracing::warn!(error = ?e, "NV12 conversion failed");
-                false
-            }
+            Err(_) => false,
        }
    } else {
-        tracing::warn!(
-            "No D3D11 handles and no CPU data available for NV12 frame"
-        );
        false
    }
 }
diff --git a/crates/rendering/src/lib.rs b/crates/rendering/src/lib.rs
index 6bfe0a30ae..b07bffdab8 100644
--- a/crates/rendering/src/lib.rs
+++ b/crates/rendering/src/lib.rs
@@ -205,14 +205,6 @@ impl RecordingSegmentDecoders {
        let camera_frame = camera.flatten();

-        if needs_camera && camera_frame.is_none() {
-            tracing::debug!(
-                segment_time,
-                has_camera_decoder = self.camera.is_some(),
-                "camera frame missing"
-            );
-        }
-
        Some(DecodedSegmentFrames {
            screen_frame: screen?,
            camera_frame,
@@ -1376,12 +1368,15 @@ impl ProjectUniforms {
        let crop_bounds = match project.camera.shape {
            CameraShape::Source => [0.0, 0.0, frame_size[0], frame_size[1]],
-            CameraShape::Square => [
-                (frame_size[0] - frame_size[1]) / 2.0,
-                0.0,
-                frame_size[0] - (frame_size[0] - frame_size[1]) / 2.0,
-                frame_size[1],
-            ],
+            CameraShape::Square => {
+                if frame_size[0] > frame_size[1] {
+                    let offset = (frame_size[0] - frame_size[1]) / 2.0;
+                    [offset, 0.0, frame_size[0] - offset, frame_size[1]]
+                } else {
+                    let offset = (frame_size[1] - frame_size[0]) / 2.0;
+                    [0.0, offset, frame_size[0], frame_size[1] - offset]
+                }
+            }
        };

        CompositeVideoFrameUniforms {
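The reworked CameraShape::Square arm center-crops whichever dimension is larger; the old arm assumed landscape input, so a portrait feed (say 1080x1920) produced a negative left offset. An equivalent standalone version of the new math, with worked values in the comments (the function name is illustrative, not part of the patch):

// Center-crop a frame to a square of side min(w, h).
// Bounds are [left, top, right, bottom] in source pixels.
fn square_crop_bounds(w: f32, h: f32) -> [f32; 4] {
    if w > h {
        let offset = (w - h) / 2.0;
        [offset, 0.0, w - offset, h] // 1920x1080 -> [420, 0, 1500, 1080]
    } else {
        let offset = (h - w) / 2.0;
        [0.0, offset, w, h - offset] // 1080x1920 -> [0, 420, 1080, 1500]
    }
}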