Render to textures
@@ -47,6 +47,8 @@ use crate::{
 use anyhow::Result;
 use async_task::Runnable;
 use futures::channel::oneshot;
+#[cfg(any(test, feature = "test-support"))]
+use image::RgbaImage;
 use image::codecs::gif::GifDecoder;
 use image::{AnimationDecoder as _, Frame};
 use raw_window_handle::{HasDisplayHandle, HasWindowHandle};
@@ -580,6 +582,14 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
     fn native_window_id(&self) -> Option<u32> {
         None
     }
+
+    /// Renders the given scene to a texture and returns the pixel data as an RGBA image.
+    /// This does not present the frame to screen - useful for visual testing where we want
+    /// to capture what would be rendered without displaying it or requiring the window to be visible.
+    #[cfg(any(test, feature = "test-support"))]
+    fn render_to_image(&self, _scene: &Scene) -> Result<RgbaImage> {
+        anyhow::bail!("render_to_image not implemented for this platform")
+    }
 }

 /// This type is public so that our test macro can generate and use it, but it should not

@@ -7,9 +7,13 @@ use crate::{
     PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Underline,
     get_gamma_correction_ratios,
 };
+#[cfg(any(test, feature = "test-support"))]
+use anyhow::Result;
 use blade_graphics as gpu;
 use blade_util::{BufferBelt, BufferBeltDescriptor};
 use bytemuck::{Pod, Zeroable};
+#[cfg(any(test, feature = "test-support"))]
+use image::RgbaImage;
 #[cfg(target_os = "macos")]
 use media::core_video::CVMetalTextureCache;
 use std::sync::Arc;
@@ -917,6 +921,13 @@ impl BladeRenderer {
         self.wait_for_gpu();
         self.last_sync_point = Some(sync_point);
     }
+
+    /// Renders the scene to a texture and returns the pixel data as an RGBA image.
+    /// This is not yet implemented for BladeRenderer.
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn render_to_image(&mut self, _scene: &Scene) -> Result<RgbaImage> {
+        anyhow::bail!("render_to_image is not yet implemented for BladeRenderer")
+    }
 }

 fn create_path_intermediate_texture(

@@ -11,6 +11,8 @@ use cocoa::{
     foundation::{NSSize, NSUInteger},
     quartzcore::AutoresizingMask,
 };
+#[cfg(any(test, feature = "test-support"))]
+use image::RgbaImage;

 use core_foundation::base::TCFType;
 use core_video::{
@@ -154,6 +156,9 @@ impl MetalRenderer {
         layer.set_pixel_format(MTLPixelFormat::BGRA8Unorm);
         layer.set_opaque(false);
         layer.set_maximum_drawable_count(3);
+        // Allow texture reading for visual tests (captures screenshots without ScreenCaptureKit)
+        #[cfg(any(test, feature = "test-support"))]
+        layer.set_framebuffer_only(false);
         unsafe {
             let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
             let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
@@ -426,6 +431,97 @@ impl MetalRenderer {
         }
     }

+    /// Renders the scene to a texture and returns the pixel data as an RGBA image.
+    /// This does not present the frame to screen - useful for visual testing
+    /// where we want to capture what would be rendered without displaying it.
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn render_to_image(&mut self, scene: &Scene) -> Result<RgbaImage> {
+        let layer = self.layer.clone();
+        let viewport_size = layer.drawable_size();
+        let viewport_size: Size<DevicePixels> = size(
+            (viewport_size.width.ceil() as i32).into(),
+            (viewport_size.height.ceil() as i32).into(),
+        );
+        let drawable = layer
+            .next_drawable()
+            .ok_or_else(|| anyhow::anyhow!("Failed to get drawable for render_to_image"))?;
+
+        loop {
+            let mut instance_buffer = self.instance_buffer_pool.lock().acquire(&self.device);
+
+            let command_buffer =
+                self.draw_primitives(scene, &mut instance_buffer, drawable, viewport_size);
+
+            match command_buffer {
+                Ok(command_buffer) => {
+                    let instance_buffer_pool = self.instance_buffer_pool.clone();
+                    let instance_buffer = Cell::new(Some(instance_buffer));
+                    let block = ConcreteBlock::new(move |_| {
+                        if let Some(instance_buffer) = instance_buffer.take() {
+                            instance_buffer_pool.lock().release(instance_buffer);
+                        }
+                    });
+                    let block = block.copy();
+                    command_buffer.add_completed_handler(&block);
+
+                    // Commit and wait for completion without presenting
+                    command_buffer.commit();
+                    command_buffer.wait_until_completed();
+
+                    // Read pixels from the texture
+                    let texture = drawable.texture();
+                    let width = texture.width() as u32;
+                    let height = texture.height() as u32;
+                    let bytes_per_row = width as usize * 4;
+                    let buffer_size = height as usize * bytes_per_row;
+
+                    let mut pixels = vec![0u8; buffer_size];
+
+                    let region = metal::MTLRegion {
+                        origin: metal::MTLOrigin { x: 0, y: 0, z: 0 },
+                        size: metal::MTLSize {
+                            width: width as u64,
+                            height: height as u64,
+                            depth: 1,
+                        },
+                    };
+
+                    texture.get_bytes(
+                        pixels.as_mut_ptr() as *mut std::ffi::c_void,
+                        bytes_per_row as u64,
+                        region,
+                        0,
+                    );
+
+                    // Convert BGRA to RGBA (swap B and R channels)
+                    for chunk in pixels.chunks_exact_mut(4) {
+                        chunk.swap(0, 2);
+                    }
+
+                    return RgbaImage::from_raw(width, height, pixels).ok_or_else(|| {
+                        anyhow::anyhow!("Failed to create RgbaImage from pixel data")
+                    });
+                }
+                Err(err) => {
+                    log::error!(
+                        "failed to render: {}. retrying with larger instance buffer size",
+                        err
+                    );
+                    let mut instance_buffer_pool = self.instance_buffer_pool.lock();
+                    let buffer_size = instance_buffer_pool.buffer_size;
+                    if buffer_size >= 256 * 1024 * 1024 {
+                        anyhow::bail!("instance buffer size grew too large: {}", buffer_size);
+                    }
+                    instance_buffer_pool.reset(buffer_size * 2);
+                    log::info!(
+                        "increased instance buffer size to {}",
+                        instance_buffer_pool.buffer_size
+                    );
+                }
+            }
+        }
+    }
+
     fn draw_primitives(
         &mut self,
         scene: &Scene,

@@ -8,6 +8,8 @@ use crate::{
     WindowBounds, WindowControlArea, WindowKind, WindowParams, dispatch_get_main_queue,
     dispatch_sys::dispatch_async_f, platform::PlatformInputHandler, point, px, size,
 };
+#[cfg(any(test, feature = "test-support"))]
+use anyhow::Result;
 use block::ConcreteBlock;
 use cocoa::{
     appkit::{
@@ -25,6 +27,8 @@ use cocoa::{
         NSUserDefaults,
     },
 };
+#[cfg(any(test, feature = "test-support"))]
+use image::RgbaImage;

 use core_graphics::display::{CGDirectDisplayID, CGPoint, CGRect};
 use ctor::ctor;
@@ -1570,6 +1574,12 @@ impl PlatformWindow for MacWindow {
     fn native_window_id(&self) -> Option<u32> {
         Some(self.window_number())
     }
+
+    #[cfg(any(test, feature = "test-support"))]
+    fn render_to_image(&self, scene: &crate::Scene) -> Result<RgbaImage> {
+        let mut this = self.0.lock();
+        this.renderer.render_to_image(scene)
+    }
 }

 impl rwh::HasWindowHandle for MacWindow {

@@ -1784,6 +1784,15 @@ impl Window {
         self.platform_window.native_window_id()
     }

+    /// Renders the current frame's scene to a texture and returns the pixel data as an RGBA image.
+    /// This does not present the frame to screen - useful for visual testing where we want
+    /// to capture what would be rendered without displaying it or requiring the window to be visible.
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn render_to_image(&self) -> anyhow::Result<image::RgbaImage> {
+        self.platform_window
+            .render_to_image(&self.rendered_frame.scene)
+    }
+
     /// Set the content size of the window.
     pub fn resize(&mut self, size: Size<Pixels>) {
         self.platform_window.resize(size);

@@ -3,15 +3,13 @@
 //! This binary runs visual regression tests for Zed's UI. It captures screenshots
 //! of real Zed windows and compares them against baseline images.
 //!
-//! ## Prerequisites
+//! ## How It Works
 //!
-//! **Screen Recording Permission Required**: This tool uses macOS ScreenCaptureKit
-//! to capture window screenshots. You must grant Screen Recording permission:
-//!
-//! 1. Run this tool once - macOS will prompt for permission
-//! 2. Or manually: System Settings > Privacy & Security > Screen Recording
-//! 3. Enable the terminal app you're running from (e.g., Terminal.app, iTerm2)
-//! 4. You may need to restart your terminal after granting permission
+//! This tool uses direct texture capture - it renders the scene to a Metal texture
+//! and reads the pixels back directly. This approach:
+//! - Does NOT require Screen Recording permission
+//! - Does NOT require the window to be visible on screen
+//! - Captures raw GPUI output without system window chrome
 //!
 //! ## Usage
 //!
@@ -87,6 +85,8 @@ fn main() {
         release_channel::init(semver::Version::new(0, 0, 0), cx);
         command_palette::init(cx);
         editor::init(cx);
+        call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
+        title_bar::init(cx);
         project_panel::init(cx);
         outline_panel::init(cx);
         terminal_view::init(cx);
@@ -95,12 +95,10 @@ fn main() {

         // Open a real Zed workspace window
         let window_size = size(px(1280.0), px(800.0));
-        // TODO: We'd like to hide this window during tests, but macOS prevents windows
-        // from being positioned fully offscreen. The proper fix is to implement direct
-        // texture capture (reading pixels from Metal texture instead of using ScreenCaptureKit).
-        // See docs/direct-texture-capture-implementation.md for details.
+        // Window can be hidden since we use direct texture capture (reading pixels from
+        // Metal texture) instead of ScreenCaptureKit which requires visible windows.
         let bounds = Bounds {
-            origin: point(px(100.0), px(100.0)),
+            origin: point(px(0.0), px(0.0)),
             size: window_size,
         };

@@ -120,8 +118,8 @@ fn main() {
         .open_window(
             WindowOptions {
                 window_bounds: Some(WindowBounds::Windowed(bounds)),
-                focus: true,
-                show: true,
+                focus: false,
+                show: false,
                 ..Default::default()
             },
             |window, cx| {
@@ -148,18 +146,6 @@ fn main() {
            eprintln!("Failed to add worktree: {:?}", e);
        }

-        // Activate the window and bring it to front to ensure it's visible
-        // This is needed because macOS won't render occluded windows
-        cx.update(|cx| {
-            cx.activate(true); // Activate the app
-            workspace_window
-                .update(cx, |_, window, _| {
-                    window.activate_window(); // Bring window to front
-                })
-                .ok();
-        })
-        .ok();
-
        // Wait for UI to settle
        cx.background_executor()
            .timer(std::time::Duration::from_millis(500))
@@ -262,22 +248,6 @@ fn main() {
            .timer(std::time::Duration::from_secs(2))
            .await;

-        // Activate window again before screenshot to ensure it's rendered
-        cx.update(|cx| {
-            workspace_window
-                .update(cx, |_, window, _| {
-                    window.activate_window();
-                })
-                .ok();
-        })
-        .ok();
-
-        // One more refresh and wait
-        cx.refresh().ok();
-        cx.background_executor()
-            .timer(std::time::Duration::from_millis(500))
-            .await;
-
        // Track test results
        let mut passed = 0;
        let mut failed = 0;
@@ -393,8 +363,8 @@ async fn run_visual_test(
     cx: &mut gpui::AsyncApp,
     update_baseline: bool,
 ) -> Result<TestResult> {
-    // Capture the screenshot
-    let screenshot = capture_screenshot(window, cx).await?;
+    // Capture the screenshot using direct texture capture (no ScreenCaptureKit needed)
+    let screenshot = cx.update(|cx| capture_screenshot(window, cx))??;

     // Get paths
     let baseline_path = get_baseline_path(test_name);
@@ -550,23 +520,12 @@ fn pixels_are_similar(a: &image::Rgba<u8>, b: &image::Rgba<u8>) -> bool {
         && (a[3] as i16 - b[3] as i16).abs() <= TOLERANCE
 }

-async fn capture_screenshot(
-    window: gpui::AnyWindowHandle,
-    cx: &mut gpui::AsyncApp,
-) -> Result<RgbaImage> {
-    // Get the native window ID
-    let window_id = cx
-        .update(|cx| {
-            cx.update_window(window, |_view, window: &mut Window, _cx| {
-                window.native_window_id()
-            })
-        })??
-        .ok_or_else(|| anyhow::anyhow!("Failed to get native window ID"))?;
-
-    // Capture the screenshot
-    let screenshot = gpui::capture_window_screenshot(window_id)
-        .await
-        .map_err(|_| anyhow::anyhow!("Screenshot capture was cancelled"))??;
+fn capture_screenshot(window: gpui::AnyWindowHandle, cx: &mut gpui::App) -> Result<RgbaImage> {
+    // Use direct texture capture - renders the scene to a texture and reads pixels back.
+    // This does not require the window to be visible on screen.
+    let screenshot = cx.update_window(window, |_view, window: &mut Window, _cx| {
+        window.render_to_image()
+    })??;

     println!(
         "Screenshot captured: {}x{} pixels",

Binary file not shown. (Before: 122 KiB, After: 230 KiB)
Binary file not shown. (Before: 111 KiB, After: 213 KiB)
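
A minimal usage sketch of the capture path added above, assuming a test/test-support build where `Window::render_to_image` is available; the helper name, the `&mut App` plumbing, and the output path are illustrative, not part of this commit:

use anyhow::Result;
use gpui::{AnyWindowHandle, App, Window};
use image::RgbaImage;

// Hypothetical helper: capture the window's last rendered frame without
// presenting it, then write it to disk for baseline comparison.
fn capture_to_file(window: AnyWindowHandle, cx: &mut App, path: &str) -> Result<()> {
    // Same pattern as capture_screenshot above: render the current frame's
    // scene to a texture and read the pixels back as RGBA.
    let screenshot: RgbaImage = cx.update_window(window, |_view, window: &mut Window, _cx| {
        window.render_to_image()
    })??;
    println!("captured {}x{} pixels", screenshot.width(), screenshot.height());
    screenshot.save(path)?; // format inferred from the file extension (e.g. .png)
    Ok(())
}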