Compare commits

...

12 Commits

Author SHA1 Message Date
Richard Feldman
de1cc44dd7 Render to textures 2025-12-24 14:40:29 -05:00
Richard Feldman
ba90b55b13 Revert the window "hiding" attempt 2025-12-19 12:18:20 -05:00
Richard Feldman
1dcf1cf8dc wip 2025-12-19 12:09:32 -05:00
Richard Feldman
60261963a8 Merge remote-tracking branch 'origin/main' into screenshots 2025-12-18 09:59:15 -05:00
Richard Feldman
c705931001 Commit project_panel.png 2025-12-17 17:19:53 -05:00
Richard Feldman
038be5b46c Get project panel test working 2025-12-17 17:19:46 -05:00
Richard Feldman
02eda685b0 Update documentation for visual testing 2025-12-17 15:00:35 -05:00
Richard Feldman
bdcc69dc1e Check in workspace_with_editor.png, use it 2025-12-17 14:50:30 -05:00
Richard Feldman
9de9b0bde0 Visual test for actual Zed workspace 2025-12-17 12:40:35 -05:00
Richard Feldman
0ce65331f8 wip adding screenshot tests 2025-12-17 10:36:35 -05:00
Richard Feldman
b32f6daab6 Revert "wip"
This reverts commit b5d0f5d4f8.
2025-12-17 10:03:09 -05:00
Richard Feldman
b5d0f5d4f8 wip 2025-12-17 10:03:01 -05:00
20 changed files with 2283 additions and 8 deletions

View File

@@ -44,7 +44,7 @@ submitted. If you'd like your PR to have the best chance of being merged:
effort. If there isn't already a GitHub issue for your feature with staff
confirmation that we want it, start with a GitHub discussion rather than a PR.
- Include a clear description of **what you're solving**, and why it's important.
- Include **tests**.
- Include **tests**. For UI changes, consider updating visual regression tests (see [Building Zed for macOS](./docs/src/development/macos.md#visual-regression-tests)).
- If it changes the UI, attach **screenshots** or screen recordings.
- Make the PR about **one thing only**, e.g. if it's a bugfix, don't add two
features and a refactoring on top of that.

Cargo.lock generated
View File

@@ -20638,6 +20638,7 @@ dependencies = [
"clap",
"cli",
"client",
"clock",
"codestral",
"collab_ui",
"collections",
@@ -20671,6 +20672,7 @@ dependencies = [
"gpui",
"gpui_tokio",
"http_client",
"image",
"image_viewer",
"inspector_ui",
"install_cli",
@@ -20737,6 +20739,7 @@ dependencies = [
"task",
"tasks_ui",
"telemetry",
"tempfile",
"terminal_view",
"theme",
"theme_extension",

View File

@@ -30,6 +30,8 @@ use smallvec::SmallVec;
#[cfg(any(test, feature = "test-support"))]
pub use test_context::*;
use util::{ResultExt, debug_panic};
#[cfg(all(target_os = "macos", any(test, feature = "test-support")))]
pub use visual_test_context::*;
#[cfg(any(feature = "inspector", debug_assertions))]
use crate::InspectorElementRegistry;
@@ -52,6 +54,8 @@ mod context;
mod entity_map;
#[cfg(any(test, feature = "test-support"))]
mod test_context;
#[cfg(all(target_os = "macos", any(test, feature = "test-support")))]
mod visual_test_context;
/// The duration for which futures returned from [Context::on_app_quit] can run before the application fully quits.
pub const SHUTDOWN_TIMEOUT: Duration = Duration::from_millis(100);

View File

@@ -0,0 +1,478 @@
#[cfg(feature = "screen-capture")]
use crate::capture_window_screenshot;
use crate::{
Action, AnyView, AnyWindowHandle, App, AppCell, AppContext, BackgroundExecutor, Bounds,
ClipboardItem, Context, Entity, ForegroundExecutor, Global, InputEvent, Keystroke, Modifiers,
MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Platform, Point, Render,
Result, Size, Task, TextSystem, Window, WindowBounds, WindowHandle, WindowOptions,
app::GpuiMode, current_platform,
};
use anyhow::anyhow;
#[cfg(feature = "screen-capture")]
use image::RgbaImage;
use std::{future::Future, rc::Rc, sync::Arc, time::Duration};
/// A test context that uses real macOS rendering instead of mocked rendering.
/// This is used for visual tests that need to capture actual screenshots.
///
/// Unlike `TestAppContext` which uses `TestPlatform` with mocked rendering,
/// `VisualTestAppContext` uses the real `MacPlatform` to produce actual rendered output.
///
/// Windows created through this context are positioned off-screen (at coordinates like -10000, -10000)
/// so they are invisible to the user but still fully rendered by the compositor.
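///
/// # Example
///
/// A minimal sketch of the intended flow; `MyView` is a placeholder for any
/// type that implements `Render` and is not part of this change:
///
/// ```ignore
/// let mut cx = VisualTestAppContext::new();
/// let window = cx
///     .open_offscreen_window_default(|_window, cx| cx.new(|_| MyView::default()))
///     .unwrap();
/// cx.run_until_parked();
/// ```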
#[derive(Clone)]
pub struct VisualTestAppContext {
/// The underlying app cell
pub app: Rc<AppCell>,
/// The background executor for running async tasks
pub background_executor: BackgroundExecutor,
/// The foreground executor for running tasks on the main thread
pub foreground_executor: ForegroundExecutor,
platform: Rc<dyn Platform>,
text_system: Arc<TextSystem>,
}
impl VisualTestAppContext {
/// Creates a new `VisualTestAppContext` with real macOS platform rendering.
///
/// This initializes the real macOS platform (not the test platform), which means:
/// - Windows are actually rendered by Metal/the compositor
/// - Screenshots can be captured via ScreenCaptureKit
/// - All platform APIs work as they do in production
pub fn new() -> Self {
let platform = current_platform(false);
let background_executor = platform.background_executor();
let foreground_executor = platform.foreground_executor();
let text_system = Arc::new(TextSystem::new(platform.text_system()));
let asset_source = Arc::new(());
let http_client = http_client::FakeHttpClient::with_404_response();
let mut app = App::new_app(platform.clone(), asset_source, http_client);
app.borrow_mut().mode = GpuiMode::test();
Self {
app,
background_executor,
foreground_executor,
platform,
text_system,
}
}
/// Opens a window positioned off-screen for invisible rendering.
///
/// The window is positioned at (-10000, -10000) so it's not visible on any display,
/// but it's still fully rendered by the compositor and can be captured via ScreenCaptureKit.
///
/// # Arguments
/// * `size` - The size of the window to create
/// * `build_root` - A closure that builds the root view for the window
pub fn open_offscreen_window<V: Render + 'static>(
&mut self,
size: Size<Pixels>,
build_root: impl FnOnce(&mut Window, &mut App) -> Entity<V>,
) -> Result<WindowHandle<V>> {
use crate::{point, px};
let bounds = Bounds {
origin: point(px(-10000.0), px(-10000.0)),
size,
};
let mut cx = self.app.borrow_mut();
cx.open_window(
WindowOptions {
window_bounds: Some(WindowBounds::Windowed(bounds)),
focus: false,
show: true,
..Default::default()
},
build_root,
)
}
/// Opens an off-screen window with default size (1280x800).
pub fn open_offscreen_window_default<V: Render + 'static>(
&mut self,
build_root: impl FnOnce(&mut Window, &mut App) -> Entity<V>,
) -> Result<WindowHandle<V>> {
use crate::{px, size};
self.open_offscreen_window(size(px(1280.0), px(800.0)), build_root)
}
/// Returns whether screen capture is supported on this platform.
pub fn is_screen_capture_supported(&self) -> bool {
self.platform.is_screen_capture_supported()
}
/// Returns the text system used by this context.
pub fn text_system(&self) -> &Arc<TextSystem> {
&self.text_system
}
/// Returns the background executor.
pub fn executor(&self) -> BackgroundExecutor {
self.background_executor.clone()
}
/// Returns the foreground executor.
pub fn foreground_executor(&self) -> ForegroundExecutor {
self.foreground_executor.clone()
}
/// Runs pending background tasks until there's nothing left to do.
pub fn run_until_parked(&self) {
self.background_executor.run_until_parked();
}
/// Updates the app state.
pub fn update<R>(&mut self, f: impl FnOnce(&mut App) -> R) -> R {
let mut app = self.app.borrow_mut();
f(&mut app)
}
/// Reads from the app state.
pub fn read<R>(&self, f: impl FnOnce(&App) -> R) -> R {
let app = self.app.borrow();
f(&app)
}
/// Updates a window.
pub fn update_window<T, F>(&mut self, window: AnyWindowHandle, f: F) -> Result<T>
where
F: FnOnce(AnyView, &mut Window, &mut App) -> T,
{
let mut lock = self.app.borrow_mut();
lock.update_window(window, f)
}
/// Spawns a task on the foreground executor.
pub fn spawn<F, R>(&self, f: F) -> Task<R>
where
F: Future<Output = R> + 'static,
R: 'static,
{
self.foreground_executor.spawn(f)
}
/// Checks if a global of type G exists.
pub fn has_global<G: Global>(&self) -> bool {
let app = self.app.borrow();
app.has_global::<G>()
}
/// Reads a global value.
pub fn read_global<G: Global, R>(&self, f: impl FnOnce(&G, &App) -> R) -> R {
let app = self.app.borrow();
f(app.global::<G>(), &app)
}
/// Sets a global value.
pub fn set_global<G: Global>(&mut self, global: G) {
let mut app = self.app.borrow_mut();
app.set_global(global);
}
/// Updates a global value.
pub fn update_global<G: Global, R>(&mut self, f: impl FnOnce(&mut G, &mut App) -> R) -> R {
let mut lock = self.app.borrow_mut();
lock.update(|cx| {
let mut global = cx.lease_global::<G>();
let result = f(&mut global, cx);
cx.end_global_lease(global);
result
})
}
/// Simulates a sequence of keystrokes on the given window.
///
/// Keystrokes are specified as a space-separated string, e.g., "cmd-p escape".
pub fn simulate_keystrokes(&mut self, window: AnyWindowHandle, keystrokes: &str) {
for keystroke_text in keystrokes.split_whitespace() {
let keystroke = Keystroke::parse(keystroke_text)
.unwrap_or_else(|_| panic!("Invalid keystroke: {}", keystroke_text));
self.dispatch_keystroke(window, keystroke);
}
self.run_until_parked();
}
/// Dispatches a single keystroke to a window.
pub fn dispatch_keystroke(&mut self, window: AnyWindowHandle, keystroke: Keystroke) {
self.update_window(window, |_, window, cx| {
window.dispatch_keystroke(keystroke, cx);
})
.ok();
}
/// Simulates typing text input on the given window.
pub fn simulate_input(&mut self, window: AnyWindowHandle, input: &str) {
for char in input.chars() {
let key = char.to_string();
let keystroke = Keystroke {
modifiers: Modifiers::default(),
key: key.clone(),
key_char: Some(key),
};
self.dispatch_keystroke(window, keystroke);
}
self.run_until_parked();
}
/// Simulates a mouse move event.
pub fn simulate_mouse_move(
&mut self,
window: AnyWindowHandle,
position: Point<Pixels>,
button: impl Into<Option<MouseButton>>,
modifiers: Modifiers,
) {
self.simulate_event(
window,
MouseMoveEvent {
position,
modifiers,
pressed_button: button.into(),
},
);
}
/// Simulates a mouse down event.
pub fn simulate_mouse_down(
&mut self,
window: AnyWindowHandle,
position: Point<Pixels>,
button: MouseButton,
modifiers: Modifiers,
) {
self.simulate_event(
window,
MouseDownEvent {
position,
modifiers,
button,
click_count: 1,
first_mouse: false,
},
);
}
/// Simulates a mouse up event.
pub fn simulate_mouse_up(
&mut self,
window: AnyWindowHandle,
position: Point<Pixels>,
button: MouseButton,
modifiers: Modifiers,
) {
self.simulate_event(
window,
MouseUpEvent {
position,
modifiers,
button,
click_count: 1,
},
);
}
/// Simulates a click (mouse down followed by mouse up).
pub fn simulate_click(
&mut self,
window: AnyWindowHandle,
position: Point<Pixels>,
modifiers: Modifiers,
) {
self.simulate_mouse_down(window, position, MouseButton::Left, modifiers);
self.simulate_mouse_up(window, position, MouseButton::Left, modifiers);
}
/// Simulates an input event on the given window.
pub fn simulate_event<E: InputEvent>(&mut self, window: AnyWindowHandle, event: E) {
self.update_window(window, |_, window, cx| {
window.dispatch_event(event.to_platform_input(), cx);
})
.ok();
self.run_until_parked();
}
/// Dispatches an action to the given window.
pub fn dispatch_action(&mut self, window: AnyWindowHandle, action: impl Action) {
self.update_window(window, |_, window, cx| {
window.dispatch_action(action.boxed_clone(), cx);
})
.ok();
self.run_until_parked();
}
/// Writes to the clipboard.
pub fn write_to_clipboard(&self, item: ClipboardItem) {
self.platform.write_to_clipboard(item);
}
/// Reads from the clipboard.
pub fn read_from_clipboard(&self) -> Option<ClipboardItem> {
self.platform.read_from_clipboard()
}
/// Waits for a condition to become true, with a timeout.
pub async fn wait_for<T: 'static>(
&mut self,
entity: &Entity<T>,
predicate: impl Fn(&T) -> bool,
timeout: Duration,
) -> Result<()> {
let start = std::time::Instant::now();
loop {
{
let app = self.app.borrow();
if predicate(entity.read(&app)) {
return Ok(());
}
}
if start.elapsed() > timeout {
return Err(anyhow!("Timed out waiting for condition"));
}
self.run_until_parked();
self.background_executor
.timer(Duration::from_millis(10))
.await;
}
}
/// Returns the native window ID (CGWindowID on macOS) for a window.
/// This can be used to capture screenshots of specific windows.
#[cfg(feature = "screen-capture")]
pub fn native_window_id(&mut self, window: AnyWindowHandle) -> Result<u32> {
self.update_window(window, |_, window, _| {
window
.native_window_id()
.ok_or_else(|| anyhow!("Window does not have a native window ID"))
})?
}
/// Captures a screenshot of the specified window.
///
/// This uses ScreenCaptureKit to capture the window contents, even if the window
/// is positioned off-screen (e.g., at -10000, -10000 for invisible rendering).
///
/// # Arguments
/// * `window` - The window handle to capture
///
/// # Returns
/// An `RgbaImage` containing the captured window contents, or an error if capture failed.
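///
/// # Example
///
/// A rough sketch, assuming `cx` is this context, `window` was opened with
/// `open_offscreen_window_default`, the caller is async, and the output path
/// is illustrative:
///
/// ```ignore
/// let screenshot = cx.capture_screenshot(window.into()).await?;
/// screenshot.save("target/visual_tests/example.png")?;
/// ```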
#[cfg(feature = "screen-capture")]
pub async fn capture_screenshot(&mut self, window: AnyWindowHandle) -> Result<RgbaImage> {
let window_id = self.native_window_id(window)?;
let rx = capture_window_screenshot(window_id);
rx.await
.map_err(|_| anyhow!("Screenshot capture was cancelled"))?
}
/// Waits for animations to settle by sleeping for roughly two frames (32ms) and then draining pending work.
pub async fn wait_for_animations(&self) {
self.background_executor
.timer(Duration::from_millis(32))
.await;
self.run_until_parked();
}
}
impl Default for VisualTestAppContext {
fn default() -> Self {
Self::new()
}
}
impl AppContext for VisualTestAppContext {
type Result<T> = T;
fn new<T: 'static>(
&mut self,
build_entity: impl FnOnce(&mut Context<T>) -> T,
) -> Self::Result<Entity<T>> {
let mut app = self.app.borrow_mut();
app.new(build_entity)
}
fn reserve_entity<T: 'static>(&mut self) -> Self::Result<crate::Reservation<T>> {
let mut app = self.app.borrow_mut();
app.reserve_entity()
}
fn insert_entity<T: 'static>(
&mut self,
reservation: crate::Reservation<T>,
build_entity: impl FnOnce(&mut Context<T>) -> T,
) -> Self::Result<Entity<T>> {
let mut app = self.app.borrow_mut();
app.insert_entity(reservation, build_entity)
}
fn update_entity<T: 'static, R>(
&mut self,
handle: &Entity<T>,
update: impl FnOnce(&mut T, &mut Context<T>) -> R,
) -> Self::Result<R> {
let mut app = self.app.borrow_mut();
app.update_entity(handle, update)
}
fn as_mut<'a, T>(&'a mut self, _: &Entity<T>) -> Self::Result<crate::GpuiBorrow<'a, T>>
where
T: 'static,
{
panic!("Cannot use as_mut with a visual test app context. Try calling update() first")
}
fn read_entity<T, R>(
&self,
handle: &Entity<T>,
read: impl FnOnce(&T, &App) -> R,
) -> Self::Result<R>
where
T: 'static,
{
let app = self.app.borrow();
app.read_entity(handle, read)
}
fn update_window<T, F>(&mut self, window: AnyWindowHandle, f: F) -> Result<T>
where
F: FnOnce(AnyView, &mut Window, &mut App) -> T,
{
let mut lock = self.app.borrow_mut();
lock.update_window(window, f)
}
fn read_window<T, R>(
&self,
window: &WindowHandle<T>,
read: impl FnOnce(Entity<T>, &App) -> R,
) -> Result<R>
where
T: 'static,
{
let app = self.app.borrow();
app.read_window(window, read)
}
fn background_spawn<R>(&self, future: impl Future<Output = R> + Send + 'static) -> Task<R>
where
R: Send + 'static,
{
self.background_executor.spawn(future)
}
fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result<R>
where
G: Global,
{
let app = self.app.borrow();
callback(app.global::<G>(), &app)
}
}

View File

@@ -425,6 +425,7 @@ impl BackgroundExecutor {
timeout: Option<Duration>,
) -> Result<Fut::Output, impl Future<Output = Fut::Output> + use<Fut>> {
use std::sync::atomic::AtomicBool;
use std::time::Instant;
use parking::Parker;
@@ -432,8 +433,36 @@ impl BackgroundExecutor {
if timeout == Some(Duration::ZERO) {
return Err(future);
}
// If there's no test dispatcher, fall back to production blocking behavior
let Some(dispatcher) = self.dispatcher.as_test() else {
return Err(future);
let deadline = timeout.map(|timeout| Instant::now() + timeout);
let parker = Parker::new();
let unparker = parker.unparker();
let waker = waker_fn(move || {
unparker.unpark();
});
let mut cx = std::task::Context::from_waker(&waker);
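// Poll the future by hand: whenever it is pending, park this thread until the
// waker unparks it or the deadline passes, matching production blocking behavior.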
loop {
match future.as_mut().poll(&mut cx) {
Poll::Ready(result) => return Ok(result),
Poll::Pending => {
let timeout = deadline
.map(|deadline| deadline.saturating_duration_since(Instant::now()));
if let Some(timeout) = timeout {
if !parker.park_timeout(timeout)
&& deadline.is_some_and(|deadline| deadline < Instant::now())
{
return Err(future);
}
} else {
parker.park();
}
}
}
}
};
let mut max_ticks = if timeout.is_some() {

View File

@@ -47,6 +47,8 @@ use crate::{
use anyhow::Result;
use async_task::Runnable;
use futures::channel::oneshot;
#[cfg(any(test, feature = "test-support"))]
use image::RgbaImage;
use image::codecs::gif::GifDecoder;
use image::{AnimationDecoder as _, Frame};
use raw_window_handle::{HasDisplayHandle, HasWindowHandle};
@@ -88,6 +90,15 @@ pub use linux::layer_shell;
#[cfg(any(test, feature = "test-support"))]
pub use test::{TestDispatcher, TestScreenCaptureSource, TestScreenCaptureStream};
#[cfg(all(
target_os = "macos",
feature = "screen-capture",
any(test, feature = "test-support")
))]
pub use mac::{
capture_window_screenshot, cv_pixel_buffer_to_rgba_image, screen_capture_frame_to_rgba_image,
};
/// Returns a background executor for the current platform.
pub fn background_executor() -> BackgroundExecutor {
current_platform(true).background_executor()
@@ -564,6 +575,21 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
fn as_test(&mut self) -> Option<&mut TestWindow> {
None
}
/// Returns the native window ID (CGWindowID on macOS) for window capture.
/// This is used by visual testing infrastructure to capture window screenshots.
#[cfg(any(test, feature = "test-support"))]
fn native_window_id(&self) -> Option<u32> {
None
}
/// Renders the given scene to a texture and returns the pixel data as an RGBA image.
/// This does not present the frame to screen - useful for visual testing where we want
/// to capture what would be rendered without displaying it or requiring the window to be visible.
#[cfg(any(test, feature = "test-support"))]
fn render_to_image(&self, _scene: &Scene) -> Result<RgbaImage> {
anyhow::bail!("render_to_image not implemented for this platform")
}
}
/// This type is public so that our test macro can generate and use it, but it should not

View File

@@ -7,9 +7,13 @@ use crate::{
PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Underline,
get_gamma_correction_ratios,
};
#[cfg(any(test, feature = "test-support"))]
use anyhow::Result;
use blade_graphics as gpu;
use blade_util::{BufferBelt, BufferBeltDescriptor};
use bytemuck::{Pod, Zeroable};
#[cfg(any(test, feature = "test-support"))]
use image::RgbaImage;
#[cfg(target_os = "macos")]
use media::core_video::CVMetalTextureCache;
use std::sync::Arc;
@@ -917,6 +921,13 @@ impl BladeRenderer {
self.wait_for_gpu();
self.last_sync_point = Some(sync_point);
}
/// Renders the scene to a texture and returns the pixel data as an RGBA image.
/// This is not yet implemented for BladeRenderer.
#[cfg(any(test, feature = "test-support"))]
pub fn render_to_image(&mut self, _scene: &Scene) -> Result<RgbaImage> {
anyhow::bail!("render_to_image is not yet implemented for BladeRenderer")
}
}
fn create_path_intermediate_texture(

View File

@@ -8,6 +8,10 @@ mod keyboard;
#[cfg(feature = "screen-capture")]
mod screen_capture;
#[cfg(all(feature = "screen-capture", any(test, feature = "test-support")))]
pub use screen_capture::{
capture_window_screenshot, cv_pixel_buffer_to_rgba_image, screen_capture_frame_to_rgba_image,
};
#[cfg(not(feature = "macos-blade"))]
mod metal_atlas;

View File

@@ -11,6 +11,8 @@ use cocoa::{
foundation::{NSSize, NSUInteger},
quartzcore::AutoresizingMask,
};
#[cfg(any(test, feature = "test-support"))]
use image::RgbaImage;
use core_foundation::base::TCFType;
use core_video::{
@@ -154,6 +156,9 @@ impl MetalRenderer {
layer.set_pixel_format(MTLPixelFormat::BGRA8Unorm);
layer.set_opaque(false);
layer.set_maximum_drawable_count(3);
// Allow texture reading for visual tests (captures screenshots without ScreenCaptureKit)
#[cfg(any(test, feature = "test-support"))]
layer.set_framebuffer_only(false);
unsafe {
let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
@@ -426,6 +431,97 @@ impl MetalRenderer {
}
}
/// Renders the scene to a texture and returns the pixel data as an RGBA image.
/// This does not present the frame to screen - useful for visual testing
/// where we want to capture what would be rendered without displaying it.
#[cfg(any(test, feature = "test-support"))]
pub fn render_to_image(&mut self, scene: &Scene) -> Result<RgbaImage> {
let layer = self.layer.clone();
let viewport_size = layer.drawable_size();
let viewport_size: Size<DevicePixels> = size(
(viewport_size.width.ceil() as i32).into(),
(viewport_size.height.ceil() as i32).into(),
);
let drawable = layer
.next_drawable()
.ok_or_else(|| anyhow::anyhow!("Failed to get drawable for render_to_image"))?;
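// Retry loop: if drawing fails because the instance buffer is too small,
// grow the buffer and try again before giving up.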
loop {
let mut instance_buffer = self.instance_buffer_pool.lock().acquire(&self.device);
let command_buffer =
self.draw_primitives(scene, &mut instance_buffer, drawable, viewport_size);
match command_buffer {
Ok(command_buffer) => {
let instance_buffer_pool = self.instance_buffer_pool.clone();
let instance_buffer = Cell::new(Some(instance_buffer));
let block = ConcreteBlock::new(move |_| {
if let Some(instance_buffer) = instance_buffer.take() {
instance_buffer_pool.lock().release(instance_buffer);
}
});
let block = block.copy();
command_buffer.add_completed_handler(&block);
// Commit and wait for completion without presenting
command_buffer.commit();
command_buffer.wait_until_completed();
// Read pixels from the texture
let texture = drawable.texture();
let width = texture.width() as u32;
let height = texture.height() as u32;
let bytes_per_row = width as usize * 4;
let buffer_size = height as usize * bytes_per_row;
let mut pixels = vec![0u8; buffer_size];
let region = metal::MTLRegion {
origin: metal::MTLOrigin { x: 0, y: 0, z: 0 },
size: metal::MTLSize {
width: width as u64,
height: height as u64,
depth: 1,
},
};
texture.get_bytes(
pixels.as_mut_ptr() as *mut std::ffi::c_void,
bytes_per_row as u64,
region,
0,
);
// Convert BGRA to RGBA (swap B and R channels)
for chunk in pixels.chunks_exact_mut(4) {
chunk.swap(0, 2);
}
return RgbaImage::from_raw(width, height, pixels).ok_or_else(|| {
anyhow::anyhow!("Failed to create RgbaImage from pixel data")
});
}
Err(err) => {
log::error!(
"failed to render: {}. retrying with larger instance buffer size",
err
);
let mut instance_buffer_pool = self.instance_buffer_pool.lock();
let buffer_size = instance_buffer_pool.buffer_size;
if buffer_size >= 256 * 1024 * 1024 {
anyhow::bail!("instance buffer size grew too large: {}", buffer_size);
}
instance_buffer_pool.reset(buffer_size * 2);
log::info!(
"increased instance buffer size to {}",
instance_buffer_pool.buffer_size
);
}
}
}
}
fn draw_primitives(
&mut self,
scene: &Scene,

View File

@@ -7,17 +7,25 @@ use crate::{
use anyhow::{Result, anyhow};
use block::ConcreteBlock;
use cocoa::{
base::{YES, id, nil},
base::{NO, YES, id, nil},
foundation::NSArray,
};
use collections::HashMap;
use core_foundation::base::TCFType;
use core_graphics::display::{
CGDirectDisplayID, CGDisplayCopyDisplayMode, CGDisplayModeGetPixelHeight,
CGDisplayModeGetPixelWidth, CGDisplayModeRelease,
use core_graphics::{
base::CGFloat,
color_space::CGColorSpace,
display::{
CGDirectDisplayID, CGDisplayCopyDisplayMode, CGDisplayModeGetPixelHeight,
CGDisplayModeGetPixelWidth, CGDisplayModeRelease,
},
image::CGImage,
};
use core_video::pixel_buffer::CVPixelBuffer;
use ctor::ctor;
use foreign_types::ForeignType;
use futures::channel::oneshot;
use image::{ImageBuffer, Rgba, RgbaImage};
use media::core_media::{CMSampleBuffer, CMSampleBufferRef};
use metal::NSInteger;
use objc::{
@@ -285,6 +293,281 @@ pub(crate) fn get_sources() -> oneshot::Receiver<Result<Vec<Rc<dyn ScreenCapture
}
}
/// Captures a single screenshot of a specific window by its CGWindowID.
///
/// This uses ScreenCaptureKit's `initWithDesktopIndependentWindow:` API which can
/// capture windows even when they are positioned off-screen (e.g., at -10000, -10000).
///
/// # Arguments
/// * `window_id` - The CGWindowID (NSWindow's windowNumber) of the window to capture
///
/// # Returns
/// An `RgbaImage` containing the captured window contents, or an error if capture failed.
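///
/// # Example
///
/// A sketch of the expected call pattern, assuming `window_id` was obtained
/// from `MacWindow::window_number()` and the caller is async:
///
/// ```ignore
/// let rx = capture_window_screenshot(window_id);
/// let image = rx.await.map_err(|_| anyhow!("capture was cancelled"))??;
/// ```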
pub fn capture_window_screenshot(window_id: u32) -> oneshot::Receiver<Result<RgbaImage>> {
let (tx, rx) = oneshot::channel();
let tx = Rc::new(RefCell::new(Some(tx)));
unsafe {
log::info!(
"capture_window_screenshot: looking for window_id={}",
window_id
);
let content_handler = ConcreteBlock::new(move |shareable_content: id, error: id| {
log::info!("capture_window_screenshot: content handler called");
if error != nil {
if let Some(sender) = tx.borrow_mut().take() {
let msg: id = msg_send![error, localizedDescription];
sender
.send(Err(anyhow!(
"Failed to get shareable content: {:?}",
NSStringExt::to_str(&msg)
)))
.ok();
}
return;
}
let windows: id = msg_send![shareable_content, windows];
let count: usize = msg_send![windows, count];
let mut target_window: id = nil;
log::info!(
"capture_window_screenshot: searching {} windows for window_id={}",
count,
window_id
);
for i in 0..count {
let window: id = msg_send![windows, objectAtIndex: i];
let wid: u32 = msg_send![window, windowID];
if wid == window_id {
log::info!(
"capture_window_screenshot: found matching window at index {}",
i
);
target_window = window;
break;
}
}
if target_window == nil {
if let Some(sender) = tx.borrow_mut().take() {
sender
.send(Err(anyhow!(
"Window with ID {} not found in shareable content",
window_id
)))
.ok();
}
return;
}
log::info!("capture_window_screenshot: calling capture_window_frame");
capture_window_frame(target_window, &tx);
});
let content_handler = content_handler.copy();
let _: () = msg_send![
class!(SCShareableContent),
getShareableContentExcludingDesktopWindows:NO
onScreenWindowsOnly:NO
completionHandler:content_handler
];
}
rx
}
unsafe fn capture_window_frame(
sc_window: id,
tx: &Rc<RefCell<Option<oneshot::Sender<Result<RgbaImage>>>>>,
) {
log::info!("capture_window_frame: creating filter for window");
let filter: id = msg_send![class!(SCContentFilter), alloc];
let filter: id = msg_send![filter, initWithDesktopIndependentWindow: sc_window];
log::info!("capture_window_frame: filter created: {:?}", filter);
let configuration: id = msg_send![class!(SCStreamConfiguration), alloc];
let configuration: id = msg_send![configuration, init];
let frame: cocoa::foundation::NSRect = msg_send![sc_window, frame];
let width = frame.size.width as i64;
let height = frame.size.height as i64;
log::info!("capture_window_frame: window frame {}x{}", width, height);
if width <= 0 || height <= 0 {
if let Some(tx) = tx.borrow_mut().take() {
tx.send(Err(anyhow!(
"Window has invalid dimensions: {}x{}",
width,
height
)))
.ok();
}
return;
}
let _: () = msg_send![configuration, setWidth: width];
let _: () = msg_send![configuration, setHeight: height];
let _: () = msg_send![configuration, setScalesToFit: true];
let _: () = msg_send![configuration, setPixelFormat: 0x42475241u32]; // 'BGRA'
let _: () = msg_send![configuration, setShowsCursor: false];
let _: () = msg_send![configuration, setCapturesAudio: false];
let tx_for_capture = tx.clone();
// The completion handler receives (CGImageRef, NSError*), not CMSampleBuffer
let capture_handler =
ConcreteBlock::new(move |cg_image: core_graphics::sys::CGImageRef, error: id| {
log::info!("Screenshot capture handler called");
let Some(tx) = tx_for_capture.borrow_mut().take() else {
log::warn!("Screenshot capture: tx already taken");
return;
};
unsafe {
if error != nil {
let msg: id = msg_send![error, localizedDescription];
let error_str = NSStringExt::to_str(&msg);
log::error!("Screenshot capture error from API: {:?}", error_str);
tx.send(Err(anyhow!("Screenshot capture failed: {:?}", error_str)))
.ok();
return;
}
if cg_image.is_null() {
log::error!("Screenshot capture: cg_image is null");
tx.send(Err(anyhow!(
"Screenshot capture returned null CGImage. \
This may mean Screen Recording permission is not granted."
)))
.ok();
return;
}
log::info!("Screenshot capture: got CGImage, converting...");
let cg_image = CGImage::from_ptr(cg_image);
match cg_image_to_rgba_image(&cg_image) {
Ok(image) => {
log::info!(
"Screenshot capture: success! {}x{}",
image.width(),
image.height()
);
tx.send(Ok(image)).ok();
}
Err(e) => {
log::error!("Screenshot capture: CGImage conversion failed: {}", e);
tx.send(Err(e)).ok();
}
}
}
});
let capture_handler = capture_handler.copy();
log::info!("Calling SCScreenshotManager captureImageWithFilter...");
let _: () = msg_send![
class!(SCScreenshotManager),
captureImageWithFilter: filter
configuration: configuration
completionHandler: capture_handler
];
log::info!("SCScreenshotManager captureImageWithFilter called");
}
/// Converts a CGImage to an RgbaImage.
fn cg_image_to_rgba_image(cg_image: &CGImage) -> Result<RgbaImage> {
let width = cg_image.width();
let height = cg_image.height();
if width == 0 || height == 0 {
return Err(anyhow!("CGImage has zero dimensions: {}x{}", width, height));
}
// Create a bitmap context to draw the CGImage into
let color_space = CGColorSpace::create_device_rgb();
let bytes_per_row = width * 4;
let mut pixel_data: Vec<u8> = vec![0; height * bytes_per_row];
let context = core_graphics::context::CGContext::create_bitmap_context(
Some(pixel_data.as_mut_ptr() as *mut c_void),
width,
height,
8, // bits per component
bytes_per_row, // bytes per row
&color_space,
core_graphics::base::kCGImageAlphaPremultipliedLast // RGBA
| core_graphics::base::kCGBitmapByteOrder32Big,
);
// Draw the image into the context
let rect = core_graphics::geometry::CGRect::new(
&core_graphics::geometry::CGPoint::new(0.0, 0.0),
&core_graphics::geometry::CGSize::new(width as CGFloat, height as CGFloat),
);
context.draw_image(rect, cg_image);
// The pixel data is now in RGBA format
ImageBuffer::<Rgba<u8>, Vec<u8>>::from_raw(width as u32, height as u32, pixel_data)
.ok_or_else(|| anyhow!("Failed to create RgbaImage from CGImage pixel data"))
}
/// Converts a CVPixelBuffer (in BGRA format) to an RgbaImage.
///
/// This function locks the pixel buffer, reads the raw pixel data,
/// converts from BGRA to RGBA format, and returns an image::RgbaImage.
pub fn cv_pixel_buffer_to_rgba_image(pixel_buffer: &CVPixelBuffer) -> Result<RgbaImage> {
use core_video::r#return::kCVReturnSuccess;
unsafe {
if pixel_buffer.lock_base_address(0) != kCVReturnSuccess {
return Err(anyhow!("Failed to lock pixel buffer base address"));
}
let width = pixel_buffer.get_width();
let height = pixel_buffer.get_height();
let bytes_per_row = pixel_buffer.get_bytes_per_row();
let base_address = pixel_buffer.get_base_address();
if base_address.is_null() {
pixel_buffer.unlock_base_address(0);
return Err(anyhow!("Pixel buffer base address is null"));
}
let mut rgba_data = Vec::with_capacity(width * height * 4);
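// Rows may be padded beyond width * 4 bytes, so index each row via
// bytes_per_row rather than assuming tightly packed BGRA data.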
for y in 0..height {
let row_start = base_address.add(y * bytes_per_row) as *const u8;
for x in 0..width {
let pixel = row_start.add(x * 4);
let b = *pixel;
let g = *pixel.add(1);
let r = *pixel.add(2);
let a = *pixel.add(3);
rgba_data.push(r);
rgba_data.push(g);
rgba_data.push(b);
rgba_data.push(a);
}
}
pixel_buffer.unlock_base_address(0);
ImageBuffer::<Rgba<u8>, Vec<u8>>::from_raw(width as u32, height as u32, rgba_data)
.ok_or_else(|| anyhow!("Failed to create RgbaImage from pixel data"))
}
}
/// Converts a ScreenCaptureFrame to an RgbaImage.
///
/// This is useful for converting frames received from continuous screen capture streams.
pub fn screen_capture_frame_to_rgba_image(frame: &ScreenCaptureFrame) -> Result<RgbaImage> {
unsafe {
let pixel_buffer =
CVPixelBuffer::wrap_under_get_rule(frame.0.as_concrete_TypeRef() as *mut _);
cv_pixel_buffer_to_rgba_image(&pixel_buffer)
}
}
#[ctor]
unsafe fn build_classes() {
let mut decl = ClassDecl::new("GPUIStreamDelegate", class!(NSObject)).unwrap();

View File

@@ -8,6 +8,8 @@ use crate::{
WindowBounds, WindowControlArea, WindowKind, WindowParams, dispatch_get_main_queue,
dispatch_sys::dispatch_async_f, platform::PlatformInputHandler, point, px, size,
};
#[cfg(any(test, feature = "test-support"))]
use anyhow::Result;
use block::ConcreteBlock;
use cocoa::{
appkit::{
@@ -25,6 +27,8 @@ use cocoa::{
NSUserDefaults,
},
};
#[cfg(any(test, feature = "test-support"))]
use image::RgbaImage;
use core_graphics::display::{CGDirectDisplayID, CGPoint, CGRect};
use ctor::ctor;
@@ -931,6 +935,14 @@ impl MacWindow {
}
}
}
/// Returns the CGWindowID (NSWindow's windowNumber) for this window.
/// This can be used for ScreenCaptureKit window capture.
#[cfg(any(test, feature = "test-support"))]
pub fn window_number(&self) -> u32 {
let this = self.0.lock();
unsafe { this.native_window.windowNumber() as u32 }
}
}
impl Drop for MacWindow {
@@ -1557,6 +1569,17 @@ impl PlatformWindow for MacWindow {
let _: () = msg_send![window, performWindowDragWithEvent: event];
}
}
#[cfg(any(test, feature = "test-support"))]
fn native_window_id(&self) -> Option<u32> {
Some(self.window_number())
}
#[cfg(any(test, feature = "test-support"))]
fn render_to_image(&self, scene: &crate::Scene) -> Result<RgbaImage> {
let mut this = self.0.lock();
this.renderer.render_to_image(scene)
}
}
impl rwh::HasWindowHandle for MacWindow {

View File

@@ -1776,6 +1776,23 @@ impl Window {
self.platform_window.bounds()
}
/// Returns the native window ID (CGWindowID on macOS) for window capture.
/// This is used by visual testing infrastructure to capture window screenshots.
/// Returns `None` on platforms that don't expose a native window ID.
#[cfg(any(test, feature = "test-support"))]
pub fn native_window_id(&self) -> Option<u32> {
self.platform_window.native_window_id()
}
/// Renders the current frame's scene to a texture and returns the pixel data as an RGBA image.
/// This does not present the frame to screen - useful for visual testing where we want
/// to capture what would be rendered without displaying it or requiring the window to be visible.
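///
/// # Example
///
/// A minimal sketch mirroring how the visual test runner calls this, where
/// `handle` is an `AnyWindowHandle`, `cx` is an `&mut App`, and the output
/// path is illustrative:
///
/// ```ignore
/// let image = cx.update_window(handle, |_view, window, _cx| window.render_to_image())??;
/// image.save("target/visual_tests/window.png")?;
/// ```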
#[cfg(any(test, feature = "test-support"))]
pub fn render_to_image(&self) -> anyhow::Result<image::RgbaImage> {
self.platform_window
.render_to_image(&self.rendered_frame.scene)
}
/// Set the content size of the window.
pub fn resize(&mut self, size: Size<Pixels>) {
self.platform_window.resize(size);

View File

@@ -12,11 +12,40 @@ workspace = true
[features]
tracy = ["ztracing/tracy"]
test-support = [
"gpui/test-support",
"gpui/screen-capture",
"dep:image",
"dep:semver",
"workspace/test-support",
"project/test-support",
"editor/test-support",
"terminal_view/test-support",
"image_viewer/test-support",
]
visual-tests = [
"gpui/test-support",
"gpui/screen-capture",
"dep:image",
"dep:semver",
"dep:tempfile",
"workspace/test-support",
"project/test-support",
"editor/test-support",
"terminal_view/test-support",
"image_viewer/test-support",
"clock/test-support",
]
[[bin]]
name = "zed"
path = "src/zed-main.rs"
[[bin]]
name = "visual_test_runner"
path = "src/visual_test_runner.rs"
required-features = ["visual-tests"]
[lib]
name = "zed"
path = "src/main.rs"
@@ -74,6 +103,10 @@ gpui = { workspace = true, features = [
"font-kit",
"windows-manifest",
] }
image = { workspace = true, optional = true }
semver = { workspace = true, optional = true }
tempfile = { workspace = true, optional = true }
clock = { workspace = true, optional = true }
gpui_tokio.workspace = true
rayon.workspace = true
@@ -185,7 +218,7 @@ ashpd.workspace = true
call = { workspace = true, features = ["test-support"] }
dap = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support", "screen-capture"] }
image_viewer = { workspace = true, features = ["test-support"] }
itertools.workspace = true
language = { workspace = true, features = ["test-support"] }
@@ -196,11 +229,11 @@ terminal_view = { workspace = true, features = ["test-support"] }
tree-sitter-md.workspace = true
tree-sitter-rust.workspace = true
workspace = { workspace = true, features = ["test-support"] }
image.workspace = true
agent_ui = { workspace = true, features = ["test-support"] }
agent_ui_v2 = { workspace = true, features = ["test-support"] }
search = { workspace = true, features = ["test-support"] }
[package.metadata.bundle-dev]
icon = ["resources/app-icon-dev@2x.png", "resources/app-icon-dev.png"]
identifier = "dev.zed.Zed-Dev"

View File

@@ -0,0 +1,696 @@
//! Visual Test Runner
//!
//! This binary runs visual regression tests for Zed's UI. It captures screenshots
//! of real Zed windows and compares them against baseline images.
//!
//! ## How It Works
//!
//! This tool uses direct texture capture - it renders the scene to a Metal texture
//! and reads the pixels back directly. This approach:
//! - Does NOT require Screen Recording permission
//! - Does NOT require the window to be visible on screen
//! - Captures raw GPUI output without system window chrome
//!
//! ## Usage
//!
//! Run the visual tests:
//! cargo run -p zed --bin visual_test_runner --features visual-tests
//!
//! Update baseline images (when UI intentionally changes):
//! UPDATE_BASELINE=1 cargo run -p zed --bin visual_test_runner --features visual-tests
//!
//! ## Environment Variables
//!
//! UPDATE_BASELINE - Set to update baseline images instead of comparing
//! VISUAL_TEST_OUTPUT_DIR - Directory to save test output (default: target/visual_tests)
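//!
//! For example, to refresh baselines while writing output images to a custom
//! directory (the directory path below is illustrative):
//!   UPDATE_BASELINE=1 VISUAL_TEST_OUTPUT_DIR=/tmp/zed_visual_tests \
//!     cargo run -p zed --bin visual_test_runner --features visual-tests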
use anyhow::{Context, Result};
use gpui::{
AppContext as _, Application, Bounds, Window, WindowBounds, WindowHandle, WindowOptions, point,
px, size,
};
use image::RgbaImage;
use project_panel::ProjectPanel;
use settings::SettingsStore;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use workspace::{AppState, Workspace};
/// Baseline images live in this directory, resolved relative to the workspace root
const BASELINE_DIR: &str = "crates/zed/test_fixtures/visual_tests";
/// Threshold for image comparison (0.0 to 1.0)
/// At least this fraction of pixels must match for a test to pass
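/// At the default 1280x800 window size (1,024,000 pixels), 0.99 tolerates up
/// to 10,240 differing pixels before a test fails.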
const MATCH_THRESHOLD: f64 = 0.99;
fn main() {
env_logger::builder()
.filter_level(log::LevelFilter::Info)
.init();
let update_baseline = std::env::var("UPDATE_BASELINE").is_ok();
if update_baseline {
println!("=== Visual Test Runner (UPDATE MODE) ===\n");
println!("Baseline images will be updated.\n");
} else {
println!("=== Visual Test Runner ===\n");
}
// Create a temporary directory for test files
let temp_dir = tempfile::tempdir().expect("Failed to create temp directory");
let project_path = temp_dir.path().join("project");
std::fs::create_dir_all(&project_path).expect("Failed to create project directory");
// Create test files in the real filesystem
create_test_files(&project_path);
let project_path_clone = project_path.clone();
let test_result = std::panic::catch_unwind(|| {
Application::new().run(move |cx| {
// Initialize settings store first (required by theme and other subsystems)
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
// Create AppState using the production-like initialization
let app_state = init_app_state(cx);
// Initialize all Zed subsystems
gpui_tokio::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
client::init(&app_state.client, cx);
audio::init(cx);
workspace::init(app_state.clone(), cx);
release_channel::init(semver::Version::new(0, 0, 0), cx);
command_palette::init(cx);
editor::init(cx);
call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
title_bar::init(cx);
project_panel::init(cx);
outline_panel::init(cx);
terminal_view::init(cx);
image_viewer::init(cx);
search::init(cx);
// Open a real Zed workspace window
let window_size = size(px(1280.0), px(800.0));
// Window can be hidden since we use direct texture capture (reading pixels from
// Metal texture) instead of ScreenCaptureKit which requires visible windows.
let bounds = Bounds {
origin: point(px(0.0), px(0.0)),
size: window_size,
};
// Create a project for the workspace
let project = project::Project::local(
app_state.client.clone(),
app_state.node_runtime.clone(),
app_state.user_store.clone(),
app_state.languages.clone(),
app_state.fs.clone(),
None,
false,
cx,
);
let workspace_window: WindowHandle<Workspace> = cx
.open_window(
WindowOptions {
window_bounds: Some(WindowBounds::Windowed(bounds)),
focus: false,
show: false,
..Default::default()
},
|window, cx| {
cx.new(|cx| {
Workspace::new(None, project.clone(), app_state.clone(), window, cx)
})
},
)
.expect("Failed to open workspace window");
// Add the test project as a worktree directly to the project
let add_worktree_task = workspace_window
.update(cx, |workspace, _window, cx| {
workspace.project().update(cx, |project, cx| {
project.find_or_create_worktree(&project_path_clone, true, cx)
})
})
.expect("Failed to update workspace");
// Spawn async task to set up the UI and capture screenshot
cx.spawn(async move |mut cx| {
// Wait for the worktree to be added
if let Err(e) = add_worktree_task.await {
eprintln!("Failed to add worktree: {:?}", e);
}
// Wait for UI to settle
cx.background_executor()
.timer(std::time::Duration::from_millis(500))
.await;
// Create and add the project panel to the workspace
let panel_task = cx.update(|cx| {
workspace_window
.update(cx, |_workspace, window, cx| {
let weak_workspace = cx.weak_entity();
window.spawn(cx, async move |cx| {
ProjectPanel::load(weak_workspace, cx.clone()).await
})
})
.ok()
});
if let Ok(Some(task)) = panel_task {
if let Ok(panel) = task.await {
cx.update(|cx| {
workspace_window
.update(cx, |workspace, window, cx| {
workspace.add_panel(panel, window, cx);
})
.ok();
})
.ok();
}
}
// Wait for panel to be added
cx.background_executor()
.timer(std::time::Duration::from_millis(500))
.await;
// Open the project panel
cx.update(|cx| {
workspace_window
.update(cx, |workspace, window, cx| {
workspace.open_panel::<ProjectPanel>(window, cx);
})
.ok();
})
.ok();
// Wait for project panel to render
cx.background_executor()
.timer(std::time::Duration::from_millis(500))
.await;
// Open main.rs in the editor
let open_file_task = cx.update(|cx| {
workspace_window
.update(cx, |workspace, window, cx| {
let worktree = workspace.project().read(cx).worktrees(cx).next();
if let Some(worktree) = worktree {
let worktree_id = worktree.read(cx).id();
let rel_path: std::sync::Arc<util::rel_path::RelPath> =
util::rel_path::rel_path("src/main.rs").into();
let project_path: project::ProjectPath =
(worktree_id, rel_path.clone()).into();
Some(workspace.open_path(project_path, None, true, window, cx))
} else {
None
}
})
.ok()
.flatten()
});
if let Ok(Some(task)) = open_file_task {
if let Ok(item) = task.await {
// Focus the opened item to dismiss the welcome screen
cx.update(|cx| {
workspace_window
.update(cx, |workspace, window, cx| {
let pane = workspace.active_pane().clone();
pane.update(cx, |pane, cx| {
if let Some(index) = pane.index_for_item(item.as_ref()) {
pane.activate_item(index, true, true, window, cx);
}
});
})
.ok();
})
.ok();
// Wait for item activation to render
cx.background_executor()
.timer(std::time::Duration::from_millis(500))
.await;
}
}
// Request a window refresh to ensure all pending effects are processed
cx.refresh().ok();
// Wait for UI to fully stabilize
cx.background_executor()
.timer(std::time::Duration::from_secs(2))
.await;
// Track test results
let mut passed = 0;
let mut failed = 0;
let mut updated = 0;
// Run Test 1: Project Panel (with project panel visible)
println!("\n--- Test 1: project_panel ---");
let test_result = run_visual_test(
"project_panel",
workspace_window.into(),
&mut cx,
update_baseline,
)
.await;
match test_result {
Ok(TestResult::Passed) => {
println!("✓ project_panel: PASSED");
passed += 1;
}
Ok(TestResult::BaselineUpdated(path)) => {
println!("✓ project_panel: Baseline updated at {}", path.display());
updated += 1;
}
Err(e) => {
eprintln!("✗ project_panel: FAILED - {}", e);
failed += 1;
}
}
// Close the project panel for the second test
cx.update(|cx| {
workspace_window
.update(cx, |workspace, window, cx| {
workspace.close_panel::<ProjectPanel>(window, cx);
})
.ok();
})
.ok();
// Refresh and wait for panel to close
cx.refresh().ok();
cx.background_executor()
.timer(std::time::Duration::from_millis(500))
.await;
// Run Test 2: Workspace with Editor (without project panel)
println!("\n--- Test 2: workspace_with_editor ---");
let test_result = run_visual_test(
"workspace_with_editor",
workspace_window.into(),
&mut cx,
update_baseline,
)
.await;
match test_result {
Ok(TestResult::Passed) => {
println!("✓ workspace_with_editor: PASSED");
passed += 1;
}
Ok(TestResult::BaselineUpdated(path)) => {
println!(
"✓ workspace_with_editor: Baseline updated at {}",
path.display()
);
updated += 1;
}
Err(e) => {
eprintln!("✗ workspace_with_editor: FAILED - {}", e);
failed += 1;
}
}
// Print summary
println!("\n=== Test Summary ===");
println!("Passed: {}", passed);
println!("Failed: {}", failed);
if updated > 0 {
println!("Baselines Updated: {}", updated);
}
if failed > 0 {
eprintln!("\n=== Visual Tests FAILED ===");
cx.update(|cx| cx.quit()).ok();
std::process::exit(1);
} else {
println!("\n=== All Visual Tests PASSED ===");
}
cx.update(|cx| cx.quit()).ok();
})
.detach();
});
});
// temp_dir must stay alive for the entire app run above; drop it only now that the tests have finished
drop(temp_dir);
if test_result.is_err() {
std::process::exit(1);
}
}
enum TestResult {
Passed,
BaselineUpdated(PathBuf),
}
async fn run_visual_test(
test_name: &str,
window: gpui::AnyWindowHandle,
cx: &mut gpui::AsyncApp,
update_baseline: bool,
) -> Result<TestResult> {
// Capture the screenshot using direct texture capture (no ScreenCaptureKit needed)
let screenshot = cx.update(|cx| capture_screenshot(window, cx))??;
// Get paths
let baseline_path = get_baseline_path(test_name);
let output_dir = std::env::var("VISUAL_TEST_OUTPUT_DIR")
.unwrap_or_else(|_| "target/visual_tests".to_string());
let actual_path = Path::new(&output_dir).join(format!("{}.png", test_name));
// Create output directory
if let Some(parent) = actual_path.parent() {
std::fs::create_dir_all(parent)?;
}
// Save the actual screenshot
screenshot.save(&actual_path)?;
println!("Screenshot saved to: {}", actual_path.display());
if update_baseline {
// Update the baseline
if let Some(parent) = baseline_path.parent() {
std::fs::create_dir_all(parent)?;
}
screenshot.save(&baseline_path)?;
return Ok(TestResult::BaselineUpdated(baseline_path));
}
// Compare against baseline
if !baseline_path.exists() {
return Err(anyhow::anyhow!(
"Baseline image not found: {}\n\
Run with UPDATE_BASELINE=1 to create it.",
baseline_path.display()
));
}
let baseline = image::open(&baseline_path)
.context("Failed to load baseline image")?
.to_rgba8();
let comparison = compare_images(&baseline, &screenshot);
println!(
"Image comparison: {:.2}% match ({} different pixels out of {})",
comparison.match_percentage * 100.0,
comparison.diff_pixel_count,
comparison.total_pixels
);
if comparison.match_percentage >= MATCH_THRESHOLD {
Ok(TestResult::Passed)
} else {
// Save the diff image for debugging
if let Some(diff_image) = comparison.diff_image {
let diff_path = Path::new(&output_dir).join(format!("{}_diff.png", test_name));
diff_image.save(&diff_path)?;
println!("Diff image saved to: {}", diff_path.display());
}
Err(anyhow::anyhow!(
"Screenshot does not match baseline.\n\
Match: {:.2}% (threshold: {:.2}%)\n\
Actual: {}\n\
Baseline: {}\n\
\n\
Run with UPDATE_BASELINE=1 to update the baseline if this change is intentional.",
comparison.match_percentage * 100.0,
MATCH_THRESHOLD * 100.0,
actual_path.display(),
baseline_path.display()
))
}
}
fn get_baseline_path(test_name: &str) -> PathBuf {
// Find the workspace root by walking up until a directory contains both Cargo.toml and crates/
let mut path = std::env::current_dir().expect("Failed to get current directory");
while !path.join("Cargo.toml").exists() || !path.join("crates").exists() {
if !path.pop() {
panic!("Could not find workspace root");
}
}
path.join(BASELINE_DIR).join(format!("{}.png", test_name))
}
struct ImageComparison {
match_percentage: f64,
diff_image: Option<RgbaImage>,
diff_pixel_count: u64,
total_pixels: u64,
}
fn compare_images(baseline: &RgbaImage, actual: &RgbaImage) -> ImageComparison {
// Check dimensions
if baseline.dimensions() != actual.dimensions() {
return ImageComparison {
match_percentage: 0.0,
diff_image: None,
diff_pixel_count: baseline.width() as u64 * baseline.height() as u64,
total_pixels: baseline.width() as u64 * baseline.height() as u64,
};
}
let (width, height) = baseline.dimensions();
let total_pixels = width as u64 * height as u64;
let mut diff_count: u64 = 0;
let mut diff_image = RgbaImage::new(width, height);
for y in 0..height {
for x in 0..width {
let baseline_pixel = baseline.get_pixel(x, y);
let actual_pixel = actual.get_pixel(x, y);
if pixels_are_similar(baseline_pixel, actual_pixel) {
// Matching pixel - show as dimmed version of actual
diff_image.put_pixel(
x,
y,
image::Rgba([
actual_pixel[0] / 3,
actual_pixel[1] / 3,
actual_pixel[2] / 3,
255,
]),
);
} else {
diff_count += 1;
// Different pixel - highlight in red
diff_image.put_pixel(x, y, image::Rgba([255, 0, 0, 255]));
}
}
}
let match_percentage = if total_pixels > 0 {
(total_pixels - diff_count) as f64 / total_pixels as f64
} else {
1.0
};
ImageComparison {
match_percentage,
diff_image: Some(diff_image),
diff_pixel_count: diff_count,
total_pixels,
}
}
fn pixels_are_similar(a: &image::Rgba<u8>, b: &image::Rgba<u8>) -> bool {
// Allow small differences due to anti-aliasing, font rendering, etc.
const TOLERANCE: i16 = 2;
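// e.g. channel values 120 and 122 (difference 2) still count as matching,
// while 120 and 123 (difference 3) do not.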
(a[0] as i16 - b[0] as i16).abs() <= TOLERANCE
&& (a[1] as i16 - b[1] as i16).abs() <= TOLERANCE
&& (a[2] as i16 - b[2] as i16).abs() <= TOLERANCE
&& (a[3] as i16 - b[3] as i16).abs() <= TOLERANCE
}
fn capture_screenshot(window: gpui::AnyWindowHandle, cx: &mut gpui::App) -> Result<RgbaImage> {
// Use direct texture capture - renders the scene to a texture and reads pixels back.
// This does not require the window to be visible on screen.
let screenshot = cx.update_window(window, |_view, window: &mut Window, _cx| {
window.render_to_image()
})??;
println!(
"Screenshot captured: {}x{} pixels",
screenshot.width(),
screenshot.height()
);
Ok(screenshot)
}
/// Create test files in a real filesystem directory
fn create_test_files(project_path: &Path) {
let src_dir = project_path.join("src");
std::fs::create_dir_all(&src_dir).expect("Failed to create src directory");
std::fs::write(
src_dir.join("main.rs"),
r#"fn main() {
println!("Hello, world!");
let message = greet("Zed");
println!("{}", message);
}
fn greet(name: &str) -> String {
format!("Welcome to {}, the editor of the future!", name)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_greet() {
assert_eq!(greet("World"), "Welcome to World, the editor of the future!");
}
}
"#,
)
.expect("Failed to write main.rs");
std::fs::write(
src_dir.join("lib.rs"),
r#"//! A sample library for visual testing.
pub mod utils;
/// Adds two numbers together.
pub fn add(a: i32, b: i32) -> i32 {
a + b
}
/// Subtracts the second number from the first.
pub fn subtract(a: i32, b: i32) -> i32 {
a - b
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_add() {
assert_eq!(add(2, 3), 5);
}
#[test]
fn test_subtract() {
assert_eq!(subtract(5, 3), 2);
}
}
"#,
)
.expect("Failed to write lib.rs");
std::fs::write(
src_dir.join("utils.rs"),
r#"//! Utility functions for the sample project.
/// Formats a greeting message.
pub fn format_greeting(name: &str) -> String {
format!("Hello, {}!", name)
}
/// Formats a farewell message.
pub fn format_farewell(name: &str) -> String {
format!("Goodbye, {}!", name)
}
"#,
)
.expect("Failed to write utils.rs");
std::fs::write(
project_path.join("Cargo.toml"),
r#"[package]
name = "test-project"
version = "0.1.0"
edition = "2021"
[dependencies]
[dev-dependencies]
"#,
)
.expect("Failed to write Cargo.toml");
std::fs::write(
project_path.join("README.md"),
r#"# Test Project
This is a test project for visual testing of Zed.
## Description
A simple Rust project used to verify that Zed's visual testing
infrastructure can capture screenshots of real workspaces.
## Features
- Sample Rust code with main.rs, lib.rs, and utils.rs
- Standard Cargo.toml configuration
- Example tests
## Building
```bash
cargo build
```
## Testing
```bash
cargo test
```
"#,
)
.expect("Failed to write README.md");
}
/// Initialize AppState with real filesystem for visual testing.
fn init_app_state(cx: &mut gpui::App) -> Arc<AppState> {
use client::Client;
use clock::FakeSystemClock;
use fs::RealFs;
use language::LanguageRegistry;
use node_runtime::NodeRuntime;
use session::Session;
let fs = Arc::new(RealFs::new(None, cx.background_executor().clone()));
let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone()));
let clock = Arc::new(FakeSystemClock::new());
let http_client = http_client::FakeHttpClient::with_404_response();
let client = Client::new(clock, http_client, cx);
let session = cx.new(|cx| session::AppSession::new(Session::test(), cx));
let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx));
let workspace_store = cx.new(|cx| workspace::WorkspaceStore::new(client.clone(), cx));
Arc::new(AppState {
client,
fs,
languages,
user_store,
workspace_store,
node_runtime: NodeRuntime::unavailable(),
build_window_options: |_, _| Default::default(),
session,
})
}

View File

@@ -6,6 +6,8 @@ pub(crate) mod mac_only_instance;
mod migrate;
mod open_listener;
mod quick_action_bar;
#[cfg(all(target_os = "macos", any(test, feature = "test-support")))]
pub mod visual_tests;
#[cfg(target_os = "windows")]
pub(crate) mod windows_only_instance;

View File

@@ -0,0 +1,539 @@
#![allow(dead_code)]
//! Visual testing infrastructure for Zed.
//!
//! This module provides utilities for visual regression testing of Zed's UI.
//! It allows capturing screenshots of the real Zed application window and comparing
//! them against baseline images.
//!
//! ## Important: Main Thread Requirement
//!
//! On macOS, the `VisualTestAppContext` must be created on the main thread.
//! Standard Rust tests run on worker threads, so visual tests that use
//! `VisualTestAppContext::new()` must be run with special consideration.
//!
//! ## Running Visual Tests
//!
//! Visual tests are marked with `#[ignore]` by default because:
//! 1. They require macOS with Screen Recording permission
//! 2. They need to run on the main thread
//! 3. They may produce different results on different displays/resolutions
//!
//! To run visual tests:
//! ```bash
//! # Run all visual tests (requires macOS, may need Screen Recording permission)
//! cargo test -p zed visual_tests -- --ignored --test-threads=1
//!
//! # Update baselines when UI intentionally changes
//! UPDATE_BASELINES=1 cargo test -p zed visual_tests -- --ignored --test-threads=1
//! ```
//!
//! ## Screenshot Output
//!
//! Screenshots are saved to the directory specified by `VISUAL_TEST_OUTPUT_DIR`
//! environment variable, or `target/visual_tests` by default.
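//!
//! ## Example
//!
//! A sketch of a typical test body built from the helpers in this module; the
//! baseline path, tolerance, and per-pixel threshold below are illustrative:
//!
//! ```ignore
//! let mut cx = VisualTestAppContext::new();
//! let app_state = init_visual_test(&mut cx);
//! let window = open_test_workspace(app_state, &mut cx).await?;
//! let screenshot = capture_and_save_screenshot(&mut cx, window.into(), None).await?;
//! assert_or_update_baseline(
//!     &screenshot,
//!     Path::new("crates/zed/test_fixtures/visual_tests/workspace.png"),
//!     0.01, // allow up to 1% of pixels to differ
//!     2,    // per-channel color tolerance
//! )?;
//! ```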
use anyhow::{Result, anyhow};
use gpui::{
AnyWindowHandle, AppContext as _, Empty, Size, VisualTestAppContext, WindowHandle, px, size,
};
use image::{ImageBuffer, Rgba, RgbaImage};
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use workspace::AppState;
/// Initialize a visual test context with all necessary Zed subsystems.
pub fn init_visual_test(cx: &mut VisualTestAppContext) -> Arc<AppState> {
cx.update(|cx| {
env_logger::builder().is_test(true).try_init().ok();
let app_state = AppState::test(cx);
gpui_tokio::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
audio::init(cx);
workspace::init(app_state.clone(), cx);
release_channel::init(semver::Version::new(0, 0, 0), cx);
command_palette::init(cx);
editor::init(cx);
project_panel::init(cx);
outline_panel::init(cx);
terminal_view::init(cx);
image_viewer::init(cx);
search::init(cx);
app_state
})
}
/// Open a test workspace with the given app state.
pub async fn open_test_workspace(
app_state: Arc<AppState>,
cx: &mut VisualTestAppContext,
) -> Result<WindowHandle<workspace::Workspace>> {
let window_size = size(px(1280.0), px(800.0));
let project = cx.update(|cx| {
project::Project::local(
app_state.client.clone(),
app_state.node_runtime.clone(),
app_state.user_store.clone(),
app_state.languages.clone(),
app_state.fs.clone(),
None,
false,
cx,
)
});
let window = cx.open_offscreen_window(window_size, |window, cx| {
cx.new(|cx| workspace::Workspace::new(None, project.clone(), app_state.clone(), window, cx))
})?;
cx.run_until_parked();
Ok(window)
}
/// Returns the default window size for visual tests (1280x800).
pub fn default_window_size() -> Size<gpui::Pixels> {
size(px(1280.0), px(800.0))
}
/// Waits for the UI to stabilize by running pending work and waiting for animations.
pub async fn wait_for_ui_stabilization(cx: &VisualTestAppContext) {
cx.run_until_parked();
cx.background_executor
.timer(Duration::from_millis(100))
.await;
cx.run_until_parked();
}
/// Captures a screenshot of the given window and optionally saves it to a file.
///
/// # Arguments
/// * `cx` - The visual test context
/// * `window` - The window to capture
/// * `output_path` - Optional path to save the screenshot
///
/// # Returns
/// The captured screenshot as an RgbaImage
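///
/// # Example
///
/// A minimal sketch (`cx` and `workspace` come from a visual test set up via
/// `init_visual_test`/`open_test_workspace`; the output path is illustrative):
///
/// ```ignore
/// let screenshot = capture_and_save_screenshot(
///     &mut cx,
///     workspace.into(),
///     Some(Path::new("target/visual_tests/workspace.png")),
/// )
/// .await?;
/// assert!(screenshot.width() > 0 && screenshot.height() > 0);
/// ```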
pub async fn capture_and_save_screenshot(
cx: &mut VisualTestAppContext,
window: AnyWindowHandle,
output_path: Option<&Path>,
) -> Result<RgbaImage> {
wait_for_ui_stabilization(cx).await;
let screenshot = cx.capture_screenshot(window).await?;
if let Some(path) = output_path {
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)?;
}
screenshot.save(path)?;
println!("Screenshot saved to: {}", path.display());
}
Ok(screenshot)
}
/// Check if we should update baselines (controlled by UPDATE_BASELINES env var).
pub fn should_update_baselines() -> bool {
std::env::var("UPDATE_BASELINES").is_ok()
}
/// Assert that a screenshot matches a baseline, or update the baseline if UPDATE_BASELINES is set.
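///
/// A typical call at the end of a visual test (the baseline path and the
/// tolerance/threshold values are illustrative):
///
/// ```ignore
/// assert_or_update_baseline(
///     &screenshot,
///     Path::new("crates/zed/test_fixtures/visual_tests/workspace.png"),
///     0.01, // fraction of pixels allowed to differ
///     10,   // per-channel color threshold
/// )?;
/// ```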
pub fn assert_or_update_baseline(
actual: &RgbaImage,
baseline_path: &Path,
tolerance: f64,
per_pixel_threshold: u8,
) -> Result<()> {
if should_update_baselines() {
save_baseline(actual, baseline_path)?;
println!("Updated baseline: {}", baseline_path.display());
Ok(())
} else {
assert_screenshot_matches(actual, baseline_path, tolerance, per_pixel_threshold)
}
}
/// Result of comparing two screenshots.
#[derive(Debug)]
pub struct ScreenshotComparison {
    /// Fraction of pixels that match (0.0 = none, 1.0 = all)
pub match_percentage: f64,
/// Optional diff image highlighting differences (red = different, green = same)
pub diff_image: Option<RgbaImage>,
/// Number of pixels that differ
pub diff_pixel_count: u64,
/// Total number of pixels compared
pub total_pixels: u64,
}
impl ScreenshotComparison {
/// Returns true if the images match within the given tolerance.
pub fn matches(&self, tolerance: f64) -> bool {
self.match_percentage >= (1.0 - tolerance)
}
}
/// Compare two screenshots with tolerance for minor differences (e.g., anti-aliasing).
///
/// # Arguments
/// * `actual` - The screenshot to test
/// * `expected` - The baseline screenshot to compare against
/// * `per_pixel_threshold` - Maximum color difference per channel (0-255) to consider pixels equal
///
/// # Returns
/// A `ScreenshotComparison` containing match statistics and an optional diff image.
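///
/// # Example
///
/// A sketch comparing a capture against a baseline loaded with `load_image`
/// (paths are illustrative):
///
/// ```ignore
/// let actual = load_image(Path::new("target/visual_tests/actual.png"))?;
/// let expected = load_image(Path::new("test_fixtures/visual_tests/baseline.png"))?;
/// let comparison = compare_screenshots(&actual, &expected, 10);
/// assert!(comparison.matches(0.01)); // allow up to 1% of pixels to differ
/// ```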
pub fn compare_screenshots(
actual: &RgbaImage,
expected: &RgbaImage,
per_pixel_threshold: u8,
) -> ScreenshotComparison {
let (width, height) = actual.dimensions();
let (exp_width, exp_height) = expected.dimensions();
if width != exp_width || height != exp_height {
return ScreenshotComparison {
match_percentage: 0.0,
diff_image: None,
diff_pixel_count: (width * height).max(exp_width * exp_height) as u64,
total_pixels: (width * height).max(exp_width * exp_height) as u64,
};
}
let total_pixels = (width * height) as u64;
let mut diff_pixel_count = 0u64;
let mut diff_image: RgbaImage = ImageBuffer::new(width, height);
for y in 0..height {
for x in 0..width {
let actual_pixel = actual.get_pixel(x, y);
let expected_pixel = expected.get_pixel(x, y);
let pixels_match =
pixels_are_similar(actual_pixel, expected_pixel, per_pixel_threshold);
if pixels_match {
diff_image.put_pixel(x, y, Rgba([0, 128, 0, 255]));
} else {
diff_pixel_count += 1;
diff_image.put_pixel(x, y, Rgba([255, 0, 0, 255]));
}
}
}
let matching_pixels = total_pixels - diff_pixel_count;
let match_percentage = if total_pixels > 0 {
matching_pixels as f64 / total_pixels as f64
} else {
1.0
};
ScreenshotComparison {
match_percentage,
diff_image: Some(diff_image),
diff_pixel_count,
total_pixels,
}
}
/// Check if two pixels are similar within a threshold.
fn pixels_are_similar(a: &Rgba<u8>, b: &Rgba<u8>, threshold: u8) -> bool {
let threshold = threshold as i16;
let diff_r = (a[0] as i16 - b[0] as i16).abs();
let diff_g = (a[1] as i16 - b[1] as i16).abs();
let diff_b = (a[2] as i16 - b[2] as i16).abs();
let diff_a = (a[3] as i16 - b[3] as i16).abs();
diff_r <= threshold && diff_g <= threshold && diff_b <= threshold && diff_a <= threshold
}
/// Assert that a screenshot matches a baseline image within tolerance.
///
/// # Arguments
/// * `actual` - The screenshot to test
/// * `baseline_path` - Path to the baseline image file
/// * `tolerance` - Fraction of pixels that may differ (0.0 to 1.0)
/// * `per_pixel_threshold` - Maximum color difference per channel (0-255) to consider pixels equal
///
/// # Returns
/// Ok(()) if the images match, Err with details if they don't.
pub fn assert_screenshot_matches(
actual: &RgbaImage,
baseline_path: &Path,
tolerance: f64,
per_pixel_threshold: u8,
) -> Result<()> {
if !baseline_path.exists() {
return Err(anyhow!(
"Baseline image not found at: {}. Run with UPDATE_BASELINES=1 to create it.",
baseline_path.display()
));
}
let expected = image::open(baseline_path)
.map_err(|e| anyhow!("Failed to open baseline image: {}", e))?
.to_rgba8();
let comparison = compare_screenshots(actual, &expected, per_pixel_threshold);
if comparison.matches(tolerance) {
Ok(())
} else {
let diff_path = baseline_path.with_extension("diff.png");
if let Some(diff_image) = &comparison.diff_image {
diff_image.save(&diff_path).ok();
}
let actual_path = baseline_path.with_extension("actual.png");
actual.save(&actual_path).ok();
Err(anyhow!(
"Screenshot does not match baseline.\n\
Match: {:.2}% (required: {:.2}%)\n\
Differing pixels: {} / {}\n\
Baseline: {}\n\
Actual saved to: {}\n\
Diff saved to: {}",
comparison.match_percentage * 100.0,
(1.0 - tolerance) * 100.0,
comparison.diff_pixel_count,
comparison.total_pixels,
baseline_path.display(),
actual_path.display(),
diff_path.display()
))
}
}
/// Save an image as the new baseline, creating parent directories if needed.
pub fn save_baseline(image: &RgbaImage, baseline_path: &Path) -> Result<()> {
if let Some(parent) = baseline_path.parent() {
std::fs::create_dir_all(parent)
.map_err(|e| anyhow!("Failed to create baseline directory: {}", e))?;
}
image
.save(baseline_path)
.map_err(|e| anyhow!("Failed to save baseline image: {}", e))?;
Ok(())
}
/// Load an image from a file path.
pub fn load_image(path: &Path) -> Result<RgbaImage> {
image::open(path)
.map_err(|e| anyhow!("Failed to load image from {}: {}", path.display(), e))
.map(|img| img.to_rgba8())
}
#[cfg(test)]
mod tests {
use super::*;
fn create_test_image(width: u32, height: u32, color: Rgba<u8>) -> RgbaImage {
let mut img = ImageBuffer::new(width, height);
for pixel in img.pixels_mut() {
*pixel = color;
}
img
}
#[test]
fn test_identical_images_match() {
let img1 = create_test_image(100, 100, Rgba([255, 0, 0, 255]));
let img2 = create_test_image(100, 100, Rgba([255, 0, 0, 255]));
let comparison = compare_screenshots(&img1, &img2, 0);
assert_eq!(comparison.match_percentage, 1.0);
assert_eq!(comparison.diff_pixel_count, 0);
assert!(comparison.matches(0.0));
}
#[test]
fn test_different_images_dont_match() {
let img1 = create_test_image(100, 100, Rgba([255, 0, 0, 255]));
let img2 = create_test_image(100, 100, Rgba([0, 255, 0, 255]));
let comparison = compare_screenshots(&img1, &img2, 0);
assert_eq!(comparison.match_percentage, 0.0);
assert_eq!(comparison.diff_pixel_count, 10000);
assert!(!comparison.matches(0.5));
}
#[test]
fn test_similar_images_match_with_threshold() {
let img1 = create_test_image(100, 100, Rgba([255, 0, 0, 255]));
let img2 = create_test_image(100, 100, Rgba([250, 5, 0, 255]));
let comparison_strict = compare_screenshots(&img1, &img2, 0);
assert_eq!(comparison_strict.match_percentage, 0.0);
let comparison_lenient = compare_screenshots(&img1, &img2, 10);
assert_eq!(comparison_lenient.match_percentage, 1.0);
}
#[test]
fn test_different_size_images() {
let img1 = create_test_image(100, 100, Rgba([255, 0, 0, 255]));
let img2 = create_test_image(200, 200, Rgba([255, 0, 0, 255]));
let comparison = compare_screenshots(&img1, &img2, 0);
assert_eq!(comparison.match_percentage, 0.0);
assert!(comparison.diff_image.is_none());
}
#[test]
fn test_partial_difference() {
let mut img1 = create_test_image(100, 100, Rgba([255, 0, 0, 255]));
let img2 = create_test_image(100, 100, Rgba([255, 0, 0, 255]));
for x in 0..50 {
for y in 0..100 {
img1.put_pixel(x, y, Rgba([0, 255, 0, 255]));
}
}
let comparison = compare_screenshots(&img1, &img2, 0);
assert_eq!(comparison.match_percentage, 0.5);
assert_eq!(comparison.diff_pixel_count, 5000);
assert!(comparison.matches(0.5));
assert!(!comparison.matches(0.49));
}
#[test]
#[ignore]
fn test_visual_test_smoke() {
let mut cx = VisualTestAppContext::new();
let _window = cx
.open_offscreen_window_default(|_, cx| cx.new(|_| Empty))
.expect("Failed to open offscreen window");
cx.run_until_parked();
}
#[test]
#[ignore]
fn test_workspace_opens() {
let mut cx = VisualTestAppContext::new();
let app_state = init_visual_test(&mut cx);
smol::block_on(async {
app_state
.fs
.as_fake()
.insert_tree(
"/project",
serde_json::json!({
"src": {
"main.rs": "fn main() {\n println!(\"Hello, world!\");\n}\n"
}
}),
)
.await;
});
let workspace_result = smol::block_on(open_test_workspace(app_state, &mut cx));
assert!(
workspace_result.is_ok(),
"Failed to open workspace: {:?}",
workspace_result.err()
);
cx.run_until_parked();
}
/// This test captures a screenshot of an empty Zed workspace.
///
/// Note: This test is ignored by default because:
/// 1. It requires macOS with Screen Recording permission granted
/// 2. It must run on the main thread (standard test threads won't work)
/// 3. Screenshot capture may fail in CI environments without display access
///
/// The test will gracefully handle screenshot failures and print an error
/// message rather than failing hard, to allow running in environments
/// where screen capture isn't available.
#[test]
#[ignore]
fn test_workspace_screenshot() {
let mut cx = VisualTestAppContext::new();
let app_state = init_visual_test(&mut cx);
smol::block_on(async {
app_state
.fs
.as_fake()
.insert_tree(
"/project",
serde_json::json!({
"src": {
"main.rs": "fn main() {\n println!(\"Hello, world!\");\n}\n"
},
"README.md": "# Test Project\n\nThis is a test project for visual testing.\n"
}),
)
.await;
});
let workspace = smol::block_on(open_test_workspace(app_state, &mut cx))
.expect("Failed to open workspace");
smol::block_on(async {
wait_for_ui_stabilization(&cx).await;
let screenshot_result = cx.capture_screenshot(workspace.into()).await;
match screenshot_result {
Ok(screenshot) => {
println!(
"Screenshot captured successfully: {}x{}",
screenshot.width(),
screenshot.height()
);
let output_dir = std::env::var("VISUAL_TEST_OUTPUT_DIR")
.unwrap_or_else(|_| "target/visual_tests".to_string());
let output_path = Path::new(&output_dir).join("workspace_screenshot.png");
if let Err(e) = std::fs::create_dir_all(&output_dir) {
eprintln!("Warning: Failed to create output directory: {}", e);
}
if let Err(e) = screenshot.save(&output_path) {
eprintln!("Warning: Failed to save screenshot: {}", e);
} else {
println!("Screenshot saved to: {}", output_path.display());
}
assert!(
screenshot.width() > 0,
"Screenshot width should be positive"
);
assert!(
screenshot.height() > 0,
"Screenshot height should be positive"
);
}
Err(e) => {
eprintln!(
"Screenshot capture failed (this may be expected in CI without screen recording permission): {}",
e
);
}
}
});
cx.run_until_parked();
}
}

Binary file not shown (new image, 230 KiB)

Binary file not shown (new image, 213 KiB)

View File

@@ -57,6 +57,35 @@ And to run the tests:
cargo test --workspace
```
## Visual Regression Tests
Zed includes visual regression tests that capture screenshots of real Zed windows and compare them against baseline images. These tests require macOS with Screen Recording permission.
### Prerequisites
You must grant Screen Recording permission to your terminal:
1. Run the visual test runner once; macOS will prompt for permission
2. Alternatively, grant it manually: System Settings > Privacy & Security > Screen Recording
3. Enable your terminal app (e.g., Terminal.app, iTerm2, Ghostty)
4. Restart your terminal after granting permission
### Running Visual Tests
```sh
cargo run -p zed --bin visual_test_runner --features visual-tests
```
### Updating Baselines
When UI changes are intentional, update the baseline images:
```sh
UPDATE_BASELINES=1 cargo run -p zed --bin visual_test_runner --features visual-tests
```
Baseline images are stored in `crates/zed/test_fixtures/visual_tests/` and should be committed to the repository.
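For reference, a visual test wires the helpers in `crates/zed/src/visual_tests.rs` together along these lines. This is a sketch only; the baseline filename and the tolerance values are illustrative, not project policy:
```rust
use std::path::Path;

// Sketch only: `init_visual_test`, `open_test_workspace`,
// `capture_and_save_screenshot`, and `assert_or_update_baseline` come from
// zed's `visual_tests` module.
fn workspace_visual_test() -> anyhow::Result<()> {
    // Must be created on the main thread on macOS.
    let mut cx = gpui::VisualTestAppContext::new();
    let app_state = init_visual_test(&mut cx);

    // Open an offscreen workspace window.
    let workspace = smol::block_on(open_test_workspace(app_state, &mut cx))?;

    // Waits for the UI to stabilize, then captures the window; passing
    // `Some(path)` would also write the PNG to disk.
    let screenshot =
        smol::block_on(capture_and_save_screenshot(&mut cx, workspace.into(), None))?;

    // Compares against the committed baseline, or rewrites it when
    // UPDATE_BASELINES=1 is set in the environment.
    assert_or_update_baseline(
        &screenshot,
        Path::new("crates/zed/test_fixtures/visual_tests/workspace_with_editor.png"),
        0.01, // fraction of pixels allowed to differ
        10,   // per-channel color threshold
    )
}
```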
## Troubleshooting
### Error compiling metal shaders

View File

@@ -112,6 +112,8 @@ And to run the tests:
cargo test --workspace
```
> **Note:** Visual regression tests are currently macOS-only and require Screen Recording permission. See [Building Zed for macOS](./macos.md#visual-regression-tests) for details.
## Installing from msys2
Zed does not support unofficial MSYS2 Zed packages built for Mingw-w64. Please report any issues you may have with [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed) to [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed).