- 
 
 - 
  Notifications
 
You must be signed in to change notification settings  - Fork 404
 
Description
On macOS Tahoe (possibly earlier to a lesser extent?) Hyperion causes the WindowServer process to spike in CPU usage. This is very noticeable due to a vastly reduced frame rate even on my high-end M2 Max MacBook Pro and occasional freezes of Mission Control. This is related to Hyperion's screen recording feature. The high CPU usage shows up with the OSX as well as the QT recording backend, independent of the recording frequency and the scaling factor. When recording is disabled the WindowServer CPU cycles return to normal. I tested as of now the latest nightly builds of the ARM-native as well as the x86_64 version via Rosetta. Screen recording via QuickTime does not cause the WindowServer to go mad.
While I'm no developer myself I (think I) have a sufficiently well-founded understanding of programming. Thus I dug around a bit more and I (suspect I) found the root cause and analyzed it using an LLM:
Claude Sonnet 4.5
The Problem
In OsxFrameGrabber::grabFrame(), the code is using ScreenCaptureKit's SCScreenshotManager when built with SDK 15+ (SCScreenshotManager requires macOS 14+), but it's calling it synchronously for every single frame capture.
Here's what's happening on line 50-73 in the capture15() function:
dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
[SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent* content, NSError* error) {
 // ... setup capture ...
 [SCScreenshotManager captureImageWithFilter:filter
 configuration:config
 completionHandler:^(CGImageRef img, NSError* error) {
 // ... handle image ...
 dispatch_semaphore_signal(semaphore);
 }];
}];
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
The issue: Every single frame grab:
Calls getShareableContentWithCompletionHandler to enumerate all displays/windows
Waits synchronously for the async operation
Takes a screenshot
Waits synchronously again
This is incredibly inefficient. getShareableContentWithCompletionHandler is meant to be called once at setup, not on every frame. Enumerating all shareable content on every frame grab (potentially 10-60 times per second) is what's killing WindowServer.
The Fix
The code should:
Call getShareableContentWithCompletionHandler once during setup to get the display
Create an SCStream with an output handler
Reuse the stream for continuous capture instead of taking individual screenshots.
Claude's proposed fix
[untested & unverified] OsxFrameGrabber.mm
// STL includes
#include 
#include 
#include 
// Header
#include <grabber/osx/OsxFrameGrabber.h>
// ScreenCaptureKit
#if defined(SDK_15_AVAILABLE)
#include <ScreenCaptureKit/ScreenCaptureKit.h>
#endif
//Qt
#include 
#include 
#include 
// Constants
// File-local constants (anonymous namespace keeps them out of other TUs).
namespace {
// Compile-time switch for verbose discovery logging (used via DebugIf).
const bool verbose = false;
} //End of constants
#if defined(SDK_15_AVAILABLE)
// Stream-based capture for efficient continuous frame grabbing
// Receives frames from a persistent SCStream and keeps only the newest one.
// The class must conform to SCStreamOutput so that
// -stream:didOutputSampleBuffer:ofType: is recognized and delivered; the
// protocol annotation was stripped from the original paste (HTML ate the
// angle brackets). Also, '@EnD' is not a valid directive — Objective-C
// compiler directives are case-sensitive; it must be '@end'.
@interface FrameGrabberStreamOutput : NSObject <SCStreamOutput>
{
@public
	CGImageRef _latestFrame;  // Most recent frame; owned (+1) by this object.
	std::mutex _frameMutex;   // Guards _latestFrame between the GCD callback and grabFrame().
}
@end
@implementation FrameGrabberStreamOutput
{
	// Reused Core Image context. Creating a CIContext is expensive and
	// Apple's documentation recommends creating one and reusing it; the
	// original allocated a fresh context on every delivered frame.
	CIContext *_ciContext;
}

// Initializes with no frame captured yet and a reusable CIContext.
- (id)init
{
	self = [super init];
	if (self)
	{
		_latestFrame = nil;
		// File uses manual retain/release; keep the autoreleased context alive.
		_ciContext = [[CIContext context] retain];
	}
	return self;
}

// Releases the cached frame and the CIContext.
// The mutex is released in an inner scope BEFORE [super dealloc]: the
// original held the lock across [super dealloc], so the lock_guard would
// unlock an already-destroyed mutex (undefined behavior).
- (void)dealloc
{
	{
		std::lock_guard<std::mutex> lock(_frameMutex);
		if (_latestFrame)
		{
			CGImageRelease(_latestFrame);
			_latestFrame = nil;
		}
	}
	[_ciContext release];
	_ciContext = nil;
	[super dealloc];
}

// SCStreamOutput callback: invoked on the sample-handler GCD queue for every
// frame; converts the pixel buffer to a CGImage and swaps it into the cache.
- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type
{
	if (type != SCStreamOutputTypeScreen)
		return;

	// This runs up to the configured frame rate on a background queue; drain
	// autoreleased temporaries (CIImage, etc.) each invocation so memory does
	// not accumulate until the queue's pool drains.
	@autoreleasepool
	{
		CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
		if (!imageBuffer)
			return;
		CIImage *ciImage = [CIImage imageWithCVImageBuffer:imageBuffer];
		if (!ciImage)
			return;
		// createCGImage follows the Create Rule: we own the returned image.
		CGImageRef newFrame = [_ciContext createCGImage:ciImage fromRect:ciImage.extent];
		std::lock_guard<std::mutex> lock(_frameMutex);
		if (_latestFrame)
		{
			CGImageRelease(_latestFrame);
		}
		_latestFrame = newFrame;
	}
}

// Returns the most recent frame at +1 retain count (caller must release),
// or nil when no frame has arrived yet. Thread-safe.
- (CGImageRef)getLatestFrameAndRetain
{
	std::lock_guard<std::mutex> lock(_frameMutex);
	if (_latestFrame)
	{
		CGImageRetain(_latestFrame);
		return _latestFrame;
	}
	return nil;
}
@end
// Fallback screenshot method for single captures (used in setup/discovery)
// Fallback single-shot capture via SCScreenshotManager (used during setup,
// discovery, and when the stream has not delivered a frame yet).
// Blocks the calling thread until the asynchronous capture completes, so it
// must not run on the main queue.
// @param displayID  Display to capture (parameter renamed from 'id', which
//                   shadowed the Objective-C 'id' type).
// @param rect       Region to capture, in display points.
// @return +1 CGImageRef the caller must release, or nil on failure.
static CGImageRef captureScreenshot15(CGDirectDisplayID displayID, CGRect rect)
{
	dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
	__block CGImageRef image1 = nil;
	[SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent* content, NSError* error)
	{
		@autoreleasepool
		{
			if (error || !content)
			{
				dispatch_semaphore_signal(semaphore);
				return;
			}
			// Find the SCDisplay matching the requested display ID.
			SCDisplay* target = nil;
			for (SCDisplay *display in content.displays)
			{
				if (display.displayID == displayID)
				{
					target = display;
					break;
				}
			}
			if (!target)
			{
				dispatch_semaphore_signal(semaphore);
				return;
			}
			SCContentFilter* filter = [[SCContentFilter alloc] initWithDisplay:target excludingWindows:@[]];
			SCStreamConfiguration* config = [[SCStreamConfiguration alloc] init];
			config.sourceRect = rect;
			config.scalesToFit = false;
			config.captureResolution = SCCaptureResolutionBest;
			// Scale the request to backing-store pixels. Guard against a nil
			// display mode, and force floating-point division: the original
			// size_t / size_t division truncated before assigning to double.
			double sysScale = 1.0;
			CGDisplayModeRef modeRef = CGDisplayCopyDisplayMode(displayID);
			if (modeRef)
			{
				sysScale = static_cast<double>(CGDisplayModeGetPixelWidth(modeRef)) / static_cast<double>(CGDisplayModeGetWidth(modeRef));
				CGDisplayModeRelease(modeRef);
			}
			config.width = rect.size.width * sysScale;
			config.height = rect.size.height * sysScale;
			[SCScreenshotManager captureImageWithFilter:filter
				configuration:config
				completionHandler:^(CGImageRef img, NSError* error)
				{
					if (!error && img)
					{
						// Release the color space after the copy: the original
						// leaked one CGColorSpaceRef per captured frame
						// (Create Rule — we own what Create functions return).
						CGColorSpaceRef rgbSpace = CGColorSpaceCreateDeviceRGB();
						image1 = CGImageCreateCopyWithColorSpace(img, rgbSpace);
						CGColorSpaceRelease(rgbSpace);
					}
					dispatch_semaphore_signal(semaphore);
			}];
			[filter release];
			[config release];
		}
	}];
	dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
	dispatch_release(semaphore);
	return image1;
}
#endif
// Constructs the macOS screen grabber for the given display index.
// Capture stays disabled until setupDisplay()/setDisplayIndex() succeeds.
OsxFrameGrabber::OsxFrameGrabber(int display)
: Grabber("GRABBER-OSX")
, _screenIndex(display)
#if defined(SDK_15_AVAILABLE)
, _stream(nil)
, _streamOutput(nil)
#endif
{
// Grabber is enabled only after a display has been opened successfully.
_isEnabled = false;
// Scaling/decimation is delegated to the shared image resampler.
_useImageResampler = true;
}
// Stops the ScreenCaptureKit stream (if one is running) before destruction.
OsxFrameGrabber::~OsxFrameGrabber()
{
#if defined(SDK_15_AVAILABLE)
stopStream();
#endif
}
#if defined(SDK_15_AVAILABLE)
// Tears down the ScreenCaptureKit stream and its output handler.
// Safe to call repeatedly; both members end up nil.
void OsxFrameGrabber::stopStream()
{
if (_stream)
{
// Stop capture asynchronously; errors are only logged, never propagated.
[_stream stopCaptureWithCompletionHandler:^(NSError * _Nullable error) {
if (error)
{
Error(_log, "Error stopping stream: %s", [[error localizedDescription] UTF8String]);
}
}];
// NOTE(review): _stream is released immediately while stopCapture is still
// in flight; this assumes ScreenCaptureKit keeps the stream alive until the
// completion handler fires — confirm against the SCStream documentation.
// NOTE(review): _streamOutput is released without a prior
// -removeStreamOutput:type:error: call — verify this ordering is safe.
[_stream release];
_stream = nil;
}
if (_streamOutput)
{
	[_streamOutput release];
	_streamOutput = nil;
}
}
// Starts a persistent ScreenCaptureKit stream for the given display.
// Enumerates shareable content ONCE, builds the content filter and stream
// configuration, attaches a FrameGrabberStreamOutput, and starts capture.
// Blocks the calling thread until the asynchronous setup finishes.
// @param displayID  Display to stream.
// @return true when capture started successfully.
// NOTE(review): must not be called on the main queue — the semaphore wait
// would deadlock with the completion handlers. Confirm the caller's context.
bool OsxFrameGrabber::startStream(CGDirectDisplayID displayID)
{
	// Tear down any previous stream first.
	stopStream();

	dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
	__block bool success = false;

	[SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent* content, NSError* error)
	{
		@autoreleasepool
		{
			if (error || !content)
			{
				if (error)
				{
					Error(_log, "Failed to get shareable content: %s", [[error localizedDescription] UTF8String]);
				}
				dispatch_semaphore_signal(semaphore);
				return;
			}

			// Locate the SCDisplay matching the requested CGDirectDisplayID.
			SCDisplay* target = nil;
			for (SCDisplay *display in content.displays)
			{
				if (display.displayID == displayID)
				{
					target = display;
					break;
				}
			}
			if (!target)
			{
				Error(_log, "Display not found in shareable content");
				dispatch_semaphore_signal(semaphore);
				return;
			}

			SCContentFilter* filter = [[SCContentFilter alloc] initWithDisplay:target excludingWindows:@[]];
			SCStreamConfiguration* config = [[SCStreamConfiguration alloc] init];

			config.queueDepth = 3;
			config.pixelFormat = kCVPixelFormatType_32BGRA;
			config.showsCursor = false;
			config.scalesToFit = false;
			config.captureResolution = SCCaptureResolutionBest;

			// Capture the whole display.
			// NOTE(review): sourceRect is display-relative while CGDisplayBounds()
			// is in global coordinates; identical for the main display, but
			// verify behavior for secondary displays.
			CGRect bounds = CGDisplayBounds(displayID);
			config.sourceRect = bounds;

			// Request backing-store (pixel) resolution. Guard against a nil
			// display mode and avoid the original size_t / size_t integer
			// division, which truncated before the double assignment.
			double sysScale = 1.0;
			CGDisplayModeRef modeRef = CGDisplayCopyDisplayMode(displayID);
			if (modeRef)
			{
				sysScale = static_cast<double>(CGDisplayModeGetPixelWidth(modeRef)) / static_cast<double>(CGDisplayModeGetWidth(modeRef));
				CGDisplayModeRelease(modeRef);
			}
			config.width = bounds.size.width * sysScale;
			config.height = bounds.size.height * sysScale;

			// Cap the stream's delivery rate at 60 fps.
			config.minimumFrameInterval = CMTimeMake(1, 60);

			NSError* streamError = nil;
			_stream = [[SCStream alloc] initWithFilter:filter configuration:config delegate:nil];
			if (!_stream)
			{
				Error(_log, "Failed to create SCStream");
				[filter release];
				[config release];
				dispatch_semaphore_signal(semaphore);
				return;
			}

			_streamOutput = [[FrameGrabberStreamOutput alloc] init];

			// Check the method's BOOL return value, not the NSError out-param:
			// the error object is only meaningful when the call reports failure
			// (the original tested 'if (streamError)' instead).
			if (![_stream addStreamOutput:_streamOutput type:SCStreamOutputTypeScreen sampleHandlerQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0) error:&streamError])
			{
				Error(_log, "Failed to add stream output: %s", [[streamError localizedDescription] UTF8String]);
				[_stream release];
				_stream = nil;
				[_streamOutput release];
				_streamOutput = nil;
				[filter release];
				[config release];
				dispatch_semaphore_signal(semaphore);
				return;
			}

			[_stream startCaptureWithCompletionHandler:^(NSError * _Nullable error) {
				if (error)
				{
					Error(_log, "Failed to start capture: %s", [[error localizedDescription] UTF8String]);
					success = false;
				}
				else
				{
					Info(_log, "SCStream capture started successfully");
					success = true;
				}
				dispatch_semaphore_signal(semaphore);
			}];

			[filter release];
			[config release];
		}
	}];
	dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
	dispatch_release(semaphore);
	return success;
}
#endif
// Verifies screen-capture permission (on SDK 15+ builds) and opens the
// configured display. Returns true when the grabber is ready to capture.
bool OsxFrameGrabber::setupDisplay()
{
#if defined(SDK_15_AVAILABLE)
	// Prompt for permission only when it has not been granted already;
	// bail out if the user (or policy) denies it.
	if (!CGPreflightScreenCaptureAccess() && !CGRequestScreenCaptureAccess())
	{
		Error(_log, "Screen capture permission required to start the grabber");
		return false;
	}
#endif
	return setDisplayIndex(_screenIndex);
}
// Grabs one frame from the selected display into 'image'.
// Returns 0 on success (including when disabled), -1 when no display
// could be captured at all.
// NOTE(review): the parameter was probably 'Image<ColorRgb> & image' before
// the paste stripped the angle brackets — restore when applying.
int OsxFrameGrabber::grabFrame(Image & image)
{
int rc = 0;
if (_isEnabled && !_isDeviceInError)
{
CGImageRef dispImage = nil;
	#if defined(SDK_15_AVAILABLE)
		// Get the latest frame from the stream (non-blocking)
		if (_streamOutput)
		{
			dispImage = [_streamOutput getLatestFrameAndRetain];
		}
		
		// Fallback if no frame available yet or stream not running
		// NOTE(review): this fallback re-enumerates shareable content on every
		// call (captureScreenshot15); if the stream never starts, the expensive
		// per-frame path this change is meant to remove is still taken.
		if (!dispImage)
		{
			dispImage = captureScreenshot15(_display, CGDisplayBounds(_display));
		}
	#else
		dispImage = CGDisplayCreateImageForRect(_display, CGDisplayBounds(_display));
	#endif
	// display lost, use main
	if (dispImage == nullptr && _display != 0)
	{
		#if defined(SDK_15_AVAILABLE)
			dispImage = captureScreenshot15(kCGDirectMainDisplay, CGDisplayBounds(kCGDirectMainDisplay));
		#else
			dispImage = CGDisplayCreateImageForRect(kCGDirectMainDisplay, CGDisplayBounds(kCGDirectMainDisplay));
		#endif
	}
	// no displays connected, return
	if (dispImage == nullptr)
	{
		Error(_log, "No display connected...");
		return -1;
	}
	// Copy the raw pixel bytes out of the CGImage and hand them to the
	// resampler as BGR32 (stride taken from the image's bytes-per-row).
	CFDataRef imgData = CGDataProviderCopyData(CGImageGetDataProvider(dispImage));
	if (imgData != nullptr)
	{
		_imageResampler.processImage((uint8_t *)CFDataGetBytePtr(imgData), static_cast<int>(CGImageGetWidth(dispImage)), static_cast<int>(CGImageGetHeight(dispImage)), static_cast<int>(CGImageGetBytesPerRow(dispImage)), PixelFormat::BGR32, image);
		CFRelease(imgData);
	}
	CGImageRelease(dispImage);
}
return rc;
}
// Selects the display to capture by index into the active display list and
// (re)initializes capture when the index changes or the grabber is disabled.
// Returns true on success.
bool OsxFrameGrabber::setDisplayIndex(int index)
{
bool rc (true);
if(_screenIndex != index || !_isEnabled)
{
_screenIndex = index;
	// get list of displays
	CGDisplayCount dspyCnt = 0 ;
	CGDisplayErr err;
	err = CGGetActiveDisplayList(0, nullptr, &dspyCnt);
	if (err == kCGErrorSuccess && dspyCnt > 0)
	{
		CGDirectDisplayID *activeDspys = new CGDirectDisplayID [dspyCnt] ;
		err = CGGetActiveDisplayList(dspyCnt, activeDspys, &dspyCnt) ;
		if (err == kCGErrorSuccess)
		{
			CGImageRef image;
			if (_screenIndex + 1 > static_cast<int>(dspyCnt))
			{
				Error(_log, "Display with index %d is not available.", _screenIndex);
				rc = false;
			}
			else
			{
				_display = activeDspys[_screenIndex];
				// Take a one-off screenshot to verify the display is readable
				// before enabling continuous capture.
				#if defined(SDK_15_AVAILABLE)
					image = captureScreenshot15(_display, CGDisplayBounds(_display));
				#else
					image = CGDisplayCreateImageForRect(_display, CGDisplayBounds(_display));
				#endif
				if(image == nullptr)
				{
					setEnabled(false);
					Error(_log, "Failed to open main display, disable capture interface");
					rc = false;
				}
				else
				{
					#if defined(SDK_15_AVAILABLE)
						// Start the stream for continuous capture
						// NOTE(review): rc stays true on stream failure —
						// grabFrame() then falls back to per-frame screenshots.
						if (!startStream(_display))
						{
							Error(_log, "Failed to start capture stream, falling back to screenshot mode");
						}
					#endif
					
					setEnabled(true);
					rc = true;
					Info(_log, "Display [%u] opened with resolution: %ux%u@%ubit", _display, CGImageGetWidth(image), CGImageGetHeight(image), CGImageGetBitsPerPixel(image));
				}
				CGImageRelease(image);
			}
		}
		delete[] activeDspys;
	}
	else
	{
		rc=false;
	}
}
return rc;
}
// Enumerates active displays and reports them in Hyperion's discovery JSON
// format (device / device_name / type / video_inputs / default).
// @param params  Optional discovery parameters (only logged, not evaluated).
// @return Discovery description; empty object when no display was found.
QJsonObject OsxFrameGrabber::discover(const QJsonObject& params)
{
	DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());

	QJsonObject inputsDiscovered;

	// get list of displays
	CGDisplayCount dspyCnt = 0 ;
	CGDisplayErr err;
	err = CGGetActiveDisplayList(0, nullptr, &dspyCnt);
	if (err == kCGErrorSuccess && dspyCnt > 0)
	{
		CGDirectDisplayID *activeDspys = new CGDirectDisplayID [dspyCnt] ;
		err = CGGetActiveDisplayList(dspyCnt, activeDspys, &dspyCnt) ;
		if (err == kCGErrorSuccess)
		{
			inputsDiscovered["device"] = "osx";
			inputsDiscovered["device_name"] = "OSX";
			inputsDiscovered["type"] = "screen";

			QJsonArray video_inputs;
			QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };

			for (int i = 0; i < static_cast<int>(dspyCnt); ++i)
			{
				QJsonObject in;

				CGDirectDisplayID did = activeDspys[i];
				QString displayName;
				displayName = QString("Display:%1").arg(did);
				in["name"] = displayName;
				in["inputIdx"] = i;

				QJsonArray formats;
				QJsonObject format;
				QJsonArray resolutionArray;
				QJsonObject resolution;

				// Report the logical (point) resolution of the display.
				// The original also copied and immediately released a
				// CGDisplayModeRef without using it; that dead call is removed.
				CGRect rect = CGDisplayBounds(did);
				resolution["width"] = static_cast<int>(rect.size.width);
				resolution["height"] = static_cast<int>(rect.size.height);
				resolution["fps"] = fps;

				resolutionArray.append(resolution);
				format["resolutions"] = resolutionArray;
				formats.append(format);
				in["formats"] = formats;

				video_inputs.append(in);
			}
			inputsDiscovered["video_inputs"] = video_inputs;

			QJsonObject defaults, video_inputs_default, resolution_default;
			resolution_default["fps"] = _fps;
			video_inputs_default["resolution"] = resolution_default;
			video_inputs_default["inputIdx"] = 0;
			defaults["video_input"] = video_inputs_default;
			inputsDiscovered["default"] = defaults;
		}
		delete[] activeDspys;
	}

	if (inputsDiscovered.isEmpty())
	{
		DebugIf(verbose, _log, "No displays found to capture from!");
	}

	DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
	return inputsDiscovered;
}
What the fix does:
Creates a persistent SCStream instead of taking repeated screenshots
Calls getShareableContentWithCompletionHandler only once during setup (in startStream())
Uses a custom FrameGrabberStreamOutput class that implements SCStreamOutput protocol to receive frames continuously
Stores the latest frame in memory with thread-safe access
grabFrame() now just retrieves the latest frame from the stream output (non-blocking, fast)
Additional improvements:
Stream configuration includes frame rate limiting (60fps max via minimumFrameInterval)
Proper cleanup in destructor with stopStream()
Falls back to screenshot mode if stream fails to start
Thread-safe frame access using mutex
Reduced queue depth from 5 to 3 for lower latency
You'll also need to update the header file:
Add these member variables to the OsxFrameGrabber class:
#if defined(SDK_15_AVAILABLE)
 SCStream* _stream;
 FrameGrabberStreamOutput* _streamOutput;
 void stopStream();
 bool startStream(CGDirectDisplayID displayID);
#endif
This should dramatically reduce WindowServer CPU usage since you're no longer enumerating all shareable content on every frame grab.