Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
58 changes: 57 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ view inside the editor.

## Features

- WebTransport streaming server in Rust, plus experimental WebRTC for runner previews, using hardware encoded HEVC/H.264 video
- WebTransport streaming server in Rust, plus experimental WebRTC for runner previews, using HEVC/H.264 video or full-resolution JPEG on CI runners
- Simulator control & inspection using private accessibility APIs
- CoreSimulator chrome asset rendering for device bezels
- NativeScript and React Native runtime inspector plugins, plus a native UIKit inspector framework for other apps
Expand Down Expand Up @@ -56,6 +56,62 @@ simdeck tap <udid> 0.5 0.5 --normalized
simdeck describe <udid> --format agent --max-depth 2
```

## Daemon

Manage the project daemon explicitly when needed:

```sh
simdeck daemon start
simdeck daemon status
simdeck daemon stop
```

`simdeck daemon` manages the normal per-project warm process. For an always-on
daemon that is available after login, use the macOS user service commands:

```sh
simdeck service on
simdeck service off
```

This uses a LaunchAgent, keeps the server bound to localhost by default, and is
best for agents or editor integrations that should be able to open SimDeck
without first starting a project daemon.

Use software H.264 when macOS screen recording starves the hardware encoder:

```sh
simdeck daemon start --video-codec h264-software
```

On GitHub Actions macOS runners where VideoToolbox hardware encode is not
available, use the experimental full-resolution JPEG data-channel stream:

```sh
simdeck daemon start --video-codec jpeg
# open http://127.0.0.1:4310?transport=webrtc-data
```

For LAN browser access:

```sh
simdeck ui --bind 0.0.0.0 --advertise-host 192.168.1.50 --open
```

Restart the CoreSimulator service layer when `simctl` reports a stale service
version or the live display gets stuck before the first frame:

```sh
simdeck core-simulator restart
```

You can also start or stop the CoreSimulator service layer explicitly:

```sh
simdeck core-simulator start
simdeck core-simulator shutdown
```

## CLI

```sh
Expand Down
146 changes: 146 additions & 0 deletions cli/XCWH264Encoder.m
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
#import "XCWH264Encoder.h"

#import <CoreMedia/CoreMedia.h>
#import <ImageIO/ImageIO.h>
#import <os/lock.h>
#import <QuartzCore/QuartzCore.h>
#import <VideoToolbox/VideoToolbox.h>
Expand All @@ -15,6 +16,7 @@ typedef NS_ENUM(NSUInteger, XCWVideoEncoderMode) {
XCWVideoEncoderModeHEVCHardware,
XCWVideoEncoderModeH264Hardware,
XCWVideoEncoderModeH264Software,
XCWVideoEncoderModeJPEG,
};

static XCWVideoEncoderMode XCWVideoEncoderModeFromEnvironment(void) {
Expand All @@ -25,6 +27,9 @@ static XCWVideoEncoderMode XCWVideoEncoderModeFromEnvironment(void) {
if ([value isEqualToString:@"h264-software"] || [value isEqualToString:@"software-h264"]) {
return XCWVideoEncoderModeH264Software;
}
if ([value isEqualToString:@"jpeg"] || [value isEqualToString:@"jpg"] || [value isEqualToString:@"mjpeg"]) {
return XCWVideoEncoderModeJPEG;
}
return XCWVideoEncoderModeHEVCHardware;
}

Expand All @@ -33,6 +38,8 @@ static CMVideoCodecType XCWVideoCodecTypeForMode(XCWVideoEncoderMode mode) {
case XCWVideoEncoderModeH264Hardware:
case XCWVideoEncoderModeH264Software:
return kCMVideoCodecType_H264;
case XCWVideoEncoderModeJPEG:
return kCMVideoCodecType_JPEG;
case XCWVideoEncoderModeHEVCHardware:
default:
return kCMVideoCodecType_HEVC;
Expand All @@ -45,6 +52,8 @@ static CMVideoCodecType XCWVideoCodecTypeForMode(XCWVideoEncoderMode mode) {
return @"h264";
case XCWVideoEncoderModeH264Software:
return @"h264-software";
case XCWVideoEncoderModeJPEG:
return @"jpeg";
case XCWVideoEncoderModeHEVCHardware:
default:
return @"hevc";
Expand All @@ -57,6 +66,8 @@ static CMVideoCodecType XCWVideoCodecTypeForMode(XCWVideoEncoderMode mode) {
return nil;
case XCWVideoEncoderModeH264Software:
return @"com.apple.videotoolbox.videoencoder.h264";
case XCWVideoEncoderModeJPEG:
return nil;
case XCWVideoEncoderModeHEVCHardware:
default:
return nil;
Expand Down Expand Up @@ -173,11 +184,106 @@ static uint32_t XCWReverseBits32(uint32_t value) {
return @"hevc";
case kCMVideoCodecType_H264:
return @"h264";
case kCMVideoCodecType_JPEG:
return @"jpeg";
default:
return [NSString stringWithFormat:@"0x%08x", (unsigned int)codecType];
}
}

// Returns the JPEG compression quality from SIMDECK_JPEG_QUALITY.
// Accepts values in [0.1, 1.0]; anything unset, non-finite, or out of
// range falls back to 1.0 (maximum quality).
static CGFloat XCWJPEGQualityFromEnvironment(void) {
    NSString *raw = [[NSProcessInfo processInfo] environment][@"SIMDECK_JPEG_QUALITY"];
    if (raw.length == 0) {
        return 1.0;
    }
    double parsed = raw.doubleValue;
    BOOL acceptable = isfinite(parsed) && parsed >= 0.1 && parsed <= 1.0;
    return acceptable ? (CGFloat)parsed : 1.0;
}

// Returns a shared device-RGB color space with process lifetime.
// Created exactly once (thread-safe via dispatch_once) and intentionally
// never released; callers must NOT CFRelease the returned reference.
static CGColorSpaceRef XCWDeviceRGBColorSpace(void) {
    static CGColorSpaceRef colorSpace = NULL;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        colorSpace = CGColorSpaceCreateDeviceRGB();
    });
    return colorSpace;
}

// Encodes a pixel buffer to JPEG bytes, or returns nil on any failure.
//
// Fast path: 32BGRA buffers are wrapped zero-copy in a CGImage that aliases
// the locked base address; the lock is held until after the image has been
// finalized into the destination, because the data provider does not copy.
// Fallback: other pixel formats (or a failed wrap) are converted through
// VTCreateCGImageFromCVPixelBuffer, which produces an independent image.
static NSData *XCWJPEGDataFromPixelBuffer(CVPixelBufferRef pixelBuffer) {
    if (pixelBuffer == NULL) {
        return nil;
    }

    CGImageRef image = NULL;
    BOOL didLockPixelBuffer = NO;
    OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
    if (pixelFormat == kCVPixelFormatType_32BGRA &&
        CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly) == kCVReturnSuccess) {
        didLockPixelBuffer = YES;
        void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
        size_t width = CVPixelBufferGetWidth(pixelBuffer);
        size_t height = CVPixelBufferGetHeight(pixelBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
        // Validate geometry before aliasing the memory (row stride must cover
        // at least width * 4 bytes for BGRA).
        if (baseAddress != NULL && width > 0 && height > 0 && bytesPerRow >= width * 4) {
            // NULL release callback: the provider does not own the bytes. The
            // memory stays valid because the read-only lock is held until the
            // end of this function on this path.
            CGDataProviderRef provider = CGDataProviderCreateWithData(NULL,
                                                                      baseAddress,
                                                                      bytesPerRow * height,
                                                                      NULL);
            if (provider != NULL) {
                // BGRA == 32-bit little-endian with alpha first, premultiplied.
                image = CGImageCreate(width,
                                      height,
                                      8,
                                      32,
                                      bytesPerRow,
                                      XCWDeviceRGBColorSpace(),
                                      kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst,
                                      provider,
                                      NULL,
                                      false,
                                      kCGRenderingIntentDefault);
                CGDataProviderRelease(provider);
            }
        }
    }

    if (image == NULL) {
        // Zero-copy wrap did not apply; release the lock (the fallback image
        // owns its own pixels) and let VideoToolbox do the conversion.
        if (didLockPixelBuffer) {
            CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
            didLockPixelBuffer = NO;
        }
        OSStatus imageStatus = VTCreateCGImageFromCVPixelBuffer(pixelBuffer, NULL, &image);
        if (imageStatus != noErr || image == NULL) {
            return nil;
        }
    }

    NSMutableData *data = [NSMutableData data];
    CGImageDestinationRef destination =
        CGImageDestinationCreateWithData((__bridge CFMutableDataRef)data,
                                         CFSTR("public.jpeg"),
                                         1,
                                         NULL);
    if (destination == NULL) {
        CGImageRelease(image);
        if (didLockPixelBuffer) {
            CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
        }
        return nil;
    }

    NSDictionary *properties = @{
        (__bridge NSString *)kCGImageDestinationLossyCompressionQuality: @(XCWJPEGQualityFromEnvironment()),
    };
    CGImageDestinationAddImage(destination, image, (__bridge CFDictionaryRef)properties);
    // Finalize actually writes the JPEG bytes into `data`.
    BOOL ok = CGImageDestinationFinalize(destination);
    CFRelease(destination);
    CGImageRelease(image);
    // Unlock only now: on the zero-copy path the destination read the pixels
    // directly from the buffer during finalize.
    if (didLockPixelBuffer) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    }
    return ok && data.length > 0 ? data : nil;
}

static NSData *XCWCopySampleData(CMSampleBufferRef sampleBuffer) {
CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
if (blockBuffer == NULL) {
Expand Down Expand Up @@ -505,6 +611,12 @@ - (BOOL)encodePixelBufferLocked:(CVPixelBufferRef)pixelBuffer {
return NO;
}

if (_encoderMode == XCWVideoEncoderModeJPEG) {
return [self encodeJPEGPixelBufferLocked:pixelBuffer
sourceWidth:sourceWidth
sourceHeight:sourceHeight];
}

if (![self ensureCompressionSessionWithWidth:targetWidth height:targetHeight]) {
return NO;
}
Expand Down Expand Up @@ -553,6 +665,40 @@ - (BOOL)encodePixelBufferLocked:(CVPixelBufferRef)pixelBuffer {
return YES;
}

// JPEG mode bypasses VideoToolbox entirely: the frame is encoded
// synchronously on the caller's thread and every output is a key frame.
// Caller must already hold the encoder lock ("Locked" suffix convention).
- (BOOL)encodeJPEGPixelBufferLocked:(CVPixelBufferRef)pixelBuffer
                        sourceWidth:(int32_t)sourceWidth
                       sourceHeight:(int32_t)sourceHeight {
    uint64_t startUs = (uint64_t)(CACurrentMediaTime() * 1000000.0);
    // The first submitted frame establishes the timestamp origin so that
    // emitted timestamps start near zero.
    if (_timestampOriginUs == 0) {
        _timestampOriginUs = startUs;
    }
    uint64_t presentationUs = startUs - _timestampOriginUs;

    NSData *encoded = XCWJPEGDataFromPixelBuffer(pixelBuffer);
    if (encoded.length == 0) {
        // No OSStatus exists on this path; -1 is the failure sentinel.
        _encodeFailureCount += 1;
        _lastEncodeStatus = -1;
        return NO;
    }

    _width = sourceWidth;
    _height = sourceHeight;
    _submittedFrameCount += 1;
    _outputFrameCount += 1;
    _keyFrameOutputCount += 1;  // every JPEG frame is independently decodable
    _lastEncodeStatus = noErr;
    uint64_t finishedUs = (uint64_t)(CACurrentMediaTime() * 1000000.0);
    _latestEncodeLatencyUs = finishedUs >= startUs ? finishedUs - startUs : 0;

    self.outputHandler(encoded,
                       presentationUs,
                       YES,  // always a key frame
                       @"jpeg",
                       nil,
                       CGSizeMake(sourceWidth, sourceHeight));
    return YES;
}

- (BOOL)ensureCompressionSessionWithWidth:(int32_t)width height:(int32_t)height {
if (_compressionSession != NULL && _width == width && _height == height) {
return YES;
Expand Down
Loading
Loading