Description
I'm using livekit_client: ^2.4.9 (previously also tested with ^2.4.1) and followed the official documentation for iOS screen sharing via a Broadcast Extension:
🔗 https://github.com/flutter-webrtc/flutter-webrtc/wiki/iOS-Screen-Sharing#broadcast-extension-quick-setup
Despite correctly setting up the broadcast extension, the app group, and all other required configuration, my app is not listed in the iOS screen recording picker when starting a broadcast. The picker only shows the default screen recording option; my app's extension is missing.
What I've done:
- Created a Broadcast Extension target in Xcode
- Set the same App Group (e.g., group.com.mycompany.myapp) for the main app and the extension (a runtime check for this is sketched right after this list)
- Followed all steps from the linked documentation
- Used useiOSBroadcastExtension: true in setScreenShareEnabled(...)
- Implemented a Flutter method channel and native iOS code to handle ReplayKit
- Tested on a physical device
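To rule out an App Group mismatch, the group can also be checked at runtime. Below is a minimal diagnostic sketch (a hypothetical helper, not part of my code) that both the main app and the extension target can call; if either side logs nil, the entitlement or group ID on that target does not match.

import Foundation
import OSLog

// Diagnostic sketch only: call from AppDelegate.didFinishLaunching and from
// SampleHandler.init. Both targets must log a non-nil container path for the
// shared App Group to actually be usable.
func verifyAppGroupAccess() {
    let groupId = "group.com.mycompany.myapp" // must match the App Group entitlement of both targets
    let container = FileManager.default
        .containerURL(forSecurityApplicationGroupIdentifier: groupId)
    os_log(.debug, log: .default, "app group container: %{public}s",
           container?.path ?? "nil (entitlement missing or group ID mismatch)")
}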
Expected Behavior:
- When starting a screen share, the app extension should appear in the broadcast list
- Selecting it should start the ReplayKit broadcast and stream to the LiveKit room
Actual Behavior:
- The broadcast picker never lists my app's extension; only the built-in screen recording option appears, so the extension can never be selected.
Code:
Flutter
void _enableScreenShare(RtcViewmodel viewModel) async {
  final participant = viewModel.room.localParticipant;

  if (lkPlatformIsDesktop()) {
    try {
      final source = await showDialog<DesktopCapturerSource>(
        context: context,
        builder: (context) => ScreenSelectDialog(),
      );
      if (source == null) {
        if (kDebugMode) {
          print('cancelled screenshare');
        }
        return;
      }
      if (kDebugMode) {
        print('DesktopCapturerSource: ${source.id}');
      }
      var track = await LocalVideoTrack.createScreenShareTrack(
        ScreenShareCaptureOptions(
          sourceId: source.id,
          maxFrameRate: 15.0,
        ),
      );
      await participant?.publishVideoTrack(track);
    } catch (e) {
      if (kDebugMode) {
        print('could not publish video: $e');
      }
    }
    return;
  }

  if (lkPlatformIs(PlatformType.android)) {
    // Android specific
    bool hasCapturePermission = await Helper.requestCapturePermission();
    if (!hasCapturePermission) {
      return;
    }

    requestBackgroundPermission([bool isRetry = false]) async {
      // Required for android screenshare.
      try {
        bool hasPermissions = await FlutterBackground.hasPermissions;
        var appName = await Utils.getAppName();
        if (!isRetry) {
          var androidConfig = FlutterBackgroundAndroidConfig(
            notificationTitle: 'Screen Sharing',
            notificationText: '$appName is sharing the screen.',
            notificationImportance: AndroidNotificationImportance.high,
            // notificationIcon: const AndroidResource(
            //     name: 'livekit_ic_launcher', defType: 'mipmap'),
          );
          hasPermissions =
              await FlutterBackground.initialize(androidConfig: androidConfig);
        }
        if (hasPermissions && !FlutterBackground.isBackgroundExecutionEnabled) {
          await FlutterBackground.enableBackgroundExecution();
        }
      } catch (e) {
        if (!isRetry) {
          return await Future<void>.delayed(const Duration(seconds: 1),
              () => requestBackgroundPermission(true));
        }
        if (kDebugMode) {
          print('could not publish video: $e');
        }
      }
    }

    await requestBackgroundPermission();
  }

  if (lkPlatformIs(PlatformType.iOS)) {
    try {
      await participant?.setScreenShareEnabled(true,
          captureScreenAudio: false,
          screenShareCaptureOptions: const ScreenShareCaptureOptions(
              useiOSBroadcastExtension: true));
      return;
    } catch (e) {
      if (kDebugMode) {
        print('could not publish screen share on iOS: $e');
      }
    }
    return;
  }

  if (lkPlatformIsWebMobile()) {
    if (mounted) {
      await context
          .showErrorDialog('Screen share is not supported on mobile web');
    }
    return;
  }

  await participant?.setScreenShareEnabled(true, captureScreenAudio: true);
}
replay_kit.dart
class ReplayKitChannel {
  static const String kReplayKitChannel = 'my_app/replaykit-channel';
  static const MethodChannel _replayKitChannel =
      MethodChannel(kReplayKitChannel);

  static void listenMethodChannel(Room room) {
    _replayKitChannel.setMethodCallHandler((call) async {
      if (call.method == 'closeReplayKitFromNative') {
        if (!(room.localParticipant?.isScreenShareEnabled() ?? false)) {
          return;
        }
        await room.localParticipant?.setScreenShareEnabled(false);
      } else if (call.method == 'hasSampleBroadcast') {
        if (room.localParticipant?.isScreenShareEnabled() ?? true) return;
        await room.localParticipant?.setScreenShareEnabled(true);
      }
    });
  }

  static void startReplayKit() {
    if (!Platform.isIOS) return;
    _replayKitChannel.invokeMethod('startReplayKit');
  }

  static void closeReplayKit() {
    if (!Platform.isIOS) return;
    _replayKitChannel.invokeMethod('closeReplayKit');
  }
}
iOS:
SampleHandler.swift
import ReplayKit
import OSLog

let broadcastLogger = OSLog(subsystem: "com.mycompany.myapp", category: "Broadcast")

private enum Constants {
    // the App Group ID value that the app and the broadcast extension targets are setup with. It differs for each app.
    static let appGroupIdentifier = "group.com.mycompany.myapp"
}

class SampleHandler: RPBroadcastSampleHandler {
    private var clientConnection: SocketConnection?
    private var uploader: SampleUploader?
    private var frameCount: Int = 0

    var socketFilePath: String {
        let sharedContainer = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: Constants.appGroupIdentifier)
        return sharedContainer?.appendingPathComponent("rtc_SSFD").path ?? ""
    }

    override init() {
        super.init()
        if let connection = SocketConnection(filePath: socketFilePath) {
            clientConnection = connection
            setupConnection()
            uploader = SampleUploader(connection: connection)
        }
        os_log(.debug, log: broadcastLogger, "%{public}s", socketFilePath)
    }

    override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
        // User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional.
        frameCount = 0
        DarwinNotificationCenter.shared.postNotification(.broadcastStarted)
        openConnection()
        startReplayKit()
    }

    override func broadcastPaused() {
        // User has requested to pause the broadcast. Samples will stop being delivered.
    }

    override func broadcastResumed() {
        // User has requested to resume the broadcast. Samples delivery will resume.
    }

    override func broadcastFinished() {
        // User has requested to finish the broadcast.
        DarwinNotificationCenter.shared.postNotification(.broadcastStopped)
        clientConnection?.close()
        closeReplayKit()
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        switch sampleBufferType {
        case RPSampleBufferType.video:
            uploader?.send(sample: sampleBuffer)
        default:
            break
        }
    }
}

private extension SampleHandler {
    func setupConnection() {
        clientConnection?.didClose = { [weak self] error in
            os_log(.debug, log: broadcastLogger, "client connection did close \(String(describing: error))")
            if let error = error {
                self?.finishBroadcastWithError(error)
            } else {
                // the displayed failure message is more user friendly when using NSError instead of Error
                let JMScreenSharingStopped = 10001
                let customError = NSError(domain: RPRecordingErrorDomain, code: JMScreenSharingStopped, userInfo: [NSLocalizedDescriptionKey: "Screen sharing stopped"])
                self?.finishBroadcastWithError(customError)
            }
        }
    }

    func openConnection() {
        let queue = DispatchQueue(label: "broadcast.connectTimer")
        let timer = DispatchSource.makeTimerSource(queue: queue)
        timer.schedule(deadline: .now(), repeating: .milliseconds(100), leeway: .milliseconds(500))
        timer.setEventHandler { [weak self] in
            guard self?.clientConnection?.open() == true else {
                return
            }
            timer.cancel()
        }
        timer.resume()
    }

    func startReplayKit() {
        let group = UserDefaults(suiteName: Constants.appGroupIdentifier)
        group!.set(false, forKey: "closeReplayKitFromNative")
        group!.set(false, forKey: "closeReplayKitFromFlutter")
        group!.set(true, forKey: "hasSampleBroadcast")
    }

    func closeReplayKit() {
        let group = UserDefaults(suiteName: Constants.appGroupIdentifier)
        group!.set(true, forKey: "closeReplayKitFromNative")
        group!.set(false, forKey: "hasSampleBroadcast")
    }
}
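One note on the shared flags above: the AppDelegate (shown further below) writes closeReplayKitFromFlutter, but SampleHandler never reads it back, so presumably the extension is meant to end the broadcast when that flag flips. A minimal sketch of such a check (hypothetical helper, would live in SampleHandler.swift so it can see the private Constants enum, e.g. called periodically or from processSampleBuffer):

extension SampleHandler {
    // Hypothetical helper: ends the broadcast when the main app has set
    // closeReplayKitFromFlutter in the shared app group defaults.
    func checkFlutterCloseRequest() {
        let group = UserDefaults(suiteName: Constants.appGroupIdentifier)
        guard group?.bool(forKey: "closeReplayKitFromFlutter") == true else { return }
        let error = NSError(domain: RPRecordingErrorDomain,
                            code: 10002, // arbitrary code, same idea as JMScreenSharingStopped above
                            userInfo: [NSLocalizedDescriptionKey: "Screen sharing stopped from the app"])
        finishBroadcastWithError(error)
    }
}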
SampleUploader.swift
import Foundation
import ReplayKit
import OSLog

private enum Constants {
    static let bufferMaxLength = 10240
}

class SampleUploader {
    private static var imageContext = CIContext(options: nil)

    @Atomic private var isReady = false
    private var connection: SocketConnection
    private var dataToSend: Data?
    private var byteIndex = 0
    private let serialQueue: DispatchQueue

    init(connection: SocketConnection) {
        self.connection = connection
        self.serialQueue = DispatchQueue(label: "org.jitsi.meet.broadcast.sampleUploader")
        setupConnection()
    }

    @discardableResult func send(sample buffer: CMSampleBuffer) -> Bool {
        guard isReady else {
            return false
        }
        isReady = false
        dataToSend = prepare(sample: buffer)
        byteIndex = 0
        serialQueue.async { [weak self] in
            self?.sendDataChunk()
        }
        return true
    }
}

private extension SampleUploader {
    func setupConnection() {
        connection.didOpen = { [weak self] in
            self?.isReady = true
        }
        connection.streamHasSpaceAvailable = { [weak self] in
            self?.serialQueue.async {
                if let success = self?.sendDataChunk() {
                    self?.isReady = !success
                }
            }
        }
    }

    @discardableResult func sendDataChunk() -> Bool {
        guard let dataToSend = dataToSend else {
            return false
        }
        var bytesLeft = dataToSend.count - byteIndex
        var length = bytesLeft > Constants.bufferMaxLength ? Constants.bufferMaxLength : bytesLeft
        length = dataToSend[byteIndex..<(byteIndex + length)].withUnsafeBytes {
            guard let ptr = $0.bindMemory(to: UInt8.self).baseAddress else {
                return 0
            }
            return connection.writeToStream(buffer: ptr, maxLength: length)
        }
        if length > 0 {
            byteIndex += length
            bytesLeft -= length
            if bytesLeft == 0 {
                self.dataToSend = nil
                byteIndex = 0
            }
        } else {
            os_log(.debug, log: broadcastLogger, "writeBufferToStream failure")
        }
        return true
    }

    func prepare(sample buffer: CMSampleBuffer) -> Data? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(buffer) else {
            os_log(.debug, log: broadcastLogger, "image buffer not available")
            return nil
        }
        CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
        let scaleFactor = 1.0
        let width = CVPixelBufferGetWidth(imageBuffer) / Int(scaleFactor)
        let height = CVPixelBufferGetHeight(imageBuffer) / Int(scaleFactor)
        let orientation = CMGetAttachment(buffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil)?.uintValue ?? 0
        let scaleTransform = CGAffineTransform(scaleX: CGFloat(1.0 / scaleFactor), y: CGFloat(1.0 / scaleFactor))
        let bufferData = self.jpegData(from: imageBuffer, scale: scaleTransform)
        CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly)
        guard let messageData = bufferData else {
            os_log(.debug, log: broadcastLogger, "corrupted image buffer")
            return nil
        }
        let httpResponse = CFHTTPMessageCreateResponse(nil, 200, nil, kCFHTTPVersion1_1).takeRetainedValue()
        CFHTTPMessageSetHeaderFieldValue(httpResponse, "Content-Length" as CFString, String(messageData.count) as CFString)
        CFHTTPMessageSetHeaderFieldValue(httpResponse, "Buffer-Width" as CFString, String(width) as CFString)
        CFHTTPMessageSetHeaderFieldValue(httpResponse, "Buffer-Height" as CFString, String(height) as CFString)
        CFHTTPMessageSetHeaderFieldValue(httpResponse, "Buffer-Orientation" as CFString, String(orientation) as CFString)
        CFHTTPMessageSetBody(httpResponse, messageData as CFData)
        let serializedMessage = CFHTTPMessageCopySerializedMessage(httpResponse)?.takeRetainedValue() as Data?
        return serializedMessage
    }

    func jpegData(from buffer: CVPixelBuffer, scale scaleTransform: CGAffineTransform) -> Data? {
        let image = CIImage(cvPixelBuffer: buffer).transformed(by: scaleTransform)
        guard let colorSpace = image.colorSpace else {
            return nil
        }
        let options: [CIImageRepresentationOption: Float] = [kCGImageDestinationLossyCompressionQuality as CIImageRepresentationOption: 1.0]
        return SampleUploader.imageContext.jpegRepresentation(of: image, colorSpace: colorSpace, options: options)
    }
}
SocketConnection.swift
import Foundation
import OSLog

class SocketConnection: NSObject {
    var didOpen: (() -> Void)?
    var didClose: ((Error?) -> Void)?
    var streamHasSpaceAvailable: (() -> Void)?

    private let filePath: String
    private var socketHandle: Int32 = -1
    private var address: sockaddr_un?

    private var inputStream: InputStream?
    private var outputStream: OutputStream?

    private var networkQueue: DispatchQueue?
    private var shouldKeepRunning = false

    init?(filePath path: String) {
        filePath = path
        socketHandle = Darwin.socket(AF_UNIX, SOCK_STREAM, 0)
        guard socketHandle != -1 else {
            os_log(.debug, log: broadcastLogger, "failure: create socket")
            return nil
        }
    }

    func open() -> Bool {
        os_log(.debug, log: broadcastLogger, "open socket connection")
        guard FileManager.default.fileExists(atPath: filePath) else {
            os_log(.debug, log: broadcastLogger, "failure: socket file missing")
            return false
        }
        guard setupAddress() == true else {
            return false
        }
        guard connectSocket() == true else {
            return false
        }
        setupStreams()
        inputStream?.open()
        outputStream?.open()
        return true
    }

    func close() {
        unscheduleStreams()
        inputStream?.delegate = nil
        outputStream?.delegate = nil
        inputStream?.close()
        outputStream?.close()
        inputStream = nil
        outputStream = nil
    }

    func writeToStream(buffer: UnsafePointer<UInt8>, maxLength length: Int) -> Int {
        outputStream?.write(buffer, maxLength: length) ?? 0
    }
}

extension SocketConnection: StreamDelegate {
    func stream(_ aStream: Stream, handle eventCode: Stream.Event) {
        switch eventCode {
        case .openCompleted:
            os_log(.debug, log: broadcastLogger, "client stream open completed")
            if aStream == outputStream {
                didOpen?()
            }
        case .hasBytesAvailable:
            if aStream == inputStream {
                var buffer: UInt8 = 0
                let numberOfBytesRead = inputStream?.read(&buffer, maxLength: 1)
                if numberOfBytesRead == 0 && aStream.streamStatus == .atEnd {
                    os_log(.debug, log: broadcastLogger, "server socket closed")
                    close()
                    notifyDidClose(error: nil)
                }
            }
        case .hasSpaceAvailable:
            if aStream == outputStream {
                streamHasSpaceAvailable?()
            }
        case .errorOccurred:
            os_log(.debug, log: broadcastLogger, "client stream error occurred: \(String(describing: aStream.streamError))")
            close()
            notifyDidClose(error: aStream.streamError)
        default:
            break
        }
    }
}

private extension SocketConnection {
    func setupAddress() -> Bool {
        var addr = sockaddr_un()
        guard filePath.count < MemoryLayout.size(ofValue: addr.sun_path) else {
            os_log(.debug, log: broadcastLogger, "failure: fd path is too long")
            return false
        }
        _ = withUnsafeMutablePointer(to: &addr.sun_path.0) { ptr in
            filePath.withCString {
                strncpy(ptr, $0, filePath.count)
            }
        }
        address = addr
        return true
    }

    func connectSocket() -> Bool {
        guard var addr = address else {
            return false
        }
        let status = withUnsafePointer(to: &addr) { ptr in
            ptr.withMemoryRebound(to: sockaddr.self, capacity: 1) {
                Darwin.connect(socketHandle, $0, socklen_t(MemoryLayout<sockaddr_un>.size))
            }
        }
        guard status == noErr else {
            os_log(.debug, log: broadcastLogger, "failure: \(status)")
            return false
        }
        return true
    }

    func setupStreams() {
        var readStream: Unmanaged<CFReadStream>?
        var writeStream: Unmanaged<CFWriteStream>?
        CFStreamCreatePairWithSocket(kCFAllocatorDefault, socketHandle, &readStream, &writeStream)
        inputStream = readStream?.takeRetainedValue()
        inputStream?.delegate = self
        inputStream?.setProperty(kCFBooleanTrue, forKey: Stream.PropertyKey(kCFStreamPropertyShouldCloseNativeSocket as String))
        outputStream = writeStream?.takeRetainedValue()
        outputStream?.delegate = self
        outputStream?.setProperty(kCFBooleanTrue, forKey: Stream.PropertyKey(kCFStreamPropertyShouldCloseNativeSocket as String))
        scheduleStreams()
    }

    func scheduleStreams() {
        shouldKeepRunning = true
        networkQueue = DispatchQueue.global(qos: .userInitiated)
        networkQueue?.async { [weak self] in
            self?.inputStream?.schedule(in: .current, forMode: .common)
            self?.outputStream?.schedule(in: .current, forMode: .common)
            RunLoop.current.run()
            var isRunning = false
            repeat {
                isRunning = self?.shouldKeepRunning ?? false && RunLoop.current.run(mode: .default, before: .distantFuture)
            } while (isRunning)
        }
    }

    func unscheduleStreams() {
        networkQueue?.sync { [weak self] in
            self?.inputStream?.remove(from: .current, forMode: .common)
            self?.outputStream?.remove(from: .current, forMode: .common)
        }
        shouldKeepRunning = false
    }

    func notifyDidClose(error: Error?) {
        if didClose != nil {
            didClose?(error)
        }
    }
}
DarwinNotification.swift
import Foundation

enum DarwinNotification: String {
    case broadcastStarted = "iOS_BroadcastStarted"
    case broadcastStopped = "iOS_BroadcastStopped"
}

class DarwinNotificationCenter {
    static let shared = DarwinNotificationCenter()

    private let notificationCenter: CFNotificationCenter

    init() {
        notificationCenter = CFNotificationCenterGetDarwinNotifyCenter()
    }

    func postNotification(_ name: DarwinNotification) {
        CFNotificationCenterPostNotification(notificationCenter, CFNotificationName(rawValue: name.rawValue as CFString), nil, nil, true)
    }
}
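For completeness, the DarwinNotificationCenter above only posts; the notifications are consumed by the flutter_webrtc native side. A sketch of how an observer could be registered in the main app, purely to debug whether broadcastStarted/broadcastStopped ever fire (this observer class is hypothetical and not part of my code):

import Foundation

// Hypothetical observer counterpart, useful to verify the extension's Darwin
// notifications actually arrive in the main app while debugging.
final class DarwinNotificationObserver {
    static let shared = DarwinNotificationObserver()
    var onNotification: ((String) -> Void)?

    func observe(_ name: DarwinNotification) {
        let center = CFNotificationCenterGetDarwinNotifyCenter()
        let observer = Unmanaged.passUnretained(self).toOpaque()
        CFNotificationCenterAddObserver(center, observer, { _, observer, cfName, _, _ in
            // C callback: recover the instance from the observer pointer and forward the name.
            guard let observer = observer, let cfName = cfName else { return }
            let instance = Unmanaged<DarwinNotificationObserver>
                .fromOpaque(observer)
                .takeUnretainedValue()
            instance.onNotification?(cfName.rawValue as String)
        }, name.rawValue as CFString, nil, .deliverImmediately)
    }
}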
Atomic.swift
import Foundation

@propertyWrapper
struct Atomic<Value> {
    private var value: Value
    private let lock = NSLock()

    init(wrappedValue value: Value) {
        self.value = value
    }

    var wrappedValue: Value {
        get { load() }
        set { store(newValue: newValue) }
    }

    func load() -> Value {
        lock.lock()
        defer { lock.unlock() }
        return value
    }

    mutating func store(newValue: Value) {
        lock.lock()
        defer { lock.unlock() }
        value = newValue
    }
}
AppDelegate.swift
import Flutter
import UIKit

@main
@objc class AppDelegate: FlutterAppDelegate {
    var replayKitChannel: FlutterMethodChannel! = nil
    var observeTimer: Timer! = nil
    var hasEmittedFirstSample = false

    override func application(
        _ application: UIApplication,
        didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
    ) -> Bool {
        guard let controller = window?.rootViewController as? FlutterViewController else {
            return super.application(application, didFinishLaunchingWithOptions: launchOptions)
        }

        // Setting up ReplayKit communication
        replayKitChannel = FlutterMethodChannel(name: "my_app/replaykit-channel",
                                                binaryMessenger: controller.binaryMessenger)

        // Handle Flutter method calls
        replayKitChannel.setMethodCallHandler { [weak self] (call: FlutterMethodCall, result: @escaping FlutterResult) in
            self?.handleReplayKitFromFlutter(result: result, call: call)
        }

        GeneratedPluginRegistrant.register(with: self)
        return super.application(application, didFinishLaunchingWithOptions: launchOptions)
    }

    func handleReplayKitFromFlutter(result: FlutterResult, call: FlutterMethodCall) {
        switch call.method {
        case "startReplayKit":
            self.hasEmittedFirstSample = false
            if let group = UserDefaults(suiteName: "group.com.mycompany.myapp") {
                group.set(false, forKey: "closeReplayKitFromNative")
                group.set(false, forKey: "closeReplayKitFromFlutter")
                self.observeReplayKitStateChanged()
            }
            result(true)
        case "closeReplayKit":
            if let group = UserDefaults(suiteName: "group.com.mycompany.myapp") {
                group.set(true, forKey: "closeReplayKitFromFlutter")
                result(true)
            }
        default:
            result(FlutterMethodNotImplemented)
        }
    }

    func observeReplayKitStateChanged() {
        if observeTimer != nil { return }
        let group = UserDefaults(suiteName: "group.com.mycompany.myapp")
        observeTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] timer in
            guard let self = self, let group = group else { return }
            let closeReplayKitFromNative = group.bool(forKey: "closeReplayKitFromNative")
            let hasSampleBroadcast = group.bool(forKey: "hasSampleBroadcast")
            if closeReplayKitFromNative {
                self.hasEmittedFirstSample = false
                self.replayKitChannel.invokeMethod("closeReplayKitFromNative", arguments: true)
            } else if hasSampleBroadcast, !self.hasEmittedFirstSample {
                self.hasEmittedFirstSample = true
                self.replayKitChannel.invokeMethod("hasSampleBroadcast", arguments: true)
            }
        }
    }
}
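One assumption about the polling loop above: once closeReplayKitFromNative is set, the timer keeps firing and re-invokes the channel every second, because the flag is never cleared and observeTimer is never invalidated. A small sketch of a stop helper that branch could call (hypothetical, not in my current code):

extension AppDelegate {
    // Hypothetical cleanup: reset the shared flags and stop polling once the
    // native-side close has been forwarded to Flutter.
    func stopObservingReplayKitState() {
        if let group = UserDefaults(suiteName: "group.com.mycompany.myapp") {
            group.set(false, forKey: "closeReplayKitFromNative")
            group.set(false, forKey: "hasSampleBroadcast")
        }
        observeTimer?.invalidate()
        observeTimer = nil
        hasEmittedFirstSample = false
    }
}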
Platform information
[✓] Flutter (Channel stable, 3.32.2, on macOS 15.5 24F74 darwin-arm64, locale en-IN) [602ms]
• Flutter version 3.32.2 on channel stable at /Users/develop/flutter
• Upstream repository https://github.com/flutter/flutter.git
• Framework revision 8defaa71a7 (7 weeks ago), 2025-06-04 11:02:51 -0700
• Engine revision 1091508939
• Dart version 3.8.1
• DevTools version 2.45.1
[✓] Android toolchain - develop for Android devices (Android SDK version 35.0.1) [2.5s]
• Android SDK at /Users/Library/Android/sdk
• Platform android-35, build-tools 35.0.1
• Java binary at: /Applications/Android Studio.app/Contents/jbr/Contents/Home/bin/java
This is the JDK bundled with the latest Android Studio installation on this machine.
To manually set the JDK path, use: `flutter config --jdk-dir="path/to/jdk"`.
• Java version OpenJDK Runtime Environment (build 21.0.6+-13368085-b895.109)
• All Android licenses accepted.
[✓] Xcode - develop for iOS and macOS (Xcode 16.4) [1,540ms]
• Xcode at /Applications/Xcode.app/Contents/Developer
• Build 16F6
• CocoaPods version 1.16.2
[✓] Chrome - develop for the web [10ms]
• Chrome at /Applications/Google Chrome.app/Contents/MacOS/Google Chrome
[✓] Android Studio (version 2024.3) [10ms]
• Android Studio at /Applications/Android Studio.app/Contents
• Flutter plugin can be installed from:
🔨 https://plugins.jetbrains.com/plugin/9212-flutter
• Dart plugin can be installed from:
🔨 https://plugins.jetbrains.com/plugin/6351-dart
• Java version OpenJDK Runtime Environment (build 21.0.6+-13368085-b895.109)
[✓] VS Code (version 1.102.1) [9ms]
• VS Code at /Applications/Visual Studio Code.app/Contents
• Flutter extension version 3.114.0
[✓] Connected device (2 available) [6.4s]
• macOS (desktop) • macos • darwin-arm64 • macOS 15.5 24F74 darwin-arm64
• Chrome (web) • chrome • web-javascript • Google Chrome 138.0.7204.158
[✓] Network resources [1,733ms]
• All expected network resources are available.
• No issues found!