Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 13 additions & 2 deletions android/src/main/java/com/oney/WebRTCModule/GetUserMediaImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@
* The implementation of {@code getUserMedia} extracted into a separate file in
* order to reduce complexity and to (somewhat) separate concerns.
*/
class GetUserMediaImpl {
public class GetUserMediaImpl {
/**
* The {@link Log} tag with which {@code GetUserMediaImpl} is to log.
*/
Expand All @@ -62,6 +62,15 @@ class GetUserMediaImpl {
private Promise displayMediaPromise;
private Intent mediaProjectionPermissionResultData;

/**
 * Returns the MediaProjection permission result data {@link Intent}.
 * This Intent can be used to create a MediaProjection for audio capture
 * via AudioPlaybackCaptureConfiguration.
 *
 * @return the permission result Intent, or {@code null} if the screen-capture
 *         permission flow has not completed yet (the assignment site is
 *         elsewhere in this class — not visible in this chunk)
 */
public Intent getMediaProjectionPermissionResultData() {
    return mediaProjectionPermissionResultData;
}

private final ServiceConnection mediaProjectionServiceConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName name, IBinder service) {
Expand Down Expand Up @@ -355,7 +364,9 @@ private void createScreenStream() {
}

// Cleanup
mediaProjectionPermissionResultData = null;
// Note: mediaProjectionPermissionResultData is intentionally NOT nulled here.
// It is retained so it can be reused to create a MediaProjection for
// screen share audio capture (AudioPlaybackCaptureConfiguration).
displayMediaPromise = null;
}

Expand Down
57 changes: 57 additions & 0 deletions android/src/main/java/com/oney/WebRTCModule/WebRTCModule.java
Original file line number Diff line number Diff line change
Expand Up @@ -129,9 +129,62 @@ private JavaAudioDeviceModule createAudioDeviceModule(ReactApplicationContext re
.setUseHardwareAcousticEchoCanceler(Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q)
.setUseHardwareNoiseSuppressor(Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q)
.setUseStereoOutput(true)
.setAudioBufferCallback((audioBuffer, audioFormat, channelCount, sampleRate, bytesRead, captureTimeNs) -> {
if (bytesRead > 0) {
WebRTCModuleOptions.ScreenAudioBytesProvider provider =
WebRTCModuleOptions.getInstance().screenAudioBytesProvider;
if (provider != null) {
java.nio.ByteBuffer screenBuffer = provider.getScreenAudioBytes(bytesRead);
if (screenBuffer != null && screenBuffer.remaining() > 0) {
mixScreenAudioIntoBuffer(audioBuffer, screenBuffer, bytesRead);
}
}
}
return captureTimeNs;
})
.createAudioDeviceModule();
}

/**
* Mixes screen audio into the microphone buffer using PCM 16-bit additive mixing
* with clamping. Handles different buffer sizes safely: each buffer is read only
* within its own bounds. When one buffer is shorter, the other's samples pass
* through unmodified (mic samples stay as-is, or screen-only samples are written).
*/
/**
 * Mixes screen-share audio into the microphone buffer using 16-bit PCM
 * additive mixing, clamping the sum to the signed 16-bit range.
 *
 * Buffer-size mismatches are handled safely:
 *  - where both buffers have a sample, the samples are summed;
 *  - where only the mic has a sample, it is left untouched;
 *  - where only the screen has a sample, it is written into the mic buffer,
 *    but never past the mic buffer's capacity (indexing past it would throw
 *    IndexOutOfBoundsException from ShortBuffer.put/get).
 *
 * @param micBuffer    captured microphone PCM16 data; mixed in place
 * @param screenBuffer screen-share PCM16 data; read only
 * @param bytesRead    number of valid bytes in {@code micBuffer}
 */
private static void mixScreenAudioIntoBuffer(java.nio.ByteBuffer micBuffer,
                                             java.nio.ByteBuffer screenBuffer,
                                             int bytesRead) {
    micBuffer.position(0);
    screenBuffer.position(0);

    // PCM16 samples are interpreted little-endian.
    micBuffer.order(java.nio.ByteOrder.LITTLE_ENDIAN);
    screenBuffer.order(java.nio.ByteOrder.LITTLE_ENDIAN);

    java.nio.ShortBuffer micShorts = micBuffer.asShortBuffer();
    java.nio.ShortBuffer screenShorts = screenBuffer.asShortBuffer();

    int micSamples = Math.min(bytesRead / 2, micShorts.remaining());
    int screenSamples = screenShorts.remaining();
    // The mic buffer is both read and written, so never index past its
    // capacity even when the screen buffer has more samples. (The previous
    // version used max(micSamples, screenSamples) unclamped, which threw
    // IndexOutOfBoundsException in the screen-only write below whenever the
    // screen buffer outsized the mic buffer.)
    int totalSamples = Math.min(Math.max(micSamples, screenSamples), micShorts.remaining());

    for (int i = 0; i < totalSamples; i++) {
        int sum;
        if (i >= micSamples) {
            // Screen-only region: no mic sample here — write the screen sample directly.
            sum = screenShorts.get(i);
        } else if (i >= screenSamples) {
            // Mic-only region: remaining mic samples stay as-is.
            break;
        } else {
            // Both buffers have data — additive mix.
            sum = micShorts.get(i) + screenShorts.get(i);
        }
        // Clamp to the 16-bit signed range to avoid wrap-around distortion.
        if (sum > Short.MAX_VALUE) sum = Short.MAX_VALUE;
        if (sum < Short.MIN_VALUE) sum = Short.MIN_VALUE;
        micShorts.put(i, (short) sum);
    }
}

@NonNull
@Override
public String getName() {
Expand All @@ -142,6 +195,10 @@ public AudioDeviceModule getAudioDeviceModule() {
return mAudioDeviceModule;
}

/**
 * Exposes the module's {@link GetUserMediaImpl} instance so host applications
 * can reach media-capture internals (e.g. the MediaProjection permission
 * result data used for screen-share audio capture).
 */
public GetUserMediaImpl getUserMediaImpl() {
    return getUserMediaImpl;
}

public PeerConnectionObserver getPeerConnectionObserver(int id) {
return mPeerConnectionObservers.get(id);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@
import org.webrtc.VideoEncoderFactory;
import org.webrtc.audio.AudioDeviceModule;

import java.nio.ByteBuffer;

public class WebRTCModuleOptions {
private static WebRTCModuleOptions instance;

Expand All @@ -20,6 +22,29 @@ public class WebRTCModuleOptions {
public boolean enableMediaProjectionService;
public AudioProcessingFactoryProvider audioProcessingFactoryProvider;

/**
* Provider for screen share audio bytes. When set, the AudioDeviceModule's
* AudioBufferCallback will mix screen audio into the mic buffer before
* WebRTC processing. This allows screen audio mixing to work alongside
* any audio processing factory (including noise cancellation).
*
* Set this when screen share audio capture starts, clear it when it stops.
*/
public volatile ScreenAudioBytesProvider screenAudioBytesProvider;

/**
 * Functional interface for providing screen audio bytes on demand.
 * Invoked from the AudioDeviceModule's audio buffer callback (see
 * {@code createAudioDeviceModule}); implementations should return quickly
 * and avoid blocking.
 */
@FunctionalInterface
public interface ScreenAudioBytesProvider {
    /**
     * Returns a ByteBuffer containing screen audio PCM data.
     *
     * @param bytesRequested number of bytes to read (matching mic buffer size)
     * @return ByteBuffer with screen audio, or null if not available
     */
    ByteBuffer getScreenAudioBytes(int bytesRequested);
}

public static WebRTCModuleOptions getInstance() {
if (instance == null) {
instance = new WebRTCModuleOptions();
Expand Down
18 changes: 18 additions & 0 deletions ios/RCTWebRTC/InAppScreenCaptureController.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
#import <Foundation/Foundation.h>
#import "CaptureController.h"
#import "CapturerEventsDelegate.h"

NS_ASSUME_NONNULL_BEGIN

@class InAppScreenCapturer;

/// Capture controller that drives an in-app, RPScreenRecorder-based screen
/// capturer. Owns the capturer and forwards start/stop lifecycle calls to it.
@interface InAppScreenCaptureController : CaptureController

/// Creates the controller with the capturer it will manage. The controller
/// retains the capturer and registers itself as the capturer's events delegate.
- (instancetype)initWithCapturer:(nonnull InAppScreenCapturer *)capturer;

/// The underlying RPScreenRecorder-based capturer.
@property(nonatomic, strong, readonly) InAppScreenCapturer *capturer;

@end

NS_ASSUME_NONNULL_END
49 changes: 49 additions & 0 deletions ios/RCTWebRTC/InAppScreenCaptureController.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
#if TARGET_OS_IOS

#import "InAppScreenCaptureController.h"
#import "InAppScreenCapturer.h"

@interface InAppScreenCaptureController () <CapturerEventsDelegate>
@end

@implementation InAppScreenCaptureController

#pragma mark - Lifecycle

/// Retains the capturer and registers as its events delegate so capture-ended
/// notifications can be relayed to this controller's own events delegate.
- (instancetype)initWithCapturer:(nonnull InAppScreenCapturer *)capturer {
    if ((self = [super init])) {
        _capturer = capturer;
        _capturer.eventsDelegate = self;
        self.deviceId = @"in-app-screen-capture";
    }
    return self;
}

- (void)dealloc {
    // Ensure the underlying recorder is stopped when the controller goes away.
    [self.capturer stopCapture];
}

#pragma mark - Capture control

- (void)startCapture {
    [self.capturer startCapture];
}

- (void)stopCapture {
    [self.capturer stopCapture];
}

/// Reports the track settings for this screen-capture session.
- (NSDictionary *)getSettings {
    NSString *device = self.deviceId ?: @"in-app-screen-capture";
    return @{
        @"deviceId": device,
        @"groupId": @"",
        @"frameRate": @(30)
    };
}

#pragma mark - CapturerEventsDelegate

/// Relays the end-of-capture event from the capturer to our own delegate.
- (void)capturerDidEnd:(RTCVideoCapturer *)capturer {
    [self.eventsDelegate capturerDidEnd:capturer];
}

@end

#endif
21 changes: 21 additions & 0 deletions ios/RCTWebRTC/InAppScreenCapturer.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
#import <AVFoundation/AVFoundation.h>
#import <WebRTC/RTCVideoCapturer.h>
#import "CapturerEventsDelegate.h"

NS_ASSUME_NONNULL_BEGIN

/// Video capturer for in-app screen sharing.
/// NOTE(review): presumably backed by RPScreenRecorder (suggested by the
/// .audioApp comment below) — implementation is not in this chunk; confirm
/// against InAppScreenCapturer.m.
@interface InAppScreenCapturer : RTCVideoCapturer

/// Delegate notified of capturer lifecycle events (e.g. capture ended).
@property(nonatomic, weak) id<CapturerEventsDelegate> eventsDelegate;

/// Callback invoked for each .audioApp CMSampleBuffer from RPScreenRecorder.
/// Set this before calling startCapture if audio mixing is desired.
@property(nonatomic, copy, nullable) void (^audioBufferHandler)(CMSampleBufferRef);

/// Initializes the capturer with the WebRTC delegate that receives video frames.
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate;

/// Starts screen capture (behavior defined in the .m, not visible here).
- (void)startCapture;

/// Stops screen capture.
- (void)stopCapture;

@end

NS_ASSUME_NONNULL_END
Loading
Loading