Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
92 changes: 92 additions & 0 deletions android/src/main/java/com/oney/WebRTCModule/AudioTrackAdapter.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
package com.oney.WebRTCModule;

import android.util.Log;

import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;

import org.webrtc.AudioTrack;
import org.webrtc.AudioTrackSink;

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

/**
* Fires the W3C 'unmute' event on a remote audio track when the first
* decoded PCM buffer arrives via {@link AudioTrackSink}.
*
* IMPORTANT — only the initial muted → unmuted transition is detectable.
* Subsequent mute events (e.g. network stall mid-call) cannot be detected
* from the sink: Android's audio render path and WebRTC's NetEq synthesize
* silence / PLC frames whenever RTP stops, so {@code onData} keeps firing
* at a steady rate regardless of network state. For "remote participant
* muted their mic" UI, use the out-of-band participant state from your
* signaling layer — that is the correct source of truth, not this adapter.
*
* Only attach to remote audio tracks. {@code AudioTrackSink} callbacks
* are not delivered for local tracks.
*/
public class AudioTrackAdapter {
static final String TAG = AudioTrackAdapter.class.getCanonicalName();

private final Map<String, FirstDataUnmuteSink> sinks = new HashMap<>();
private final int peerConnectionId;
private final WebRTCModule webRTCModule;

public AudioTrackAdapter(WebRTCModule webRTCModule, int peerConnectionId) {
this.peerConnectionId = peerConnectionId;
this.webRTCModule = webRTCModule;
}

public void addAdapter(AudioTrack audioTrack) {
String trackId = audioTrack.id();
if (sinks.containsKey(trackId)) {
Log.w(TAG, "Attempted to add adapter twice for track ID: " + trackId);
return;
}
FirstDataUnmuteSink sink = new FirstDataUnmuteSink(trackId);
sinks.put(trackId, sink);
audioTrack.addSink(sink);
Log.d(TAG, "Created adapter for " + trackId);
}

public void removeAdapter(AudioTrack audioTrack) {
String trackId = audioTrack.id();
FirstDataUnmuteSink sink = sinks.remove(trackId);
if (sink == null) {
Log.w(TAG, "removeAdapter - no adapter for " + trackId);
return;
}
audioTrack.removeSink(sink);
Log.d(TAG, "Deleted adapter for " + trackId);
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.

private class FirstDataUnmuteSink implements AudioTrackSink {
private final AtomicBoolean fired = new AtomicBoolean(false);
private final String trackId;

FirstDataUnmuteSink(String trackId) {
this.trackId = trackId;
}

@Override
public void onData(ByteBuffer audioData,
int bitsPerSample,
int sampleRate,
int numberOfChannels,
int numberOfFrames,
long absoluteCaptureTimestampMs) {
if (!fired.compareAndSet(false, true)) {
return;
}
WritableMap params = Arguments.createMap();
params.putInt("pcId", peerConnectionId);
params.putString("trackId", trackId);
params.putBoolean("muted", false);
Log.d(TAG, "Unmute event pcId: " + peerConnectionId + " trackId: " + trackId);
webRTCModule.sendEvent("mediaStreamTrackMuteChanged", params);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ class PeerConnectionObserver implements PeerConnection.Observer {
final Map<String, MediaStream> remoteStreams; // React tag -> MediaStream
final Map<String, MediaStreamTrack> remoteTracks;
final VideoTrackAdapter videoTrackAdapters;
final AudioTrackAdapter audioTrackAdapters;
private final WebRTCModule webRTCModule;

PeerConnectionObserver(WebRTCModule webRTCModule, int id) {
Expand All @@ -53,6 +54,7 @@ class PeerConnectionObserver implements PeerConnection.Observer {
this.remoteStreams = new HashMap<>();
this.remoteTracks = new HashMap<>();
this.videoTrackAdapters = new VideoTrackAdapter(webRTCModule, id);
this.audioTrackAdapters = new AudioTrackAdapter(webRTCModule, id);
}

PeerConnection getPeerConnection() {
Expand All @@ -72,11 +74,13 @@ void close() {
void dispose() {
Log.d(TAG, "PeerConnection.dispose() for " + id);

// Remove video track adapters
// Remove track adapters for remote tracks
for (MediaStreamTrack track : this.remoteTracks.values()) {
if (track instanceof VideoTrack) {
videoTrackAdapters.removeAdapter((VideoTrack) track);
videoTrackAdapters.removeDimensionDetector((VideoTrack) track);
} else if (track instanceof AudioTrack) {
audioTrackAdapters.removeAdapter((AudioTrack) track);
}
}

Expand Down Expand Up @@ -463,6 +467,7 @@ public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStre
videoTrackAdapters.addAdapter((VideoTrack) track);
videoTrackAdapters.addDimensionDetector((VideoTrack) track);
} else if (track.kind().equals(MediaStreamTrack.AUDIO_TRACK_KIND)) {
audioTrackAdapters.addAdapter((AudioTrack) track);
((AudioTrack) track).setVolume(WebRTCModuleOptions.getInstance().defaultTrackVolume);
}
remoteTracks.put(track.id(), track);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,8 @@ private class TrackMuteUnmuteImpl implements VideoSink {
private TimerTask emitMuteTask;
private volatile boolean disposed;
private AtomicInteger frameCounter;
private boolean mutedState;
// Per W3C spec, remote tracks MUST start muted.
private volatile boolean mutedState = true;
private final String trackId;

TrackMuteUnmuteImpl(String trackId) {
Expand All @@ -111,7 +112,13 @@ private class TrackMuteUnmuteImpl implements VideoSink {

@Override
public void onFrame(VideoFrame frame) {
    // Count the frame exactly once. incrementAndGet() == 1 is the atomic
    // "first frame" check — fire unmute immediately instead of waiting up
    // to INITIAL_MUTE_DELAY for the periodic timer. (A leftover second
    // frameCounter.addAndGet(1) here would double-count every frame and
    // make the == 1 test unreachable, so there must be only one increment.)
    // NOTE(review): mutedState is also written by the periodic timer task;
    // this check-then-act is benign for the first frame but confirm there
    // is no ordering hazard with the timer thread.
    if (frameCounter.incrementAndGet() == 1 && mutedState) {
        mutedState = false;
        emitMuteEvent(false);
    }
}

private void start() {
Expand Down
6 changes: 6 additions & 0 deletions ios/RCTWebRTC.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
4EE3A8BD25B8416500FAA24A /* WebRTCModule+RTCMediaStream.m in Sources */ = {isa = PBXBuildFile; fileRef = 4EE3A8BC25B8416500FAA24A /* WebRTCModule+RTCMediaStream.m */; };
4EE3A8C125B8416F00FAA24A /* WebRTCModule+RTCPeerConnection.m in Sources */ = {isa = PBXBuildFile; fileRef = 4EE3A8BF25B8416F00FAA24A /* WebRTCModule+RTCPeerConnection.m */; };
4EE3A8C525B8417800FAA24A /* WebRTCModule+VideoTrackAdapter.m in Sources */ = {isa = PBXBuildFile; fileRef = 4EE3A8C325B8417800FAA24A /* WebRTCModule+VideoTrackAdapter.m */; };
A100000125B8417800FAA24A /* WebRTCModule+AudioTrackAdapter.m in Sources */ = {isa = PBXBuildFile; fileRef = A100000225B8417800FAA24A /* WebRTCModule+AudioTrackAdapter.m */; };
4EE3A8D125B841DD00FAA24A /* SocketConnection.m in Sources */ = {isa = PBXBuildFile; fileRef = 4EE3A8C825B841DD00FAA24A /* SocketConnection.m */; };
4EE3A8D225B841DD00FAA24A /* CaptureController.m in Sources */ = {isa = PBXBuildFile; fileRef = 4EE3A8C925B841DD00FAA24A /* CaptureController.m */; };
4EE3A8D325B841DD00FAA24A /* ScreenCaptureController.m in Sources */ = {isa = PBXBuildFile; fileRef = 4EE3A8CC25B841DD00FAA24A /* ScreenCaptureController.m */; };
Expand Down Expand Up @@ -64,6 +65,8 @@
4EE3A8C025B8416F00FAA24A /* WebRTCModule+RTCPeerConnection.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "WebRTCModule+RTCPeerConnection.h"; path = "RCTWebRTC/WebRTCModule+RTCPeerConnection.h"; sourceTree = SOURCE_ROOT; };
4EE3A8C325B8417800FAA24A /* WebRTCModule+VideoTrackAdapter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "WebRTCModule+VideoTrackAdapter.m"; path = "RCTWebRTC/WebRTCModule+VideoTrackAdapter.m"; sourceTree = SOURCE_ROOT; };
4EE3A8C425B8417800FAA24A /* WebRTCModule+VideoTrackAdapter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "WebRTCModule+VideoTrackAdapter.h"; path = "RCTWebRTC/WebRTCModule+VideoTrackAdapter.h"; sourceTree = SOURCE_ROOT; };
A100000225B8417800FAA24A /* WebRTCModule+AudioTrackAdapter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "WebRTCModule+AudioTrackAdapter.m"; path = "RCTWebRTC/WebRTCModule+AudioTrackAdapter.m"; sourceTree = SOURCE_ROOT; };
A100000325B8417800FAA24A /* WebRTCModule+AudioTrackAdapter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "WebRTCModule+AudioTrackAdapter.h"; path = "RCTWebRTC/WebRTCModule+AudioTrackAdapter.h"; sourceTree = SOURCE_ROOT; };
4EE3A8C725B841DD00FAA24A /* ScreenCapturer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ScreenCapturer.h; path = RCTWebRTC/ScreenCapturer.h; sourceTree = SOURCE_ROOT; };
4EE3A8C825B841DD00FAA24A /* SocketConnection.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = SocketConnection.m; path = RCTWebRTC/SocketConnection.m; sourceTree = SOURCE_ROOT; };
4EE3A8C925B841DD00FAA24A /* CaptureController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = CaptureController.m; path = RCTWebRTC/CaptureController.m; sourceTree = SOURCE_ROOT; };
Expand Down Expand Up @@ -142,6 +145,8 @@
4EE3A8BF25B8416F00FAA24A /* WebRTCModule+RTCPeerConnection.m */,
4EE3A8C425B8417800FAA24A /* WebRTCModule+VideoTrackAdapter.h */,
4EE3A8C325B8417800FAA24A /* WebRTCModule+VideoTrackAdapter.m */,
A100000325B8417800FAA24A /* WebRTCModule+AudioTrackAdapter.h */,
A100000225B8417800FAA24A /* WebRTCModule+AudioTrackAdapter.m */,
4EE3A8B125B8414000FAA24A /* WebRTCModule.h */,
4EE3A8B025B8414000FAA24A /* WebRTCModule.m */,
4EE3A8CB25B841DD00FAA24A /* CaptureController.h */,
Expand Down Expand Up @@ -256,6 +261,7 @@
buildActionMask = 2147483647;
files = (
4EE3A8C525B8417800FAA24A /* WebRTCModule+VideoTrackAdapter.m in Sources */,
A100000125B8417800FAA24A /* WebRTCModule+AudioTrackAdapter.m in Sources */,
4EE3A8BA25B8415900FAA24A /* WebRTCModule+RTCDataChannel.m in Sources */,
4EE3A8B625B8414A00FAA24A /* WebRTCModule+Permissions.m in Sources */,
DEC96577264176C10052DB35 /* DataChannelWrapper.m in Sources */,
Expand Down
13 changes: 13 additions & 0 deletions ios/RCTWebRTC/WebRTCModule+AudioTrackAdapter.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@

#import <WebRTC/RTCAudioRenderer.h>
#import <WebRTC/RTCPeerConnection.h>
#import "WebRTCModule.h"

@interface RTCPeerConnection (AudioTrackAdapter)

/// First-buffer unmute renderers currently attached, keyed by audio track ID.
@property(nonatomic, strong) NSMutableDictionary<NSString *, id> *audioTrackAdapters;

/// Attaches a renderer that fires the W3C 'unmute' event when the first
/// decoded PCM buffer arrives for the given remote audio track.
- (void)addAudioTrackAdapter:(RTCAudioTrack *)track;

/// Detaches and releases the renderer previously attached to the track.
- (void)removeAudioTrackAdapter:(RTCAudioTrack *)track;

@end
114 changes: 114 additions & 0 deletions ios/RCTWebRTC/WebRTCModule+AudioTrackAdapter.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <objc/runtime.h>
#import <stdatomic.h>

#import <React/RCTBridge.h>
#import <React/RCTEventDispatcher.h>
#import <React/RCTLog.h>

#import <WebRTC/RTCAudioRenderer.h>
#import <WebRTC/RTCAudioTrack.h>

#import "WebRTCModule+AudioTrackAdapter.h"
#import "WebRTCModule+RTCPeerConnection.h"
#import "WebRTCModule.h"

/* Fires the W3C 'unmute' event on a remote audio track when the first
* decoded PCM buffer arrives via RTCAudioRenderer.
*
* IMPORTANT — only the initial muted → unmuted transition is detectable.
* Subsequent mute events (network stall mid-call) cannot be detected
* from the renderer: the iOS audio render path and WebRTC's NetEq
* synthesize silence / PLC frames whenever RTP stops, so
* renderPCMBuffer: keeps firing at a steady rate regardless of network
* state. For "remote participant muted their mic" UI, use the
* out-of-band participant state from your signaling layer — that is the
* correct source of truth, not this adapter.
*/
@interface FirstBufferUnmuteRenderer : NSObject<RTCAudioRenderer>

/// React tag of the owning peer connection; reported as "pcId" in the event body.
@property(copy, nonatomic) NSNumber *peerConnectionId;
/// WebRTC track ID the unmute event is attributed to.
@property(copy, nonatomic) NSString *trackId;
/// Held weakly; messaging nil is a no-op, so events after module teardown
/// are silently dropped rather than retaining the module.
@property(weak, nonatomic) WebRTCModule *module;

/// Designated initializer.
/// @param peerConnectionId React tag of the owning peer connection.
/// @param trackId Track the one-time unmute event is emitted for.
/// @param module Module used to deliver the event to JS.
- (instancetype)initWith:(NSNumber *)peerConnectionId
trackId:(NSString *)trackId
webRTCModule:(WebRTCModule *)module;

@end

@implementation FirstBufferUnmuteRenderer {
    // One-shot latch set by the first renderPCMBuffer: call. atomic_flag has
    // no static initializer usable on an ivar, so it is explicitly cleared in
    // the designated initializer below.
    atomic_flag _fired;
}

/// Designated initializer.
/// @param peerConnectionId React tag of the owning peer connection.
/// @param trackId WebRTC track ID the event is attributed to.
/// @param module Module used to deliver the event (stored weakly).
- (instancetype)initWith:(NSNumber *)peerConnectionId
                 trackId:(NSString *)trackId
            webRTCModule:(WebRTCModule *)module {
    self = [super init];
    if (self) {
        self.peerConnectionId = peerConnectionId;
        self.trackId = trackId;
        self.module = module;
        atomic_flag_clear(&_fired);
    }
    return self;
}

/// Called by WebRTC for every decoded PCM buffer. Fires the one-time
/// 'unmute' event on the first buffer and ignores all subsequent ones.
- (void)renderPCMBuffer:(AVAudioPCMBuffer *)pcmBuffer {
    // test_and_set returns the previous value, so only the very first caller
    // (even under concurrency) gets past this guard.
    if (atomic_flag_test_and_set(&_fired)) {
        return;
    }
    // Log before dispatch, matching the ordering of the Android adapter.
    RCTLog(@"[AudioTrackAdapter] Unmute event for pc %@ track %@", self.peerConnectionId, self.trackId);
    // Hop off the renderer callback thread before doing React bridge work so
    // the (one-time) event dispatch cannot stall the audio delivery path.
    // NOTE(review): assumes renderPCMBuffer: may arrive on a time-sensitive
    // audio thread — confirm against the WebRTC iOS SDK's renderer contract.
    NSNumber *pcId = self.peerConnectionId;
    NSString *trackId = self.trackId;
    WebRTCModule *module = self.module;
    dispatch_async(dispatch_get_main_queue(), ^{
        [module sendEventWithName:kEventMediaStreamTrackMuteChanged
                             body:@{
                                 @"pcId" : pcId,
                                 @"trackId" : trackId,
                                 @"muted" : @NO
                             }];
    });
}

@end

@implementation RTCPeerConnection (AudioTrackAdapter)

#pragma mark - Associated-object storage

/// Backing storage lives in an associated object because categories cannot
/// declare ivars.
- (NSMutableDictionary<NSString *, id> *)audioTrackAdapters {
    return objc_getAssociatedObject(self, @selector(audioTrackAdapters));
}

- (void)setAudioTrackAdapters:(NSMutableDictionary<NSString *, id> *)audioTrackAdapters {
    objc_setAssociatedObject(self,
                             @selector(audioTrackAdapters),
                             audioTrackAdapters,
                             OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

#pragma mark - Adapter management

/// Attaches a one-shot unmute renderer to a remote audio track.
/// No-op (with a warning) when one is already attached for this track ID.
- (void)addAudioTrackAdapter:(RTCAudioTrack *)track {
    NSString *trackId = track.trackId;
    if (self.audioTrackAdapters[trackId] != nil) {
        RCTLogWarn(@"[AudioTrackAdapter] Adapter already exists for track %@", trackId);
        return;
    }

    FirstBufferUnmuteRenderer *renderer =
        [[FirstBufferUnmuteRenderer alloc] initWith:self.reactTag
                                            trackId:trackId
                                       webRTCModule:self.webRTCModule];
    self.audioTrackAdapters[trackId] = renderer;
    [track addRenderer:renderer];

    RCTLogTrace(@"[AudioTrackAdapter] Adapter created for track %@", trackId);
}

/// Detaches and discards the renderer previously attached to the track.
/// No-op (with a warning) when none exists.
- (void)removeAudioTrackAdapter:(RTCAudioTrack *)track {
    NSString *trackId = track.trackId;
    FirstBufferUnmuteRenderer *renderer = self.audioTrackAdapters[trackId];
    if (renderer == nil) {
        RCTLogWarn(@"[AudioTrackAdapter] Adapter doesn't exist for track %@", trackId);
        return;
    }

    [track removeRenderer:renderer];
    [self.audioTrackAdapters removeObjectForKey:trackId];
    RCTLogTrace(@"[AudioTrackAdapter] Adapter removed for track %@", trackId);
}

@end
7 changes: 6 additions & 1 deletion ios/RCTWebRTC/WebRTCModule+RTCPeerConnection.m
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
#import <WebRTC/RTCStatisticsReport.h>

#import "SerializeUtils.h"
#import "WebRTCModule+AudioTrackAdapter.h"
#import "WebRTCModule+RTCDataChannel.h"
#import "WebRTCModule+RTCPeerConnection.h"
#import "WebRTCModule+VideoTrackAdapter.h"
Expand Down Expand Up @@ -165,6 +166,7 @@ - (nullable RTCRtpTransceiver *)getTransceiverByPeerConnectionId:(nonnull NSNumb
peerConnection.remoteTracks = [NSMutableDictionary new];
peerConnection.videoTrackAdapters = [NSMutableDictionary new];
peerConnection.videoDimensionDetectors = [NSMutableDictionary new];
peerConnection.audioTrackAdapters = [NSMutableDictionary new];
peerConnection.webRTCModule = self;

self.peerConnections[objectID] = peerConnection;
Expand Down Expand Up @@ -395,12 +397,14 @@ - (nullable RTCRtpTransceiver *)getTransceiverByPeerConnectionId:(nonnull NSNumb
return;
}

// Remove video track adapters
// Remove track adapters for remote tracks
for (NSString *key in peerConnection.remoteTracks.allKeys) {
RTCMediaStreamTrack *track = peerConnection.remoteTracks[key];
if (track.kind == kRTCMediaStreamTrackKindVideo) {
[peerConnection removeVideoTrackAdapter:(RTCVideoTrack *)track];
[peerConnection removeVideoDimensionDetector:(RTCVideoTrack *)track];
} else if (track.kind == kRTCMediaStreamTrackKindAudio) {
[peerConnection removeAudioTrackAdapter:(RTCAudioTrack *)track];
}
}

Expand Down Expand Up @@ -981,6 +985,7 @@ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
[peerConnection addVideoDimensionDetector:videoTrack];
} else if (track.kind == kRTCMediaStreamTrackKindAudio) {
RTCAudioTrack *audioTrack = (RTCAudioTrack *)track;
[peerConnection addAudioTrackAdapter:audioTrack];
WebRTCModuleOptions *options = [WebRTCModuleOptions sharedInstance];
audioTrack.source.volume = options.defaultTrackVolume;
}
Expand Down
11 changes: 9 additions & 2 deletions ios/RCTWebRTC/WebRTCModule+VideoTrackAdapter.m
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,8 @@ - (instancetype)initWith:(NSNumber *)peerConnectionId trackId:(NSString *)trackI

_disposed = NO;
_frameCount = 0;
_muted = NO;
// Per W3C spec, remote tracks MUST start muted.
_muted = YES;
_timer = nil;
}

Expand Down Expand Up @@ -112,7 +113,13 @@ - (void)start {
}

- (void)renderFrame:(nullable RTCVideoFrame *)frame {
    // Count the frame exactly once. atomic_fetch_add returns the prior
    // value; == 0 is the atomic "first frame" check — fire unmute
    // immediately instead of waiting up to INITIAL_MUTE_DELAY for the
    // periodic timer. (A leftover second atomic_fetch_add here would
    // double-count every frame and make the == 0 test unreachable, so
    // there must be only one increment.)
    // NOTE(review): _muted is also written by the timer task; this
    // check-then-act is benign for the first frame but confirm there is
    // no ordering hazard with the timer thread.
    if (atomic_fetch_add(&_frameCount, 1) == 0 && self->_muted) {
        self->_muted = NO;
        [self emitMuteEvent:NO];
    }
}

- (void)setSize:(CGSize)size {
Expand Down
Loading
Loading