I'm trying to find a way to stream both video and audio from the browser to my native program, and here is my approach so far.
To stream video from the browser to your native program without a GUI, follow the peerconnection client example here: https://chromium.googlesource.com/external/webrtc/+/refs/heads/master/examples/peerconnection/client/
Use AddOrUpdateSink to register your own VideoSinkInterface, and you will receive the decoded frame data in the callback void OnFrame(const cricket::VideoFrame& frame). Instead of rendering the frame to a GUI as the example does, you can process it however you want (see the sketch below).
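A minimal sketch of such a sink, assuming the rtc::VideoSinkInterface<webrtc::VideoFrame> variant of the interface (the frame type and header paths differ between WebRTC revisions; older trees use cricket::VideoFrame as quoted above):

#include "api/scoped_refptr.h"
#include "api/video/video_frame.h"
#include "api/video/video_frame_buffer.h"
#include "api/video/video_sink_interface.h"

// Receives every decoded remote frame instead of rendering it.
class FrameConsumer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  void OnFrame(const webrtc::VideoFrame& frame) override {
    // Convert to I420 and grab the raw planes; from here you can dump
    // them to disk, re-encode them, run analysis, etc.
    rtc::scoped_refptr<webrtc::I420BufferInterface> i420 =
        frame.video_frame_buffer()->ToI420();
    const uint8_t* y = i420->DataY();
    const uint8_t* u = i420->DataU();
    const uint8_t* v = i420->DataV();
    // ... process y/u/v; dimensions are i420->width() x i420->height() ...
    (void)y; (void)u; (void)v;
  }
};

// Registration, where remote_video_track is the VideoTrackInterface your
// PeerConnectionObserver received (e.g. in OnAddTrack):
//   static FrameConsumer frame_consumer;
//   remote_video_track->AddOrUpdateSink(&frame_consumer, rtc::VideoSinkWants());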
To stream audio from the browser to your native program without a real audio device, you can use a fake, file-backed audio device:
- set the variable rtc_use_dummy_audio_file_devices to true in https://chromium.googlesource.com/external/webrtc/+/master/webrtc/build/webrtc.gni
- invoke the static function webrtc::FileAudioDeviceFactory::SetFilenamesToUse("", "file_to_save_audio"); to specify the output filename (a sketch of where this call goes follows the patch below)
- patch file_audio_device.cc with the diff below (as I write this answer, FileAudioDevice has some issues that may already be fixed upstream)
- recompile your program, touch file_to_save_audio, and you will see PCM data in file_to_save_audio once the WebRTC connection is established.
patch:
diff --git a/webrtc/modules/audio_device/dummy/file_audio_device.cc b/webrtc/modules/audio_device/dummy/file_audio_device.cc
index 8b3fa5e..2717cda 100644
--- a/webrtc/modules/audio_device/dummy/file_audio_device.cc
+++ b/webrtc/modules/audio_device/dummy/file_audio_device.cc
@@ -35,6 +35,7 @@ FileAudioDevice::FileAudioDevice(const int32_t id,
_recordingBufferSizeIn10MS(0),
_recordingFramesIn10MS(0),
_playoutFramesIn10MS(0),
+ _initialized(false),
_playing(false),
_recording(false),
_lastCallPlayoutMillis(0),
@@ -135,12 +136,13 @@ int32_t FileAudioDevice::InitPlayout() {
// Update webrtc audio buffer with the selected parameters
_ptrAudioBuffer->SetPlayoutSampleRate(kPlayoutFixedSampleRate);
_ptrAudioBuffer->SetPlayoutChannels(kPlayoutNumChannels);
+ _initialized = true;
}
return 0;
}
bool FileAudioDevice::PlayoutIsInitialized() const {
- return true;
+ return _initialized;
}
int32_t FileAudioDevice::RecordingIsAvailable(bool& available) {
@@ -236,7 +238,7 @@ int32_t FileAudioDevice::StopPlayout() {
}
bool FileAudioDevice::Playing() const {
- return true;
+ return _playing;
}
int32_t FileAudioDevice::StartRecording() {
diff --git a/webrtc/modules/audio_device/dummy/file_audio_device.h b/webrtc/modules/audio_device/dummy/file_audio_device.h
index a69b47e..3f3c841 100644
--- a/webrtc/modules/audio_device/dummy/file_audio_device.h
+++ b/webrtc/modules/audio_device/dummy/file_audio_device.h
@@ -185,6 +185,7 @@ class FileAudioDevice : public AudioDeviceGeneric {
std::unique_ptr<rtc::PlatformThread> _ptrThreadRec;
std::unique_ptr<rtc::PlatformThread> _ptrThreadPlay;
+ bool _initialized;
bool _playing;
bool _recording;
uint64_t _lastCallPlayoutMillis;
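For the SetFilenamesToUse step, here is a minimal sketch of where the call belongs: once, before the peer connection factory (and with it the audio device module) is created. The header path matches the source layout linked above and may have moved in newer revisions; the output file receives the raw PCM samples that FileAudioDevice writes during playout.

#include "webrtc/modules/audio_device/dummy/file_audio_device_factory.h"

void ConfigureFileAudioDevice() {
  // First argument: an optional PCM file to play back as the fake
  // microphone input (left empty here, as in the step above).
  // Second argument: the file the received (playout) audio is written to.
  webrtc::FileAudioDeviceFactory::SetFilenamesToUse("", "file_to_save_audio");
}

// Call ConfigureFileAudioDevice() before CreatePeerConnectionFactory(),
// so the dummy device picks up the filenames when it is constructed.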