To receive media in a video conference, you need to have some way to consume the remote audio/video. The audio and/or video received by the current user is known as remote media, because it comes from a remote participant. This section focuses on how to play back the media received from remote participants.
The remote media abstraction in the client SDK covers the most common use cases for consuming audio and video data. It wraps a single audio track and a single video track. It is possible to bypass the media abstraction and work with tracks directly, which are composed of a sequence of media processing elements. It is also possible to bypass the track abstraction and work directly with media pipes and sinks, connecting and disconnecting them at runtime to support advanced use cases. This guide focuses on the media abstraction, since it addresses the most common use case.
Note
For JavaScript, no implementation is necessary. You can use the fm.liveswitch.RemoteMedia
class, which is defined in the SDK.
Similar to how a user's local media requires an implementation that inherits from FM.LiveSwitch.RtcLocalMedia<T>
, other participants' remote media require an implementation that inherits from FM.LiveSwitch.RtcRemoteMedia<T>
. The generic type T represents the type of object that is used for displaying the video feeds of these remote participants. As with local media you do not have to specify a generic type.
When working with local media, you created a class named App.LocalMedia
. For remote media, continue this convention by creating a class named App.RemoteMedia
. The implementation for both objects is similar.
To begin implementing App.RemoteMedia
, define a constructor. Your constructor must call one of the parent constructors in the RtcRemoteMedia<T>
class and must invoke the Initialize
method. There are two parent constructors that you can choose from. The first has two parameters, disableAudio
and disableVideo
, which allow you to turn off either the audio or video data from a remote feed. It is recommended to leave both audio and video enabled for maximum flexibility and interoperability with the remote peer. The second constructor takes an instance of FM.LiveSwitch.AecContext
, which, again, is short for Acoustic Echo Cancellation. The implementation of the second constructor is out of scope for this guide, so focus on the first, as shown in the following code samples:
// RemoteMedia for .NET: displays remote video in a PictureBox (Windows Forms) or an Image (WPF).
#if WINFORMS
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<FM.LiveSwitch.Winforms.PictureBoxControl>
#else
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<System.Windows.Controls.Image>
#endif
{
// Both audio and video are left enabled (false, false) for maximum interoperability.
// Initialize() must be invoked after the base constructor, per the SDK contract described above.
public RemoteMedia(AecContext aecContext)
: base(false, false, aecContext)
{
Initialize();
}
}
// RemoteMedia for Android: uses android.view.View as the generic view type.
public class RemoteMedia extends fm.liveswitch.RtcRemoteMedia<android.view.View> {
// Audio and video both enabled; initialize() must follow the super() call.
public RemoteMedia(AecContext aecContext) {
super(false, false, aecContext);
initialize();
}
}
// RemoteMedia for an Apple platform (Swift); the Obj-C wrapper class takes no generic parameter.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
// Audio and video both enabled; nil AecContext (acoustic echo cancellation not configured here).
init() {
super.init(false, false, nil);
initialize();
}
}
// RemoteMedia for the other Apple platform (Swift) — identical to the previous snippet;
// presumably one is iOS and one is macOS — TODO confirm against the tab labels in the rendered docs.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
// Audio and video both enabled; nil AecContext.
init() {
super.init(false, false, nil);
initialize();
}
}
// RemoteMedia for Xamarin: Android renders into a FrameLayout, iOS into an OpenGL view.
#if __ANDROID__
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<Android.Widget.FrameLayout>
{
// Android requires the app Context and a software-H.264 flag, forwarded to the base constructor.
public RemoteMedia(Android.Content.Context context, bool enableSoftwareH264, bool disableAudio, bool disableVideo, AecContext aecContext)
: base(context, enableSoftwareH264, disableAudio, disableVideo, aecContext)
{
Initialize();
}
}
#elif __IOS__
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<FM.LiveSwitch.Cocoa.OpenGLView>
{
// iOS uses the simpler base constructor; Initialize() must still be called.
public RemoteMedia(bool disableAudio, bool disableVideo, AecContext aecContext)
: base(disableAudio, disableVideo, aecContext)
{
Initialize();
}
}
#endif
// RemoteMedia for Unity: remote video is rendered into a RectTransform.
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<UnityEngine.RectTransform>
{
// Callers choose whether to disable audio/video; Initialize() must follow the base constructor.
public RemoteMedia(bool disableAudio, bool disableVideo, AecContext aecContext)
: base(disableAudio, disableVideo, aecContext)
{
Initialize();
}
}
Play Remote Audio
To play audio from a remote feed, you must implement two methods. The first method, CreateAudioSink
, returns an audio sink that is used for audio playback. Normally, the audio sink is the system speaker. LiveSwitch provides a library for your platform that allows you to play audio using the user's speaker:
- Include
FM.LiveSwitch.NAudio.dll
.
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
// Returns the audio sink used for playback; NAudio plays through the system speaker.
public override FM.LiveSwitch.AudioSink CreateAudioSink(FM.LiveSwitch.AudioConfig config)
{
return new FM.LiveSwitch.NAudio.Sink(config);
}
}
- Include
fm.liveswitch.android.jar
.
public class RemoteMedia extends fm.liveswitch.RtcRemoteMedia<...> {
// Returns the audio sink used for playback; AudioTrackSink plays through the Android speaker.
public fm.liveswitch.AudioSink createAudioSink(fm.liveswitch.AudioConfig config) {
return new fm.liveswitch.android.AudioTrackSink(config);
}
}
- Include
FMLiveSwitchCocoa.a
.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
// Returns the audio sink used for playback; the Cocoa AudioUnit sink plays through the device speaker.
override func createAudioSink(config:FMLiveSwitchAudioConfig) -> FMLiveSwitchAudioSink {
return FMLiveSwitchCocoaAudioUnitSink(config)
}
}
- Include
FMLiveSwitchCocoa.a
.
// Identical to the previous snippet — the same Cocoa AudioUnit sink is used on both Apple platforms.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
override func createAudioSink(config:FMLiveSwitchAudioConfig) -> FMLiveSwitchAudioSink {
return FMLiveSwitchCocoaAudioUnitSink(config)
}
}
- Include FM.LiveSwitch.Android.dll for Android and FM.LiveSwitch.Cocoa.dll for iOS.
// Xamarin audio sinks: AudioTrackSink on Android, AudioUnitSink on iOS.
#if __ANDROID__
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<Android.Widget.FrameLayout>
{
// Returns the audio playback sink for Android.
public override FM.LiveSwitch.AudioSink CreateAudioSink(FM.LiveSwitch.AudioConfig config)
{
return new FM.LiveSwitch.Android.AudioTrackSink(config);
}
}
#elif __IOS__
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<FM.LiveSwitch.Cocoa.OpenGLView>
{
// Returns the audio playback sink for iOS.
public override FM.LiveSwitch.AudioSink CreateAudioSink(FM.LiveSwitch.AudioConfig config)
{
return new FM.LiveSwitch.Cocoa.AudioUnitSink(config);
}
}
#endif
- The LiveSwitch.Unity library provides an implementation for a Unity audio sink:
AudioClipSink
.
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
.
.
.
// Unity: AudioClipSink plays received audio through a Unity AudioClip.
protected override FM.LiveSwitch.AudioSink CreateAudioSink(AudioConfig config)
{
return new FM.LiveSwitch.Unity.AudioClipSink(config);
}
.
.
.
}
The second method to implement is the CreateOpusDecoder
method. The code for CreateOpusDecoder
is similar to the code for CreateOpusEncoder
. Instead of supplying an encoder implementation, however, you supply a decoder implementation. Once complete you are able to receive and decode audio sent from remote users. As you already know from implementing the App.LocalMedia
class, LiveSwitch provides an Opus library for your platform. If you haven't already, you must include it now:
- Include
FM.LiveSwitch.Opus.dll
.
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
// Returns the Opus decoder used to decode received audio (mirrors CreateOpusEncoder in LocalMedia).
public override FM.LiveSwitch.AudioDecoder CreateOpusDecoder(FM.LiveSwitch.AudioConfig config)
{
return new FM.LiveSwitch.Opus.Decoder(config);
}
}
- Include
fm.liveswitch.opus.jar
.
public class RemoteMedia extends fm.liveswitch.RtcRemoteMedia<...> {
// Returns the Opus decoder used to decode received audio.
public fm.liveswitch.AudioDecoder createOpusDecoder(fm.liveswitch.AudioConfig config) {
return new fm.liveswitch.opus.Decoder(config);
}
}
- Include
FMLiveSwitchOpus.a
.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
// Returns the Opus decoder used to decode received audio.
override func createOpusDecoder(config:FMLiveSwitchAudioConfig) -> FMLiveSwitchAudioDecoder {
return FMLiveSwitchOpusDecoder(config)
}
}
- Include
FMLiveSwitchOpus.a
.
// Identical to the previous snippet — the same Opus decoder is used on both Apple platforms.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
override func createOpusDecoder(config:FMLiveSwitchAudioConfig) -> FMLiveSwitchAudioDecoder {
return FMLiveSwitchOpusDecoder(config)
}
}
- Include
FM.LiveSwitch.Opus.dll
.
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
// Returns the Opus decoder used to decode received audio.
public override FM.LiveSwitch.AudioDecoder CreateOpusDecoder(FM.LiveSwitch.AudioConfig config)
{
return new FM.LiveSwitch.Opus.Decoder(config);
}
}
Play Remote Video
To play remote video with the App.RemoteMedia
class, you must implement several methods, in the same way that you had to implement several methods to capture local video with your App.LocalMedia
class. The first method you must implement is CreateViewSink
. This method creates a view object that plays back a remote video feed. LiveSwitch provides an implementation for your platform:
- with WPF: Include
FM.LiveSwitch.Wpf.dll
.
- with Windows Forms: Include
FM.LiveSwitch.WinForms.dll
.
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
    // Creates the view sink that renders remote video: a PictureBox sink under
    // Windows Forms, an Image sink under WPF.
    // Fix: CreateViewSink is a method (see the Unity variant, which declares
    // CreateViewSink()), so the parameter list must not be omitted.
    public override FM.LiveSwitch.ViewSink<...> CreateViewSink()
    {
#if WINFORMS
        return new FM.LiveSwitch.WinForms.PictureBoxSink();
#else
        return new FM.LiveSwitch.Wpf.ImageSink();
#endif
    }
}
- Include
fm.liveswitch.android.jar
.
public class RemoteMedia extends fm.liveswitch.RtcRemoteMedia<...> {
    // Creates the Android view sink that renders remote video.
    // Fix: qualified OpenGLSink with its package to match the included
    // fm.liveswitch.android.jar and the style of every other Java snippet
    // (e.g. fm.liveswitch.android.AudioTrackSink above).
    // NOTE(review): assumes `context` is an android.content.Context field
    // stored by the constructor — confirm against the full class.
    public fm.liveswitch.ViewSink<...> createViewSink() {
        return new fm.liveswitch.android.OpenGLSink(this.context);
    }
}
- Include
FMLiveSwitchCocoa.a
.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
// Creates the OpenGL view sink that renders remote video on this Apple platform.
override func createViewSink() -> FMLiveSwitchViewSink {
return FMLiveSwitchCocoaOpenGLSink()
}
}
- Include
FMLiveSwitchCocoa.a
.
// Same contract as the previous snippet, but this platform renders through an image view sink.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
override func createViewSink() -> FMLiveSwitchViewSink {
return FMLiveSwitchCocoaImageViewSink()
}
}
- Include
FM.LiveSwitch.Android.dll
for Android and FM.LiveSwitch.Cocoa.dll
for iOS.
// Xamarin view sinks: OpenGLSink on both platforms (Android needs the app Context).
// Fix: CreateViewSink is a method (see the Unity variant, which declares
// CreateViewSink()), so the parameter list must not be omitted.
#if __ANDROID__
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<Android.Widget.FrameLayout>
{
    // Creates the Android OpenGL view sink for remote video playback.
    // NOTE(review): assumes `context` is the Android.Content.Context saved by
    // the constructor shown earlier — confirm.
    public override FM.LiveSwitch.ViewSink<Android.Widget.FrameLayout> CreateViewSink()
    {
        return new FM.LiveSwitch.Android.OpenGLSink(context);
    }
}
#elif __IOS__
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<FM.LiveSwitch.Cocoa.OpenGLView>
{
    // Creates the iOS OpenGL view sink for remote video playback.
    public override FM.LiveSwitch.ViewSink<FM.LiveSwitch.Cocoa.OpenGLView> CreateViewSink()
    {
        return new FM.LiveSwitch.Cocoa.OpenGLSink();
    }
}
#endif
- In this case, we use LiveSwitch.Unity's
RectTransformSink()
to use a RectTransform
as a remote video sink.
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
.
.
.
// Unity: RectTransformSink renders remote video into a RectTransform.
protected override FM.LiveSwitch.ViewSink<UnityEngine.RectTransform> CreateViewSink()
{
return new LiveSwitch.Unity.RectTransformSink();
}
.
.
.
}
The next step is to implement the factory methods for the various video codec decoders. These are CreateVp8Decoder
, CreateVp9Decoder
and CreateH264Decoder
. For each method, create and return an instance of the appropriate decoder. As with encoders, if you do not wish to support a codec, you can return null. The implementation for your platform can be found in:
- Include
FM.LiveSwitch.Vpx.dll
and/or FM.LiveSwitch.OpenH264.dll
.
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
// Factory methods for the video decoders; return null from any of these to
// opt out of supporting that codec.
public override FM.LiveSwitch.VideoDecoder CreateVp8Decoder()
{
return new FM.LiveSwitch.Vp8.Decoder();
}
public override FM.LiveSwitch.VideoDecoder CreateVp9Decoder()
{
return new FM.LiveSwitch.Vp9.Decoder();
}
public override FM.LiveSwitch.VideoDecoder CreateH264Decoder()
{
return new FM.LiveSwitch.H264.Decoder();
}
}
- Include
fm.liveswitch.vpx.jar
and/or fm.liveswitch.openh264.jar
.
// Factory methods for the video decoders; return null from any of these to
// opt out of supporting that codec.
// Fixes: class name typo (RtcRmoteMedia -> RtcRemoteMedia), method names said
// "Encoder" although they override the decoder factories (createVp8Decoder etc.,
// matching the Swift/C# snippets), and each constructor call was missing `new`.
public class RemoteMedia extends fm.liveswitch.RtcRemoteMedia<...> {
    // VP8 decoder.
    public fm.liveswitch.VideoDecoder createVp8Decoder() {
        return new fm.liveswitch.vp8.Decoder();
    }
    // VP9 decoder.
    public fm.liveswitch.VideoDecoder createVp9Decoder() {
        return new fm.liveswitch.vp9.Decoder();
    }
    // H.264 decoder (OpenH264).
    public fm.liveswitch.VideoDecoder createH264Decoder() {
        return new fm.liveswitch.openh264.Decoder();
    }
}
- Include
FMLiveSwitchVpx.a
.
- Again, for Apple platforms, there is no H.264 library to include because the codec is supported natively.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
// VP8/VP9 decoders come from the VPx library; H.264 uses the native VideoToolbox decoder.
override func createVp8Decoder() -> FMLiveSwitchVideoDecoder {
return FMLiveSwitchVp8Decoder()
}
override func createVp9Decoder() -> FMLiveSwitchVideoDecoder {
return FMLiveSwitchVp9Decoder()
}
override func createH264Decoder() -> FMLiveSwitchVideoDecoder {
return FMLiveSwitchCocoaVideoToolboxH264Decoder()
}
}
- Include
FMLiveSwitchVpx.a
.
- Again, for Apple platforms, there is no H.264 library to include because the codec is supported natively.
// Identical to the previous snippet — the same decoders are used on both Apple platforms.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
override func createVp8Decoder() -> FMLiveSwitchVideoDecoder {
return FMLiveSwitchVp8Decoder()
}
override func createVp9Decoder() -> FMLiveSwitchVideoDecoder {
return FMLiveSwitchVp9Decoder()
}
override func createH264Decoder() -> FMLiveSwitchVideoDecoder {
return FMLiveSwitchCocoaVideoToolboxH264Decoder()
}
}
- Include
FM.LiveSwitch.Vpx.dll
.
// This variant supports only VP8/VP9 (no CreateH264Decoder override).
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
public override FM.LiveSwitch.VideoDecoder CreateVp8Decoder()
{
return new FM.LiveSwitch.Vp8.Decoder();
}
public override FM.LiveSwitch.VideoDecoder CreateVp9Decoder()
{
return new FM.LiveSwitch.Vp9.Decoder();
}
}
- As mentioned in the LocalMedia section, Unity doesn't support H.264 since it is unable to download OpenH264 runtime libraries from Cisco.
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
.
.
.
// Unity: VP8 and VP9 only; H.264 is unsupported (see the note above about OpenH264),
// so CreateH264Decoder returns null to disable the codec.
protected override FM.LiveSwitch.VideoDecoder CreateVp8Decoder()
{
return new FM.LiveSwitch.Vp8.Decoder();
}
protected override FM.LiveSwitch.VideoDecoder CreateVp9Decoder()
{
return new FM.LiveSwitch.Vp9.Decoder();
}
protected override FM.LiveSwitch.VideoDecoder CreateH264Decoder()
{
return null;
}
.
.
.
}
Finally, you need to implement the CreateImageConverter
method. The implementation is identical to the implementation for your App.LocalMedia
class. Simply return an instance of FM.LiveSwitch.Yuv.ImageConverter
. If you have not included the libyuv library yet, make sure you do so:
- Include
FM.LiveSwitch.Yuv.dll
.
// Fix: base-class name typo (RtcRemotelMedia -> RtcRemoteMedia).
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
    // Converts decoded frames to the output format required by the view sink (libyuv).
    public override FM.LiveSwitch.VideoPipe CreateImageConverter(FM.LiveSwitch.VideoFormat outputFormat)
    {
        return new FM.LiveSwitch.Yuv.ImageConverter(outputFormat);
    }
}
- Include
fm.liveswitch.yuv.jar
.
// Fixes: base-class name typo (RtcremoteMedia -> RtcRemoteMedia) and the missing
// `new` on the ImageConverter constructor call.
public class RemoteMedia extends fm.liveswitch.RtcRemoteMedia<...> {
    // Converts decoded frames to the output format required by the view sink (libyuv).
    public fm.liveswitch.VideoPipe createImageConverter(fm.liveswitch.VideoFormat outputFormat) {
        return new fm.liveswitch.yuv.ImageConverter(outputFormat);
    }
}
- Include
FMLiveSwitchYuv.a
.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
// Converts decoded frames to the output format required by the view sink (libyuv).
override func createImageConverter(outputFormat:FMLiveSwitchVideoFormat) -> FMLiveSwitchVideoPipe {
return FMLiveSwitchYuvImageConverter(outputFormat: outputFormat)
}
}
- Include
FMLiveSwitchYuv.a
.
// Identical to the previous snippet — the same YUV converter is used on both Apple platforms.
public class RemoteMedia : FMLiveSwitchRtcRemoteMedia {
override func createImageConverter(outputFormat:FMLiveSwitchVideoFormat) -> FMLiveSwitchVideoPipe {
return FMLiveSwitchYuvImageConverter(outputFormat: outputFormat)
}
}
- Include
FM.LiveSwitch.Yuv.dll
.
// Fix: base-class name typo (RtcRemotelMedia -> RtcRemoteMedia).
public class RemoteMedia : FM.LiveSwitch.RtcRemoteMedia<...>
{
    // Converts decoded frames to the output format required by the view sink (libyuv).
    public override FM.LiveSwitch.VideoPipe CreateImageConverter(FM.LiveSwitch.VideoFormat outputFormat)
    {
        return new FM.LiveSwitch.Yuv.ImageConverter(outputFormat);
    }
}