In this tutorial, you learn how to broadcast audio and video with LiveSwitch.
LiveSwitch supports massive-scale broadcasting of audio and video data from SFU upstream connections to SFU downstream connections. In broadcasting, there is one Broadcaster and multiple Receivers.
Prerequisites
This tutorial requires the SFU connection app, or any app you have built on top of it in the earlier tutorials.
Create Participant
Because both the Broadcaster and the Receiver follow the same registration process, we create an abstract Participant class to handle registration and derive the Broadcaster and Receiver classes from it.
The Participant class is similar to the HelloWorldLogic class: it handles registration, token generation, channel creation, and other shared fields. For broadcasting, we need to add the following:
Media ID: We use an optional Media ID to identify the Broadcaster's media streams. Receivers must use the same Media ID when creating their downstream connections. The Media ID replaces the remote connection info.
Disable remote client events: In a normal conference, everyone in the channel is notified when clients come and go and when upstream connections are opened and closed. In broadcasting, these notifications are unnecessary and hurt the performance of the presentation, so we disable them (see the sketch after this list).
Abstract methods: Create the EstablishConnection, Start, and Stop methods for the Broadcaster and Receiver classes to implement.
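The Participant class below registers with a plain ChannelClaim and leaves remote client events enabled. If you want to suppress those notifications for a broadcast, a minimal sketch of the token generation, assuming the JavaScript ChannelClaim exposes a setDisableRemoteClientEvents setter (check the API reference for your SDK version), might look like this:

const channelClaim = new fm.liveswitch.ChannelClaim(this.channelId);
// Assumed setter: suppresses client join/leave notifications for this registration.
channelClaim.setDisableRemoteClientEvents(true);
const token: string = fm.liveswitch.Token.generateClientRegisterToken(this.applicationId, this.client.getUserId(), this.client.getDeviceId(), this.client.getId(), null, [channelClaim], this.sharedSecret);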
Note
When creating the Participant class, remember to replace the applicationId and sharedSecret with your own Application ID and Shared Secret, just as you did in the Hello World tutorial.
Paste the following code into the HelloWorld namespace in the Participant.ts file.
export abstract class Participant {
private applicationId: string = Config.applicationId;
private channelId: string = Config.channelId;
private gatewayUrl: string = Config.gatewayUrl;
private sharedSecret: string = Config.sharedSecret;
    // Client and channel
    public client: fm.liveswitch.Client;
public channel: fm.liveswitch.Channel;
    // Media ID, which will be used by both the broadcaster and receivers.
    protected presentationId = "presentation-id";
    // Layout manager
    protected layoutManager = new fm.liveswitch.DomLayoutManager(document.getElementById("my-container"));
constructor() {
// Log to console.
fm.liveswitch.Log.registerProvider(new fm.liveswitch.ConsoleLogProvider(fm.liveswitch.LogLevel.Debug));
}
public joinAsync(): fm.liveswitch.Future<Object> {
const promise = new fm.liveswitch.Promise<Object>();
        // Create the client.
        this.client = new fm.liveswitch.Client(this.gatewayUrl, this.applicationId);
        // Write registration state to log.
        this.client.addOnStateChange(() => fm.liveswitch.Log.debug(`Client is ${new fm.liveswitch.ClientStateWrapper(this.client.getState())}.`));
        // Generate a token (do this on the server to avoid exposing your shared secret).
        const token: string = fm.liveswitch.Token.generateClientRegisterToken(this.applicationId, this.client.getUserId(), this.client.getDeviceId(), this.client.getId(), null, [new fm.liveswitch.ChannelClaim(this.channelId)], this.sharedSecret);
        // Register client with token.
        this.client.register(token)
.then(channels => {
                // Store our channel reference.
                this.channel = channels[0];
fm.liveswitch.Log.info(`Client ${this.client.getId()} has successfully connected to channel ${this.channel.getId()}, Hello World!`);
this.establishConnection();
promise.resolve(null);
})
.fail(ex => {
fm.liveswitch.Log.error("Failed to register with Gateway.");
promise.reject(ex);
});
return promise;
}
    protected abstract establishConnection(): void;
    public abstract start(): fm.liveswitch.Future<Object>;
    public abstract stop(): fm.liveswitch.Future<Object>;
}
Create Broadcaster
The Broadcaster class opens an upstream connection to the server without receiving any remote connections. It essentially "broadcasts" its streams to its subscribers. We only need local media to build this class.
We create the class as follows:
Create SFU upstream connections by passing the Media ID into the CreateSfuUpstreamConnection channel method.
Start and stop the Broadcaster's local media in the same way that you start and stop the camera local media.
Paste the following code into the Broadcaster.java file.
public class Broadcaster extends Participant {
private LocalMedia<View> media;
    private static Broadcaster instance;
    private Broadcaster(Context context) {
super(context);
}
    public static Broadcaster getInstance(Context context) {
if (instance == null) {
instance = new Broadcaster(context);
}
return instance;
}
    @Override
    protected void establishConnection() {
// Create audio and video stream from local media.
AudioStream audioStream = (media.getAudioTrack() != null) ? new AudioStream(media) : null;
VideoStream videoStream = (media.getVideoTrack() != null) ? new VideoStream(media) : null;
        // Create an SFU upstream connection with local audio and video and the presentation ID.
SfuUpstreamConnection connection = channel.createSfuUpstreamConnection(audioStream, videoStream, presentationId);
connection.addOnStateChange((ManagedConnection conn) -> {
Log.info(String.format("Upstream connection %s is in a %s state.", conn.getId(), conn.getState().toString()));
if ((conn.getState() == ConnectionState.Closing) || (conn.getState() == ConnectionState.Failing)) {
if (conn.getRemoteClosed()) {
Log.info(String.format("Media server has closed the upstream connection %s.", conn.getId()));
}
            } else if (conn.getState() == ConnectionState.Failed) {
// Reconnect if connection failed.
establishConnection();
}
});
connection.open();
}
    @Override
    public Future<Object> start(final Activity activity, final RelativeLayout container) {
final Promise<Object> promise = new Promise<>();
activity.runOnUiThread(() -> {
// Create a new local media with audio and video enabled.
media = new CameraLocalMedia(context, false, false, new AecContext());
// Set local media in the layout.
layoutManager = new LayoutManager(container);
layoutManager.setLocalView(media.getView());
// Start capturing local media.
media.start().then(result -> {
promise.resolve(null);
}).fail(promise::reject);
});
return promise;
}
    @Override
    public Future<Object> stop() {
final Promise<Object> promise = new Promise<>();
if (media == null) {
promise.resolve(null);
} else {
// Stop capturing local media.
media.stop().then(result -> {
if (layoutManager != null) {
// Remove views from the layout.
layoutManager.unsetLocalView();
layoutManager = null;
}
if (media != null) {
media.destroy();
media = null;
}
promise.resolve(null);
}).fail(promise::reject);
}
return promise;
}
}
Paste the following code into the Broadcaster.swift file.
Paste the following code into the HelloWorld namespace in the Broadcaster.ts file.
export class Broadcaster extends Participant {
private localMedia: fm.liveswitch.LocalMedia;
protected establishConnection(): void {
        // Create an SFU upstream connection with local audio and video and the presentation ID.
        const audioStream = new fm.liveswitch.AudioStream(this.localMedia);
const videoStream = new fm.liveswitch.VideoStream(this.localMedia);
const connection: fm.liveswitch.SfuUpstreamConnection = this.channel.createSfuUpstreamConnection(audioStream, videoStream, this.presentationId);
connection.open();
}
public start(): fm.liveswitch.Future<Object> {
const promise = new fm.liveswitch.Promise<Object>();
        // Create local media with audio and video enabled.
        const audioEnabled = true;
const videoEnabled = true;
this.localMedia = new fm.liveswitch.LocalMedia(audioEnabled, videoEnabled);
        // Set local media in the layout.
        this.layoutManager.setLocalMedia(this.localMedia);
        // Start local media capturing.
        this.localMedia.start()
.then(() => {
fm.liveswitch.Log.debug("Media capture started.");
promise.resolve(null);
})
.fail(ex => {
fm.liveswitch.Log.error(ex.message);
promise.reject(ex);
});
return promise;
}
public stop(): fm.liveswitch.Future<Object> {
const promise = new fm.liveswitch.Promise<Object>();
        // Stop local media capturing.
        this.localMedia.stop()
.then(() => {
fm.liveswitch.Log.debug("Media capture stopped.");
promise.resolve(null);
})
.fail(ex => {
fm.liveswitch.Log.error(ex.message);
promise.reject(ex);
});
return promise;
}
}
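With the Broadcaster class in place, wiring it into a page follows the same pattern as the earlier tutorials: start the local media first, then register, and let joinAsync open the upstream connection. A minimal sketch, assuming a hypothetical broadcastButton element in your page:

// Hypothetical wiring; the element ID is an assumption, not part of the sample app.
const broadcaster = new Broadcaster();
document.getElementById("broadcastButton").addEventListener("click", () => {
    broadcaster.start()
        .then(() => {
            // Register with the Gateway; joinAsync then opens the SFU upstream connection.
            broadcaster.joinAsync();
        })
        .fail(ex => fm.liveswitch.Log.error("Could not start local media."));
});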
Create Receiver
The Receiver class only opens a downstream connection from the server. It acts as a subscriber that "subscribes" to the Broadcaster's streams. We only need remote media to build this class.
We create the class as follows:
Create SFU downstream connections by passing the Media ID into the CreateSfuDownstreamConnection channel method. This lets Receivers open their connections before the streams arrive. Because we use a Media ID, we don't rely on channel event handlers.
Paste the following code into the HelloWorld namespace in the Receiver.cs file.
public class Receiver : Participant
{
RemoteMedia media;
    private static Receiver _Context;
    public static Receiver Instance()
{
if(_Context == null)
{
_Context = new Receiver();
}
return _Context;
}
    public override void EstablishConnection()
{
AudioStream audioStream = new AudioStream(media);
VideoStream videoStream = new VideoStream(media);
SfuDownstreamConnection connection = Channel.CreateSfuDownstreamConnection(PresentationId, audioStream, videoStream);
LayoutManager.AddRemoteView(media.Id, media.View);
connection.OnStateChange += (conn) =>
{
Log.Info(string.Format("Downstream connection {0} is currently in a {1} state.", conn.Id, conn.State.ToString()));
if (conn.State == ConnectionState.Closing || conn.State == ConnectionState.Failing)
{
var layoutManager = LayoutManager;
if (layoutManager != null)
{
layoutManager.RemoveRemoteView(media.Id);
}
Dispatcher.Invoke(new Action(() =>
{
media.Destroy();
}));
}
        else if (conn.State == ConnectionState.Failed)
{
EstablishConnection();
}
};
connection.Open();
}
    public override Future<object> Start(MainWindow window)
{
Promise<object> promise = new Promise<object>();
LayoutManager = new FM.LiveSwitch.Wpf.LayoutManager(window.videoContainer);
media = new RemoteMedia(false, false, new AecContext());
Dispatcher = window.Dispatcher;
promise.Resolve(null);
return promise;
}
    public override Future<object> Stop()
{
Promise<object> promise = new Promise<object>();
if(LayoutManager != null)
{
LayoutManager.RemoveRemoteViews();
LayoutManager = null;
}
        // Resolve the promise so the returned future completes.
        promise.Resolve(null);
        return promise;
}
}
Paste the following code into the Receiver.java file.
public class Receiver extends Participant {
private RemoteMedia media;
    private final Handler handler;
    private static Receiver instance;
    private Receiver(Context context) {
super(context);
this.handler = new Handler(context.getMainLooper());
}
    public static Receiver getInstance(Context context) {
if (instance == null) {
instance = new Receiver(context);
}
return instance;
}
    @Override
    protected void establishConnection() {
// Create remote media.
media = new RemoteMedia(context, false, false, new AecContext());
VideoStream videoStream = new VideoStream(media);
AudioStream audioStream = new AudioStream(media);
        // Create an SFU downstream connection with remote audio and video and the presentation ID.
SfuDownstreamConnection connection = channel.createSfuDownstreamConnection(presentationId, audioStream, videoStream);
// Adding remote view to UI.
handler.post(() -> layoutManager.addRemoteView(media.getId(), media.getView()));
connection.addOnStateChange((ManagedConnection conn) -> {
Log.info(String.format("Downstream connection %s is currently in a %s state.", conn.getId(), conn.getState().toString()));
if ((conn.getState() == ConnectionState.Closing) || (conn.getState() == ConnectionState.Failing)) {
if (conn.getRemoteClosed()) {
Log.info(String.format("Media server has closed the downstream connection %s.", conn.getId()));
}
handler.post(() -> {
// Remove the remote media from the layout if the remote is closed.
layoutManager.removeRemoteView(media.getId());
media.destroy();
});
            } else if (conn.getState() == ConnectionState.Failed) {
// Reconnect if connection failed.
establishConnection();
}
});
connection.open();
}
    @Override
    public Future<Object> start(final Activity activity, final RelativeLayout container) {
final Promise<Object> promise = new Promise<>();
activity.runOnUiThread(() -> {
layoutManager = new LayoutManager(container);
promise.resolve(null);
});
return promise;
}
    @Override
    public Future<Object> stop() {
if (layoutManager != null) {
layoutManager.removeRemoteViews();
layoutManager = null;
}
return Promise.resolveNow();
}
}
Paste the following code into the Receiver.swift file.
class Receiver : Participant {
var _media: RemoteMedia?
    static let instance = Receiver()
    override init() {
super.init()
}
    override func establishConnection() {
let audioStream: FMLiveSwitchAudioStream = (_media?.audioTrack() != nil ? FMLiveSwitchAudioStream.init(remoteMedia: _media) : nil)!
let videoStream: FMLiveSwitchVideoStream = (_media?.videoTrack() != nil ? FMLiveSwitchVideoStream.init(remoteMedia: _media) : nil)!
let connection = _channel?.createSfuDownstreamConnection(withRemoteMediaId: _presenterId, audioStream: audioStream, videoStream: videoStream)
DispatchQueue.main.async {
self._layoutManager?.addRemoteView(withId: self._media?.id(), view: self._media?.view())
}
        connection?.addOnStateChange({ [weak self] (obj: Any!) in
            let conn = obj as! FMLiveSwitchSfuDownstreamConnection
            let state = conn.state()
FMLiveSwitchLog.info(withMessage: "Downstream connection \(String(describing: conn.id()!)) is currently in a \(String(describing: FMLiveSwitchConnectionStateWrapper(value: state).description()!)).")
if (state == FMLiveSwitchConnectionState.closing || state == FMLiveSwitchConnectionState.failing) {
if (conn.remoteClosed()) {
FMLiveSwitchLog.info(withMessage: "Downstream connection \(String(describing: conn.id()!)) is closed by media server.")
}
if (self?._layoutManager != nil) {
DispatchQueue.main.async {
self?._layoutManager?.removeRemoteView(withId: self?._media?.id())
}
}
                // Taking down the audio session from the remote media for our local media.
                // https://forums.developer.apple.com/thread/22133
                // https://trac.pjsip.org/repos/ticket/1697
                // Workaround to fix reduced volume issue after the teardown of audio unit.
                do {
                    if #available(iOS 10.0, *) {
                        try AVAudioSession.sharedInstance().setCategory(.record, mode: .default)
} else {
AVAudioSession.sharedInstance().perform(NSSelectorFromString("setCategory:error:"), with: AVAudioSession.Category.record)
}
} catch {
FMLiveSwitchLog.error(withMessage: "Could not set audio session category for local media.")
}
self?._media!.destroy()
do {
if #available(iOS 10.0, *) {
                        try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: [.allowBluetooth, AVAudioSession.CategoryOptions.defaultToSpeaker])
} else {
                        AVAudioSession.sharedInstance().perform(NSSelectorFromString("setCategory:withOptions:error:"), with: AVAudioSession.Category.playAndRecord, with: [.allowBluetooth, AVAudioSession.CategoryOptions.defaultToSpeaker])
}
} catch {
FMLiveSwitchLog.error(withMessage: "Could not set audio session category for local media.")
}
}
})
connection?.open()
}
    override func start(container: UIView) -> FMLiveSwitchFuture {
let promise = FMLiveSwitchPromise()
self._media = RemoteMedia(disableAudio: false, disableVideo: false, aecContext: nil)
self._layoutManager = FMLiveSwitchCocoaLayoutManager(container: container)
promise?.resolve(withResult: nil)
return promise!
}
    override func stop() -> FMLiveSwitchFuture {
let promise = FMLiveSwitchPromise()
if (self._layoutManager != nil) {
DispatchQueue.main.async {
self._layoutManager?.removeRemoteViews()
self._layoutManager = nil
}
}
promise?.resolve(withResult: nil)
return promise!
}
}
Paste the following code into the HelloWorld namespace in the Receiver.ts file.
export class Receiver extends Participant {
private remoteMedia: fm.liveswitch.RemoteMedia;
protected establishConnection(): void {
        // Create remote media.
        this.remoteMedia = new fm.liveswitch.RemoteMedia();
const audioStream = new fm.liveswitch.AudioStream(this.remoteMedia);
const videoStream = new fm.liveswitch.VideoStream(this.remoteMedia);
        // Add remote media to the layout.
        this.layoutManager.addRemoteMedia(this.remoteMedia);
        // Create an SFU downstream connection with remote audio and video and the presentation ID.
        const connection: fm.liveswitch.SfuDownstreamConnection = this.channel.createSfuDownstreamConnection(this.presentationId, audioStream, videoStream);
connection.addOnStateChange(conn => {
            // Remove the remote media from the layout if the remote is closed.
            if (conn.getRemoteClosed()) {
this.layoutManager.removeRemoteMedia(this.remoteMedia);
}
});
connection.open();
}
    // Not needed because receiver only receives media from the broadcaster.
    public start(): fm.liveswitch.Future<Object> {
return fm.liveswitch.Promise.resolveNow();
}
    // Not needed because receiver only receives media from the broadcaster.
    public stop(): fm.liveswitch.Future<Object> {
return fm.liveswitch.Promise.resolveNow();
}
}
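The Receiver is wired the same way. Because its start and stop are no-ops in the web version, joining the channel is all it takes to open the downstream connection. A minimal sketch, again assuming a hypothetical watchButton element:

// Hypothetical wiring; the element ID is an assumption, not part of the sample app.
const receiver = new Receiver();
document.getElementById("watchButton").addEventListener("click", () => {
    receiver.joinAsync()
        .then(() => fm.liveswitch.Log.info("Receiving the broadcast."))
        .fail(() => fm.liveswitch.Log.error("Could not join the channel."));
});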
Uncomment UI Components
Now go to the UI component files and uncomment the code for broadcasting.