    Insertable Streams

    Note

    About the code examples on this page:

    • For .NET MAUI and Unity, use the C# code.
    • For macOS, use the iOS code.

    In WebRTC, "insertable streams" refers to the ability to manipulate raw audio or video content and introduce new components to streams.

    Some use cases for insertable streams include:

    • "Funny hats" or other video conferencing gadgets
    • Machine learning
    • Virtual reality gaming
    • Voice processing
    • Background removal

    LiveSwitch supports these same use cases. You can define processing steps in the encoding and decoding pipeline of a media track: take an existing track, add an effect to its frames, and create a new track from the processed frames.
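
    At a high level, the pattern is the same on every platform: subscribe to the video source's frame event and modify each frame's buffer in place. The following is a minimal C# sketch only; it uses the same _LocalMedia, OnRaiseFrame, and frame.Buffer members that appear in the complete examples later on this page.

    _LocalMedia.VideoSource.OnRaiseFrame += (frame) =>
    {
        // Modify the raw pixel data in place; the frame then continues
        // through the rest of the pipeline (encoding, sending, and so on).
        var frameBuffer = frame.Buffer;
        // ... read or write pixel values on frameBuffer here ...
    };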

    Supported Platforms

    LiveSwitch supports manipulating streams for native platforms only.

    How to Manipulate Streams in LiveSwitch

    Every media source has an OnRaiseFrame event that is raised for each frame the source produces. Intercept this event and modify the frame's data before it continues through the media pipeline.

    The following code samples show how to add a logo to a video stream in LiveSwitch, for C#, Android (Java), and iOS (Swift):

    C#

    System.Drawing.Color[,] logoPixels = ReadLogo("logo.png");
    int logoOffset = 10;
    
    _LocalMedia.VideoSource.OnRaiseFrame += (frame) =>
    {
        var frameBuffer = frame.Buffer;
        for (int y = 0; y < logoPixels.GetLength(0); y++)
        {
            for (int x = 0; x < logoPixels.GetLength(1); x++)
            {
                // Skip transparent pixels.
                if (logoPixels[y, x].A != 0)
                {
                    // Pixel offset of (x, y) within the frame, shifted by the logo offset.
                    int index = ((logoOffset + y) * frameBuffer.Width) + (logoOffset + x);
                    frameBuffer.SetRValue(logoPixels[y, x].R, index);
                    frameBuffer.SetGValue(logoPixels[y, x].G, index);
                    frameBuffer.SetBValue(logoPixels[y, x].B, index);
                }
            }
        }
    };
    
    // Helper function for reading the logo's pixels.
    private System.Drawing.Color[,] ReadLogo(string fileName)
    {
        System.Drawing.Bitmap image = new System.Drawing.Bitmap(fileName);
        System.Drawing.Color[,] pixels = new System.Drawing.Color[image.Height, image.Width];
    
        for (int y = 0; y < image.Height; y++)
        {
            for (int x = 0; x < image.Width; x++)
            {
                pixels[y, x] = image.GetPixel(x, y);
            }
        }
        return pixels;
    }
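
    To stop applying the effect (for example, before stopping the local media), detach the handler again. This is a minimal sketch using standard C# event syntax; it assumes the logo-drawing logic above is moved into a named method (here called AddLogoToFrame, a hypothetical name) so that the same delegate can be removed later:

    // Hypothetical named handler containing the logo-drawing logic shown above.
    private void AddLogoToFrame(FM.LiveSwitch.VideoFrame frame)
    {
        // ... same buffer manipulation as in the lambda above ...
    }

    // Attach and later detach the handler with the usual C# event operators.
    _LocalMedia.VideoSource.OnRaiseFrame += AddLogoToFrame;
    // ...
    _LocalMedia.VideoSource.OnRaiseFrame -= AddLogoToFrame;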
    
    Android

    int[][][] logoYuvData = readLogo("logo");
    int lumaPlaneLogoOffset = 10;
    int chromaPlaneLogoOffset = lumaPlaneLogoOffset / 2;
    
    localMedia.getVideoSource().addOnRaiseFrame((frame) -> {
        VideoBuffer frameBuffer = frame.getBuffer();
    
        // This example assumes the frame is in I420 format: a full-resolution luma (Y) plane
        // followed by U and V chroma planes at half the width and half the height.
        int lumaPlaneWidth = frameBuffer.getWidth();
        int chromaPlaneWidth = lumaPlaneWidth / 2;
    
        for (int y = 0; y < logoYuvData.length; y++) {
            for (int x = 0; x < logoYuvData[0].length; x++) {
                if (logoYuvData[y][x] != null) {
                    int lumaIndex = ((lumaPlaneLogoOffset + y) * lumaPlaneWidth) + (lumaPlaneLogoOffset + x);
                    frameBuffer.setYValue(logoYuvData[y][x][0], lumaIndex);
    
                    // Chroma is subsampled 2x2, so only write U and V for even rows and columns.
                    if (y % 2 == 0 && x % 2 == 0) {
                        int chromaIndex = ((chromaPlaneLogoOffset + y / 2) * chromaPlaneWidth) + (chromaPlaneLogoOffset + x / 2);
                        frameBuffer.setUValue(logoYuvData[y][x][1], chromaIndex);
                        frameBuffer.setVValue(logoYuvData[y][x][2], chromaIndex);
                    }
                }
            }
        }
    });
    
    // Helper function for reading the logo's pixels.
    private int[][][] readLogo(String drawableName) {
        Resources resources = context.getResources();
    
        // Disable bitmap scaling.
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inScaled = false;
    
        Bitmap image = BitmapFactory.decodeResource(resources, resources.getIdentifier(drawableName, "drawable", context.getPackageName()), options);
        int[][][] yuvData = new int[image.getHeight()][image.getWidth()][];
    
        for (int y = 0; y < image.getHeight(); y++) {
            for (int x = 0; x < image.getWidth(); x++) {
                int pixel = image.getPixel(x, y);
    
                // Skip transparent pixels.
                if (Color.alpha(pixel) == 0) {
                    yuvData[y][x] = null;
                } else {
                    yuvData[y][x] = rgbToYcbcr(Color.red(pixel), Color.green(pixel), Color.blue(pixel));
                }
            }
        }
        return yuvData;
    }
    
    // Helper function for converting RGB to YCbCr.
    private int[] rgbToYcbcr(int r, int g, int b) {
        int y = (int)(0.299 * r + 0.587 * g + 0.114 * b);
        int cb = (int)(-0.168736 * r - 0.331264 * g + 0.5 * b) + 128;
        int cr = (int)(0.5 * r - 0.418688 * g - 0.081312 * b) + 128;
        return new int[]{y, cb, cr};
    }
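
    As a quick check of the rgbToYcbcr helper above: pure red (r = 255, g = 0, b = 0) yields approximately y = 76, cb = 85, cr = 255, and any neutral gray (r = g = b) keeps cb and cr at or very near the neutral value of 128.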
    
    iOS

    let logoYuvData = readLogo(resourceName: "logo.png")
    let lumaPlaneLogoOffset = 10
    let chromaPlaneLogoOffset = lumaPlaneLogoOffset / 2
    
    self._localMedia!.videoSource().addOnRaiseFrame({ (frame: Any!) in
        let frame = frame as! FMLiveSwitchVideoFrame
        let buffer = frame.buffer() as! FMLiveSwitchVideoBuffer
        
        let lumaPlaneWidth = Int(buffer.width())
        let chromaPlaneWidth = lumaPlaneWidth / 2
        
        for y in 0 ..< logoYuvData.count {
            for x in 0 ..< logoYuvData[0].count {
                if let pixelData = logoYuvData[y][x] {
                    let lumaIndex = Int32(((lumaPlaneLogoOffset + y) * lumaPlaneWidth) + (lumaPlaneLogoOffset + x))
                    buffer.setYValue(pixelData[0], index: lumaIndex)
                    
                    if (y % 2 == 0 && x % 2 == 0) {
                        let chromaIndex = Int32(((chromaPlaneLogoOffset + y / 2) * chromaPlaneWidth) + (chromaPlaneLogoOffset + x / 2))
                        buffer.setUValue(pixelData[1], index: chromaIndex)
                        buffer.setVValue(pixelData[2], index: chromaIndex)
                    }
                }
            }
        }
    })
    
    // Helper function for reading the logo's pixels.
    func readLogo(resourceName: String) -> [[[Int32]?]] {
        let image = UIImage(imageLiteralResourceName: resourceName)
        let cgImage = image.cgImage!
        let data = cgImage.dataProvider!.data
        let rawBytes = CFDataGetBytePtr(data)!
        var yuvData = [[[Int32]?]](repeating: [[Int32]?](repeating: nil, count: cgImage.width), count: cgImage.height)
        
        let bytesPerPixel = cgImage.bitsPerPixel / cgImage.bitsPerComponent
        
        for y in 0 ..< cgImage.height {
            for x in 0 ..< cgImage.width {
                let byteOffset = (y * cgImage.bytesPerRow) + (x * bytesPerPixel)
    
                // Skip transparent pixels. This assumes the decoded CGImage stores
                // pixels as RGBA, with the alpha component in the last byte.
                if (rawBytes[byteOffset + 3] == 0) {
                    yuvData[y][x] = nil
                } else {
                    // Reverse alpha premultiplication
                    let alpha = Double(rawBytes[byteOffset + 3])
                    let originalR = Double(rawBytes[byteOffset]) * (255.0 / alpha)
                    let originalG = Double(rawBytes[byteOffset + 1]) * (255.0 / alpha)
                    let originalB = Double(rawBytes[byteOffset + 2]) * (255.0 / alpha)
                                        
                    yuvData[y][x] = rgbToYcbcr(r: originalR, g: originalG, b: originalB)
                }
            }
        }
        return yuvData
    }
    
    // Helper function for converting RGB to YCbCr.
    func rgbToYcbcr(r: Double, g: Double, b: Double) -> [Int32] {
        let y = Int32(0.299 * r + 0.587 * g + 0.114 * b)
        let cb = Int32(-0.168736 * r - 0.331264 * g + 0.5 * b) + 128
        let cr = Int32(0.5 * r - 0.418688 * g - 0.081312 * b) + 128
        return [y, cb, cr]
    }
    