VideoFrame

Configuration of the video frame.

public class VideoFrame implements RefCounted {

  public interface Buffer extends RefCounted {

    @CalledByNative("Buffer") int getWidth();

    @CalledByNative("Buffer") int getHeight();

    @CalledByNative("Buffer") I420Buffer toI420();

    @Override @CalledByNative("Buffer") void release();

    @Override @CalledByNative("Buffer") void retain();

    @CalledByNative("Buffer")
    Buffer cropAndScale(
        int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);

    @CalledByNative("Buffer") @Nullable Buffer mirror(int frameRotation);

    @CalledByNative("Buffer") @Nullable Buffer rotate(int frameRotation);

    @CalledByNative("Buffer")
    @Nullable
    Buffer transform(int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth,
        int scaleHeight, int frameRotation);
  }

  public interface I420Buffer extends Buffer {

    @CalledByNative("I420Buffer") ByteBuffer getDataY();

    @CalledByNative("I420Buffer") ByteBuffer getDataU();

    @CalledByNative("I420Buffer") ByteBuffer getDataV();
    @CalledByNative("I420Buffer") int getStrideY();
    @CalledByNative("I420Buffer") int getStrideU();
    @CalledByNative("I420Buffer") int getStrideV();
  }

  public interface I422Buffer extends Buffer {
    @CalledByNative("I422Buffer") ByteBuffer getDataY();
    @CalledByNative("I422Buffer") ByteBuffer getDataU();
    @CalledByNative("I422Buffer") ByteBuffer getDataV();
    @CalledByNative("I422Buffer") int getStrideY();
    @CalledByNative("I422Buffer") int getStrideU();
    @CalledByNative("I422Buffer") int getStrideV();
  }
  public interface RgbaBuffer extends Buffer {
    @CalledByNative("RgbaBuffer") ByteBuffer getData();
  }

  public interface TextureBuffer extends Buffer {

    enum Type {

      OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES),

      RGB(GLES20.GL_TEXTURE_2D);
      private final int glTarget;
      private Type(final int glTarget) {
        this.glTarget = glTarget;
      }
      public int getGlTarget() {
        return glTarget;
      }
    }
    enum ContextType {
      EGL_CONTEXT_10,
      EGL_CONTEXT_14;
    }
    Type getType();

    @CalledByNative("TextureBuffer") int getTextureId();

    Matrix getTransformMatrix();

    @CalledByNative("TextureBuffer") EglBase.Context getEglBaseContext();
    @CalledByNative("TextureBuffer") Object getSourceTexturePool();
    @CalledByNative("TextureBuffer") long getNativeEglContext();
    @CalledByNative("TextureBuffer") int getEglContextType();
    @CalledByNative("TextureBuffer") float[] getTransformMatrixArray();

    @CalledByNative("TextureBuffer") int getSequence();
    @CalledByNative("TextureBuffer") long getFenceObject();
    @CalledByNative("TextureBuffer") boolean is10BitTexture();
  }
  public interface ColorSpace {
    enum Range {
      Invalid(0),
      Limited(1),
      Full(2);
      private final int range;
      private Range(int range) {
        this.range = range;
      }
      public int getRange() {
        return range;
      }
    }
    enum Matrix {
      RGB(0),
      BT709(1),
      Unspecified(2),
      FCC(4),
      BT470BG(5),
      SMPTE170M(6),
      SMPTE240M(7),
      YCOCG(8),
      BT2020_NCL(9),
      BT2020_CL(10),
      SMPTE2085(11),
      CDNCLS(12),
      CDCLS(13),
      BT2100_ICTCP(14);
      private final int matrix;
      private Matrix(int matrix) {
        this.matrix = matrix;
      }
      public int getMatrix() {
        return matrix;
      }
    }
    enum Transfer {
      BT709(1),
      Unspecified(2),
      GAMMA22(4),
      GAMMA28(5),
      SMPTE170M(6),
      SMPTE240M(7),
      LINEAR(8),
      LOG(9),
      LOG_SQRT(10),
      IEC61966_2_4(11),
      BT1361_ECG(12),
      IEC61966_2_1(13),
      BT2020_10(14),
      BT2020_12(15),
      SMPTEST2084(16),
      SMPTEST428(17),
      ARIB_STD_B67(18);
      private final int transfer;
      private Transfer(int transfer) {
        this.transfer = transfer;
      }
      public int getTransfer() {
        return transfer;
      }
    }
    enum Primary {
      BT709(1),
      Unspecified(2),
      BT470M(4),
      BT470BG(5),
      kSMPTE170M(6),
      kSMPTE240M(7),
      kFILM(8),
      kBT2020(9),
      kSMPTEST428(10),
      kSMPTEST431(11),
      kSMPTEST432(12),
      kJEDECP22(22);
      private final int primary;
      private Primary(int primary) {
        this.primary = primary;
      }
      public int getPrimary() {
        return primary;
      }
    }
    Range getRange();
    Matrix getMatrix();
    Transfer getTransfer();
    Primary getPrimary();
  }
  public enum SourceType {
    kFrontCamera,
    kBackCamera,
    kUnspecified,
  }
  public enum AlphaStitchMode {
    ALPHA_NO_STITCH(0),
    ALPHA_STITCH_UP(1),
    ALPHA_STITCH_BELOW(2),
    ALPHA_STITCH_LEFT(3),
    ALPHA_STITCH_RIGHT(4);
    private final int stitchMode;
    private AlphaStitchMode(int stitchMode) {
      this.stitchMode = stitchMode;
    }
    public int value() {
      return stitchMode;
    }
  }

  private Buffer buffer;

  private int rotation;

  private long timestampNs;
  private ColorSpace colorSpace;
  private SourceType sourceType;
  private float sampleAspectRatio;

  private AlphaStitchMode alphaStitchMode = AlphaStitchMode.ALPHA_NO_STITCH;
  private VideoFrameMetaInfo metaInfo = new VideoFrameMetaInfo();

  private @Nullable ByteBuffer alphaBuffer;
  private long nativeAlphaBuffer;

  public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
    this(buffer, rotation, timestampNs, new WrappedNativeColorSpace(), null, 0L, 1.0f,
        SourceType.kUnspecified.ordinal());
  }
  @CalledByNative
  public VideoFrame(Buffer buffer, int rotation, long timestampNs, ColorSpace colorSpace,
      ByteBuffer alphaBuffer, long nativeAlphaBuffer, float sampleAspectRatio, int sourceType) {
    if (buffer == null) {
      throw new IllegalArgumentException("buffer not allowed to be null");
    }
    if (rotation % 90 != 0) {
      throw new IllegalArgumentException("rotation must be a multiple of 90");
    }
    this.buffer = buffer;
    this.rotation = rotation;
    this.timestampNs = timestampNs;
    this.colorSpace = colorSpace;
    this.alphaBuffer = alphaBuffer;
    this.nativeAlphaBuffer = nativeAlphaBuffer;
    this.sampleAspectRatio = sampleAspectRatio;
    this.sourceType = SourceType.values()[sourceType];
  }
  @CalledByNative
  public SourceType getSourceType() {
    return sourceType;
  }
  public float getSampleAspectRatio() {
    return sampleAspectRatio;
  }

  @CalledByNative
  public Buffer getBuffer() {
    return buffer;
  }

  @CalledByNative
  public int getRotation() {
    return rotation;
  }
  @CalledByNative
  public int getAlphaStitchMode() {
    return alphaStitchMode.value();
  }
  @CalledByNative
  public void setAlphaStitchMode(int stitchMode) {
    alphaStitchMode = AlphaStitchMode.values()[stitchMode];
  }

  @CalledByNative
  public long getTimestampNs() {
    return timestampNs;
  }
  @CalledByNative
  public VideoFrameMetaInfo getMetaInfo() {
    return metaInfo;
  }

  public int getRotatedWidth() {
    if (rotation % 180 == 0) {
      return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_LEFT
                 || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_RIGHT)
          ? buffer.getWidth() / 2
          : buffer.getWidth();
    }
    return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_UP
               || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_BELOW)
        ? buffer.getHeight() / 2
        : buffer.getHeight();
  }

  public int getRotatedHeight() {
    if (rotation % 180 == 0) {
      return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_UP
                 || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_BELOW)
          ? buffer.getHeight() / 2
          : buffer.getHeight();
    }
    return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_LEFT
               || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_RIGHT)
        ? buffer.getWidth() / 2
        : buffer.getWidth();
  }

  public void replaceBuffer(Buffer buffer, int rotation, long timestampNs) {
    release();
    this.buffer = buffer;
    this.rotation = rotation;
    this.timestampNs = timestampNs;
  }
  @CalledByNative
  public ColorSpace getColorSpace() {
    return colorSpace;
  }
  public void setColorSpace(ColorSpace colorSpace) {
    this.colorSpace = colorSpace;
  }
  @CalledByNative
  private int getColorSpaceRange() {
    if (colorSpace == null) {
      return ColorSpace.Range.Invalid.getRange();
    }
    return colorSpace.getRange().getRange();
  }
  @CalledByNative
  private int getColorSpaceMatrix() {
    if (colorSpace == null) {
      return ColorSpace.Matrix.Unspecified.getMatrix();
    }
    return colorSpace.getMatrix().getMatrix();
  }
  @CalledByNative
  private int getColorSpaceTransfer() {
    if (colorSpace == null) {
      return ColorSpace.Transfer.Unspecified.getTransfer();
    }
    return colorSpace.getTransfer().getTransfer();
  }
  @CalledByNative
  private int getColorSpacePrimary() {
    if (colorSpace == null) {
      return ColorSpace.Primary.Unspecified.getPrimary();
    }
    return colorSpace.getPrimary().getPrimary();
  }
  @CalledByNative
  public ByteBuffer getAlphaBuffer() {
    return alphaBuffer;
  }
  public void retainAlphaBuffer() {
    JniCommon.nativeAddRef(nativeAlphaBuffer);
  }
  public void releaseAlphaBuffer() {
    JniCommon.nativeReleaseRef(nativeAlphaBuffer);
  }
  public void fillAlphaData(ByteBuffer buffer) {
    alphaBuffer = buffer;
  }

  @Override
  public void retain() {
    buffer.retain();
  }

  @Override
  @CalledByNative
  public void release() {
    buffer.release();
  }
}

Note that the buffer is passed as a pointer to a pointer: through this interface you cannot modify the buffer pointer itself, but you can modify the contents of the buffer.
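
For reference, a minimal sketch of wrapping an existing buffer in a VideoFrame with the three-argument constructor; obtainBufferFromCapturer() and consumeFrame() are hypothetical placeholders for your own capture and consumption code:

// A minimal sketch of wrapping an existing buffer in a VideoFrame.
VideoFrame.Buffer buffer = obtainBufferFromCapturer(); // e.g., an I420 or texture buffer
int rotation = 0;                     // must be a multiple of 90
long timestampNs = System.nanoTime(); // capture time in nanoseconds

VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
try {
  consumeFrame(frame); // hand the frame to a renderer, encoder, etc.
} finally {
  frame.release();     // drop this code path's reference to the underlying buffer
}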

Attributes

buffer
Attention: This parameter cannot be null; otherwise, errors may occur.
Buffer data. The methods associated with this parameter are as follows (see the usage sketch after this list):
  • getRotatedWidth: Gets the width of the rotated video frame.
  • getRotatedHeight: Gets the height of the rotated video frame.
  • replaceBuffer: Replaces the current buffer with new video frame data.
  • retain: Increments the reference count of the buffer by 1.
  • release: Decrements the reference count of the buffer by 1. When the count reaches 0, the buffer's resources are released.
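
A short sketch of typical buffer lifetime handling, assuming frame is a VideoFrame received from the SDK and workerHandler is a Handler bound to a worker thread (both hypothetical here):

int width = frame.getRotatedWidth();   // width after the rotation is applied
int height = frame.getRotatedHeight(); // height after the rotation is applied

frame.retain();              // +1: keep the buffer alive while the worker uses it
workerHandler.post(() -> {
  try {
    processFrame(frame);     // hypothetical processing step
  } finally {
    frame.release();         // -1: resources are freed once the count reaches 0
  }
});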
rotation
The clockwise rotation angle of the video frame before rendering, in degrees. Supported values are 0, 90, 180, and 270.
timestampNs
The timestamp of the video frame, in nanoseconds (ns).
colorSpace
By default, video frames use the Full Range and BT.709 standard color space configurations. For custom video capturing and rendering, you can configure these settings according to your needs. See VideoColorSpace. The methods associated with this parameter are as follows (see the sketch after this list):
  • getColorSpace: Gets the color space attribute of the video frame.
  • setColorSpace: Sets the color space attribute of the video frame.
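
As a sketch, assuming frame is a VideoFrame received in a custom renderer, the reported color space can be inspected before choosing a conversion path:

VideoFrame.ColorSpace cs = frame.getColorSpace();
if (cs != null && cs.getRange() == VideoFrame.ColorSpace.Range.Full
    && cs.getMatrix() == VideoFrame.ColorSpace.Matrix.BT709) {
  // The default configuration described above: full-range BT.709.
} else {
  // Adapt the converter or shader to the reported range, matrix, transfer, and primaries.
}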
sourceType
When using the SDK to capture video, this indicates the type of the video source.
  • kFrontCamera: The front camera.
  • kBackCamera: The rear camera.
  • kUnspecified: (Default) The video source type is unknown.
sampleAspectRatio
The sample aspect ratio, that is, the ratio of each pixel's width to its height.
alphaBuffer

The alpha channel data output by the portrait segmentation algorithm. This data matches the size of the video frame; each pixel value is in the range [0, 255], where 0 represents the background and 255 represents the foreground (portrait).

By setting this parameter, you can render the video background into various effects, such as transparency, a solid color, an image, or a video. A usage sketch follows the Attention list below.

Attention:
  • In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering.
  • Make sure that alphaBuffer is exactly the same size as the video frame (width × height), otherwise it may cause the app to crash.
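
A minimal sketch of attaching alpha data to a frame; runPortraitSegmentation() is a hypothetical helper that returns one byte per pixel of the frame:

int width = frame.getBuffer().getWidth();
int height = frame.getBuffer().getHeight();
byte[] alphaData = runPortraitSegmentation(frame); // hypothetical: width * height bytes, 0 = background, 255 = foreground

if (alphaData.length == width * height) {
  ByteBuffer alpha = ByteBuffer.allocateDirect(alphaData.length);
  alpha.put(alphaData);
  alpha.rewind();
  frame.fillAlphaData(alpha);
}
// A size mismatch must be rejected: passing it on may crash the app.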
alphaStitchMode
When the video frame contains alpha channel data, this indicates the position of alphaBuffer relative to the video frame (see the sketch after this list).
  • ALPHA_NO_STITCH(0): (Default) Only the video frame; alphaBuffer is not stitched to the video frame.
  • ALPHA_STITCH_UP(1): alphaBuffer is above the video frame.
  • ALPHA_STITCH_BELOW(2): alphaBuffer is below the video frame.
  • ALPHA_STITCH_LEFT(3): alphaBuffer is to the left of the video frame.
  • ALPHA_STITCH_RIGHT(4): alphaBuffer is to the right of the video frame.
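
For illustration, a sketch of an unrotated frame whose buffer holds the video and its alpha data stitched side by side, with the alpha data on the right:

frame.setAlphaStitchMode(VideoFrame.AlphaStitchMode.ALPHA_STITCH_RIGHT.value());
int videoWidth = frame.getRotatedWidth();   // half the stitched buffer width (rotation 0)
int videoHeight = frame.getRotatedHeight(); // full buffer height for left/right stitching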
metaInfo

The meta information in the video frame. To use this parameter, contact technical support.