AudioFrame
Raw audio data.
/// <summary>
/// A frame of raw audio data.
/// </summary>
public class AudioFrame
{
    /// <summary>
    /// Creates an AudioFrame with default values: 16-bit PCM format,
    /// two bytes per sample, zero sample/channel/rate counts, an empty
    /// data buffer, and zeroed timestamp and sync fields.
    /// </summary>
    public AudioFrame()
        : this(AUDIO_FRAME_TYPE.FRAME_TYPE_PCM16, 0, BYTES_PER_SAMPLE.TWO_BYTES_PER_SAMPLE, 0, 0,
            new byte[0], 0, 0)
    {
    }

    /// <summary>
    /// Creates an AudioFrame populated with the given values.
    /// </summary>
    /// <param name="type">Audio frame type. See AUDIO_FRAME_TYPE.</param>
    /// <param name="samplesPerChannel">Number of samples per channel.</param>
    /// <param name="bytesPerSample">Number of bytes per sample. See BYTES_PER_SAMPLE.</param>
    /// <param name="channels">Number of channels: 1 for mono, 2 for stereo (stereo data is interleaved).</param>
    /// <param name="samplesPerSec">Number of samples per second per channel.</param>
    /// <param name="buffer">Audio data buffer; expected length is samplesPerChannel * channels * bytesPerSample.</param>
    /// <param name="renderTimeMs">Render timestamp of the external audio frame, used for ordering and A/V sync.</param>
    /// <param name="avsync_type">Reserved parameter.</param>
    public AudioFrame(AUDIO_FRAME_TYPE type, int samplesPerChannel, BYTES_PER_SAMPLE bytesPerSample, int channels, int samplesPerSec,
        byte[] buffer, long renderTimeMs, int avsync_type)
    {
        this.type = type;
        this.samplesPerChannel = samplesPerChannel;
        this.bytesPerSample = bytesPerSample;
        this.channels = channels;
        this.samplesPerSec = samplesPerSec;
        this.RawBuffer = buffer;
        this.renderTimeMs = renderTimeMs;
        this.avsync_type = avsync_type;
    }

    /// <summary>Audio frame type. See AUDIO_FRAME_TYPE.</summary>
    public AUDIO_FRAME_TYPE type { get; set; }

    /// <summary>Number of samples per channel.</summary>
    public int samplesPerChannel { get; set; }

    /// <summary>Number of bytes per sample. See BYTES_PER_SAMPLE.</summary>
    public BYTES_PER_SAMPLE bytesPerSample { get; set; }

    /// <summary>Number of channels: 1 for mono, 2 for stereo (stereo data is interleaved).</summary>
    public int channels { get; set; }

    /// <summary>Number of samples per second per channel.</summary>
    public int samplesPerSec { get; set; }

    /// <summary>Audio data buffer (for stereo, the data is interleaved).</summary>
    public byte[] RawBuffer { get; set; }

    /// <summary>Render timestamp of the external audio frame, used for ordering and A/V sync.</summary>
    public long renderTimeMs { get; set; }

    /// <summary>Reserved parameter.</summary>
    public int avsync_type { get; set; }
}
Properties
- type
- Audio frame type. See AUDIO_FRAME_TYPE.
- samplesPerChannel
- Number of samples per channel.
- bytesPerSample
- Number of bytes per sample. For PCM, typically 16 bits, i.e., two bytes.
- channels
- Number of channels (for stereo, data is interleaved).
- 1: Mono
- 2: Stereo
- samplesPerSec
- Number of samples per second per channel.
- RawBuffer
- Audio data buffer (for stereo, data is interleaved).
  Buffer size: buffer = samplesPerChannel × channels × bytesPerSample.
- renderTimeMs
- Render timestamp of the external audio frame. You can use this timestamp to restore the order of audio frames. In scenarios with video (including those using external video sources), this parameter can be used to achieve audio-video synchronization.
- avsync_type
- Reserved parameter.