Streaming System.Drawing.Bitmap with PixelFormat.Format32bppArgb #1366
Replies: 4 comments 5 replies
-
It doesn't seem to be very consistent. Now I'm getting the right colors.
-
I'm experimenting with ffmpeg VP8 encoding and it has better quality. But I'm still having some reliability issues.
-
This is what I have now, and it seems to have good enough quality, throughput and performance:

```csharp
using System;
using System.Buffers;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading;
using FFmpeg.AutoGen;

public unsafe class FfmpegVideoEncoder : IVideoEncoder
{
    private readonly object _sync = new();
    private readonly AVCodecID _codecId;
    private AVCodecContext* _codecContext;
    private AVFrame* _frame;
    private AVPacket* _packet;
    private SwsContext* _swsContext;
    private bool _disposed;
    private int _invocationCount;

    public FfmpegVideoEncoder(AVCodecID codecId)
    {
        _codecId = codecId;
    }

    ~FfmpegVideoEncoder()
    {
        Dispose(disposing: false);
    }

    public ArraySegment<byte> Encode(ArraySegment<byte> bgraData, int width, int height, int fps)
    {
        Interlocked.Increment(ref _invocationCount);
        lock (_sync)
        {
            // (Re)initialize the encoder whenever the frame size changes.
            if (_codecContext == null || _codecContext->width != width || _codecContext->height != height)
            {
                Reset(width, height, fps);
            }

            int result;
            try
            {
                // Make the frame writable before each scaling operation.
                if ((result = ffmpeg.av_frame_make_writable(_frame)) < 0)
                {
                    Reset(_codecContext->width, _codecContext->height, _codecContext->framerate.num);
                    return ArraySegment<byte>.Empty;
                }

                fixed (byte* pBgraData = bgraData.Array)
                {
                    // Convert the interleaved BGRA input into the encoder's YUV420P frame.
                    var srcData = new byte_ptrArray4 { [0] = pBgraData + bgraData.Offset };
                    var srcLinesize = new int_array4 { [0] = 4 * width };
                    if ((result = ffmpeg.sws_scale(
                        _swsContext,
                        srcData, srcLinesize, 0, _codecContext->height,
                        _frame->data, _frame->linesize)) < 0)
                    {
                        Reset(_codecContext->width, _codecContext->height, _codecContext->framerate.num);
                        return ArraySegment<byte>.Empty;
                    }

                    // TODO: _frame->pts = timestamp;
                    Debug.Assert(_frame != null && _frame->data[0] != null);
                    Debug.Assert(_frame->width == _codecContext->width && _frame->height == _codecContext->height);
                    Debug.Assert(_frame->format == (int)_codecContext->pix_fmt);

                    if ((result = ffmpeg.avcodec_send_frame(_codecContext, _frame)) < 0)
                    {
                        Reset(_codecContext->width, _codecContext->height, _codecContext->framerate.num);
                        return ArraySegment<byte>.Empty;
                    }
                }

                if ((result = ffmpeg.avcodec_receive_packet(_codecContext, _packet)) < 0)
                {
                    Reset(_codecContext->width, _codecContext->height, _codecContext->framerate.num);
                    return ArraySegment<byte>.Empty;
                }

                // Copy the encoded packet into a pooled buffer; the caller must return it to the pool.
                var packetDataSize = _packet->size;
                var packetData = ArrayPool<byte>.Shared.Rent(packetDataSize);
                Marshal.Copy((IntPtr)_packet->data, packetData, 0, packetDataSize);
                ffmpeg.av_packet_unref(_packet);

                // Drain (and discard) any further packets the encoder produced for this frame.
                while ((result = ffmpeg.avcodec_receive_packet(_codecContext, _packet)) == 0)
                {
                    ffmpeg.av_packet_unref(_packet);
                }

                return new(packetData, 0, packetDataSize);
            }
            finally
            {
                //ffmpeg.av_frame_unref(sourceFrame);
                ffmpeg.avcodec_flush_buffers(_codecContext);
            }
        }
    }

    public void Dispose()
    {
        Dispose(disposing: true);
        GC.SuppressFinalize(this);
    }

    private void Dispose(bool disposing)
    {
        if (!_disposed)
        {
            if (disposing)
            {
                // Dispose managed resources if any.
            }

            // Free unmanaged resources.
            Release();
            _disposed = true;
        }
    }

    private void Reset(int width, int height, int fps)
    {
        Release();

        var codec = ffmpeg.avcodec_find_encoder(_codecId);
        if (codec == null)
        {
            throw new EncoderException("Codec not found.");
        }

        const AVPixelFormat pixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
        if (codec->pix_fmts[0] != pixelFormat)
        {
            throw new EncoderException($"Invalid codec pixel format {codec->pix_fmts[0]}. Should be 'AV_PIX_FMT_YUV420P'.");
        }

        _codecContext = ffmpeg.avcodec_alloc_context3(codec);
        if (_codecContext == null)
        {
            throw new EncoderException("Could not allocate codec context.");
        }

        _codecContext->pix_fmt = pixelFormat;
        _codecContext->width = width;
        _codecContext->height = height;
        _codecContext->time_base.den = fps;
        _codecContext->time_base.num = 1;
        _codecContext->framerate.den = 1;
        _codecContext->framerate.num = fps;
        _codecContext->gop_size = fps < 5 ? 1 : fps;
        _codecContext->thread_count = 1; // Single thread to avoid race conditions
        _codecContext->bit_rate = 8_000_000; // Target bitrate
        _codecContext->rc_min_rate = _codecContext->bit_rate;
        _codecContext->rc_max_rate = _codecContext->bit_rate;
        _codecContext->rc_buffer_size = 16_000_000;
        _codecContext->qmin = 4;
        _codecContext->qmax = 48;
        _codecContext->max_b_frames = 0; // Disable B-frames for WebRTC to reduce latency
        _codecContext->flags |= ffmpeg.AV_CODEC_FLAG_LOW_DELAY;
        _codecContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_FAST;

        // Provide tunings for known codecs.
        switch (Marshal.PtrToStringAnsi((nint)codec->name))
        {
            case "libx264":
                ffmpeg.av_opt_set(_codecContext->priv_data, "profile", "baseline", 0).ThrowIfError();
                ffmpeg.av_opt_set(_codecContext->priv_data, "tune", "zerolatency", 0);
                break;
            case "h264_qsv":
                ffmpeg.av_opt_set(_codecContext->priv_data, "profile", "66" /* baseline */, 0).ThrowIfError();
                ffmpeg.av_opt_set(_codecContext->priv_data, "preset", "7" /* veryfast */, 0).ThrowIfError();
                break;
            case "libvpx":
                ffmpeg.av_opt_set(_codecContext->priv_data, "quality", "realtime", 0).ThrowIfError();
                break;
            default:
                break;
        }

        if (ffmpeg.avcodec_open2(_codecContext, codec, null) < 0)
        {
            throw new EncoderException("Could not open codec.");
        }

        _frame = ffmpeg.av_frame_alloc();
        if (_frame == null)
        {
            throw new EncoderException("Could not allocate frame.");
        }

        _frame->format = (int)_codecContext->pix_fmt;
        _frame->width = _codecContext->width;
        _frame->height = _codecContext->height;

        // Don't set linesize manually; let av_frame_get_buffer calculate it.
        if (ffmpeg.av_frame_get_buffer(_frame, 32) < 0)
        {
            throw new EncoderException("Could not allocate frame buffer.");
        }

        _packet = ffmpeg.av_packet_alloc();
        if (_packet == null)
        {
            throw new EncoderException("Could not allocate packet.");
        }

        _swsContext = ffmpeg.sws_getContext(
            width, height, AVPixelFormat.AV_PIX_FMT_BGRA,
            width, height, AVPixelFormat.AV_PIX_FMT_YUV420P,
            ffmpeg.SWS_BILINEAR, null, null, null);
        if (_swsContext == null)
        {
            throw new EncoderException("Could not create the scaling context.");
        }
    }

    private void Release()
    {
        if (_frame != null)
        {
            var tempFrame = _frame;
            _frame = null;
            ffmpeg.av_frame_free(&tempFrame);
        }

        if (_packet != null)
        {
            var tempPacket = _packet;
            _packet = null;
            ffmpeg.av_packet_free(&tempPacket);
        }

        if (_swsContext != null)
        {
            var tempSwsContext = _swsContext;
            _swsContext = null;
            ffmpeg.sws_freeContext(tempSwsContext);
        }

        if (_codecContext != null)
        {
            var tempCodecContext = _codecContext;
            _codecContext = null;
            ffmpeg.avcodec_free_context(&tempCodecContext);
        }

        Interlocked.Exchange(ref _invocationCount, 0);
    }
}
```

And used like this:

```csharp
using (var frame = _streamService.GetClonedFrame())
{
    lock (_encoderLock)
    {
        if (_videoEncoder == null)
        {
            switch (_videoFormat.Codec)
            {
                case VideoCodecsEnum.VP8:
                    _videoEncoder = _videoEncoderFactory.CreateEncoder(_id, "VP8");
                    break;
                default:
                    _videoCancellationTokenSource.Cancel();
                    throw new InvalidOperationException($"Invalid codec: {_videoFormat.Codec}");
            }
        }
    }

    var videoSample = _videoEncoder.Encode(frame.Bytes, frame.Width, frame.Height, fps);
    if (videoSample.Count > 0)
    {
        try
        {
            // SendVp8Frame expects a byte[], so copy when the pooled buffer is larger than the sample.
            var videoSampleBytes = videoSample.Array;
            if (videoSample.Array!.Length != videoSample.Count)
            {
                videoSampleBytes = videoSample.ToArray();
            }
            _peerConnection.VideoStream.SendVp8Frame(duration, _videoFormat.FormatID, videoSampleBytes);
        }
        finally
        {
            // The encoder rented this buffer from the shared pool; return it.
            ArrayPool<byte>.Shared.Return(videoSample.Array!);
        }
    }
}
```
-
I want to add a little bit if you use FFmpeg: there are already a few encoders that can directly accept RGB data, and one that takes planar GBRP. For GBRP you can just rearrange the interleaved RGB into planes; see the sketch below.
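A minimal sketch of that repack, assuming a 32-bit BGRA source buffer and an `AVFrame` already allocated for `AV_PIX_FMT_GBRP` (whose plane order in FFmpeg is G, B, R); the helper name is illustrative:

```csharp
// Illustrative helper: split interleaved BGRA pixels into the three planes of
// an AV_PIX_FMT_GBRP frame (data[0] = G, data[1] = B, data[2] = R); alpha is dropped.
private static unsafe void CopyBgraToGbrp(byte* bgra, int width, int height, AVFrame* frame)
{
    for (var y = 0; y < height; y++)
    {
        var src = bgra + (long)y * width * 4;
        var g = frame->data[0] + y * frame->linesize[0];
        var b = frame->data[1] + y * frame->linesize[1];
        var r = frame->data[2] + y * frame->linesize[2];
        for (var x = 0; x < width; x++)
        {
            b[x] = src[x * 4];     // BGRA byte order: blue first
            g[x] = src[x * 4 + 1];
            r[x] = src[x * 4 + 2];
        }
    }
}
```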
-
Hi,
I'm trying to stream a `System.Drawing.Bitmap` and I copied most of `VideoTestPatternSource`:
But I'm getting this:
instead of this:
What am I doing wrong?
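For context, the capture side boils down to something like the simplified sketch below (not the actual code; `GetBgraBytes` is an illustrative name). One detail worth keeping in mind: `Format32bppArgb` is laid out as B,G,R,A per pixel in memory, so the raw buffer is effectively BGRA, which is a classic cause of swapped red/blue channels.

```csharp
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;

// Simplified sketch: copy the raw pixel bytes out of a Bitmap via LockBits.
// The returned buffer is BGRA byte order, despite the "Argb" in the format name.
static byte[] GetBgraBytes(Bitmap bitmap)
{
    var rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
    var data = bitmap.LockBits(rect, ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
    try
    {
        // Stride can include row padding, so copy row by row.
        var bytes = new byte[bitmap.Width * bitmap.Height * 4];
        for (var y = 0; y < bitmap.Height; y++)
        {
            Marshal.Copy(data.Scan0 + y * data.Stride, bytes, y * bitmap.Width * 4, bitmap.Width * 4);
        }
        return bytes;
    }
    finally
    {
        bitmap.UnlockBits(data);
    }
}
```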