From 3bca187fcef5d8c2dedfeb6afe75b0036856f3ee Mon Sep 17 00:00:00 2001
From: JoJoBond
Date: Sat, 22 Jan 2022 15:12:26 +0100
Subject: [PATCH] Video.DirectShow: Added property to select MediaSubTypes.
 Added raw frame event.

---
 Sources/Video.DirectShow/Internals/Uuids.cs  |  55 +++-
 .../Video.DirectShow/VideoCaptureDevice.cs   | 264 +++++++++++++++---
 Sources/Video/FrameDataFormat.cs             | 147 ++++++++++
 Sources/Video/Video.csproj                   |   1 +
 Sources/Video/VideoEvents.cs                 |  74 +++++
 5 files changed, 494 insertions(+), 47 deletions(-)
 create mode 100644 Sources/Video/FrameDataFormat.cs

diff --git a/Sources/Video.DirectShow/Internals/Uuids.cs b/Sources/Video.DirectShow/Internals/Uuids.cs
index f60aad11..6c1eac90 100644
--- a/Sources/Video.DirectShow/Internals/Uuids.cs
+++ b/Sources/Video.DirectShow/Internals/Uuids.cs
@@ -150,7 +150,7 @@ static internal class MediaType
     static internal class MediaSubType
     {
         /// <summary>
-        /// YUY2 (packed 4:2:2).
+        /// YUYV (packed 4:2:2).
         /// </summary>
         /// 
         /// <remarks>Equals to MEDIASUBTYPE_YUYV.</remarks>
@@ -158,6 +158,24 @@ static internal class MediaSubType
         /// 
         public static readonly Guid YUYV =
             new Guid( 0x56595559, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 );
 
+        /// <summary>
+        /// UYVY (packed 4:2:2).
+        /// </summary>
+        /// 
+        /// <remarks>Equals to MEDIASUBTYPE_UYVY.</remarks>
+        /// 
+        public static readonly Guid UYVY =
+            new Guid( 0x59565955, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 );
+
+        /// <summary>
+        /// YUY2 (packed 4:2:2).
+        /// </summary>
+        /// 
+        /// <remarks>Equals to MEDIASUBTYPE_YUY2.</remarks>
+        /// 
+        public static readonly Guid YUY2 =
+            new Guid( 0x32595559, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 );
+
         /// <summary>
         /// IYUV.
         /// </summary>
@@ -265,6 +283,41 @@ static internal class MediaSubType
         /// 
         public static readonly Guid MJpeg =
             new Guid( 0x47504A4D, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 );
+
+        public static Guid FromImageDataFormat( FrameDataFormat dataFormat )
+        {
+            switch ( dataFormat )
+            {
+                case FrameDataFormat.RGB1:
+                    return RGB1;
+                case FrameDataFormat.RGB4:
+                    return RGB4;
+                case FrameDataFormat.RGB8:
+                    return RGB8;
+                case FrameDataFormat.RGB555:
+                    return RGB555;
+                case FrameDataFormat.RGB565:
+                    return RGB565;
+                case FrameDataFormat.RGB24:
+                    return RGB24;
+                case FrameDataFormat.RGB32:
+                    return RGB32;
+                case FrameDataFormat.YUYV:
+                    return YUYV;
+                case FrameDataFormat.UYVY:
+                    return UYVY;
+                case FrameDataFormat.YUY2:
+                    return YUY2;
+                case FrameDataFormat.IYUV:
+                    return IYUV;
+                case FrameDataFormat.DVSD:
+                    return DVSD;
+                case FrameDataFormat.MJPG:
+                    return MJpeg;
+                default:
+                    return Guid.Empty;
+            }
+        }
     }
 
     /// <summary>
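Note on the new GUIDs: DirectShow derives the MEDIASUBTYPE GUID of a FOURCC-based format by placing the little-endian FOURCC in the first field of the fixed template {XXXXXXXX-0000-0010-8000-00AA00389B71}. The sketch below is an illustration only (the FourCcGuid helper is not part of the patch) and shows how the UYVY and YUY2 constants added above follow that rule.

    using System;
    using System.Text;

    static class FourCcGuid
    {
        // Build the DirectShow media subtype GUID for a four-character code such as "YUY2".
        public static Guid FromFourCc( string fourCc )
        {
            byte[] c = Encoding.ASCII.GetBytes( fourCc );
            uint data1 = (uint) ( c[0] | ( c[1] << 8 ) | ( c[2] << 16 ) | ( c[3] << 24 ) );

            return new Guid( data1, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 );
        }
    }

    // FromFourCc( "UYVY" ) yields 0x59565955-... (MediaSubType.UYVY above)
    // FromFourCc( "YUY2" ) yields 0x32595559-... (MediaSubType.YUY2 above)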
diff --git a/Sources/Video.DirectShow/VideoCaptureDevice.cs b/Sources/Video.DirectShow/VideoCaptureDevice.cs
index fe20f26c..63989d64 100644
--- a/Sources/Video.DirectShow/VideoCaptureDevice.cs
+++ b/Sources/Video.DirectShow/VideoCaptureDevice.cs
@@ -66,13 +66,18 @@ public class VideoCaptureDevice : IVideoSource
         private VideoCapabilities videoResolution = null;
         private VideoCapabilities snapshotResolution = null;
 
+        // video and snapshot formats to set
+        private FrameDataFormat videoDataFormat = FrameDataFormat.Unknown;
+        private FrameDataFormat snapshotDataFormat = FrameDataFormat.Unknown;
+
+        private FrameDataFormat selectedVideoDataFormat = FrameDataFormat.Unknown;
+        private FrameDataFormat selectedSnapshotDataFormat = FrameDataFormat.Unknown;
+
         // provide snapshots or not
         private bool provideSnapshots = false;
         // JPEG encoding preference
         private bool preferJpegEncoding = true;
-        // check if JPEG encoding is enabled
-        private bool jpegEncodingEnabled = false;
 
         private Thread thread = null;
         private ManualResetEvent stopEvent = null;
@@ -263,6 +268,19 @@ public bool JpegEncodingEnabled
        /// 
        public event NewFrameEventHandler NewFrame;
 
+        /// <summary>
+        /// New raw frame event.
+        /// </summary>
+        /// 
+        /// <remarks><para>Notifies clients about new available raw frame data from the video source.</para>
+        /// 
+        /// <para><note>Since the video source may have multiple clients, each client is responsible for
+        /// making a copy (cloning) of the passed raw frame data, because the video source discards its
+        /// own original copy after notifying clients.</note></para>
+        /// </remarks>
+        /// 
+        public event NewRawFrameEventHandler NewRawFrame;
+
        /// <summary>
        /// Snapshot frame event.
        /// </summary>
@@ -281,6 +299,24 @@ public bool JpegEncodingEnabled
        /// 
        public event NewFrameEventHandler SnapshotFrame;
 
+        /// <summary>
+        /// Snapshot raw frame event.
+        /// </summary>
+        /// 
+        /// <remarks><para>Notifies clients about new available raw snapshot frame data - the one which comes
+        /// when the camera's snapshot/shutter button is pressed.</para>
+        /// 
+        /// <para>See documentation to <see cref="SnapshotFrame"/> for additional information.</para>
+        /// 
+        /// <para><note>Since the video source may have multiple clients, each client is responsible for
+        /// making a copy (cloning) of the passed raw snapshot frame data, because the video source discards its
+        /// own original copy after notifying clients.</note></para>
+        /// </remarks>
+        /// 
+        public event NewRawFrameEventHandler SnapshotRawFrame;
+
        /// <summary>
        /// Video source error event.
        /// </summary>
@@ -457,6 +493,39 @@ public VideoCapabilities SnapshotResolution
            set { snapshotResolution = value; }
        }
 
+        /// <summary>
+        /// Video data format to set.
+        /// </summary>
+        /// 
+        /// <remarks><para>The property allows setting one of the video data formats supported by the camera.
+        /// The property must be set before the camera is started in order to take effect.</para>
+        /// 
+        /// <para>Default value of the property is set to <see cref="FrameDataFormat.Unknown"/>, which means
+        /// RGB24 or MJPEG is used.</para>
+        /// </remarks>
+        /// 
+        public FrameDataFormat VideoDataFormat
+        {
+            get { return videoDataFormat; }
+            set { videoDataFormat = value; }
+        }
+
+        /// <summary>
+        /// Snapshot data format to set.
+        /// </summary>
+        /// 
+        /// <remarks><para>The property allows setting one of the snapshot data formats supported by the camera.
+        /// The property must be set before the camera is started in order to take effect.</para>
+        /// 
+        /// <para>Default value of the property is set to <see cref="FrameDataFormat.Unknown"/>, which means
+        /// RGB24 is used.</para>
+        /// </remarks>
+        /// 
+        public FrameDataFormat SnapshotDataFormat
+        {
+            get { return snapshotDataFormat; }
+            set { snapshotDataFormat = value; }
+        }
+
        /// <summary>
        /// Video capabilities of the device.
        /// </summary>
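For reference, a minimal usage sketch (not part of the patch) of the new property and event. The device index and the requested format are assumptions for illustration; the handler copies the data it keeps, as the event remarks above require. With a packed YUV format only the raw frame event is raised.

    using System;
    using AForge.Video;
    using AForge.Video.DirectShow;

    class RawFrameExample
    {
        static void Main( )
        {
            FilterInfoCollection devices = new FilterInfoCollection( FilterCategory.VideoInputDevice );
            VideoCaptureDevice camera = new VideoCaptureDevice( devices[0].MonikerString );

            // request packed 4:2:2 frames instead of the default RGB24/MJPEG negotiation
            camera.VideoDataFormat = FrameDataFormat.YUY2;
            camera.NewRawFrame += Camera_NewRawFrame;

            camera.Start( );
            Console.ReadKey( );
            camera.SignalToStop( );
            camera.WaitForStop( );
        }

        private static void Camera_NewRawFrame( object sender, NewRawFrameEventArgs eventArgs )
        {
            // clone the payload before keeping it beyond the event handler
            byte[] copy = (byte[]) eventArgs.RawFrame.Clone( );

            Console.WriteLine( "{0} frame, {1}x{2}, {3} bytes",
                eventArgs.Format, eventArgs.Width, eventArgs.Height, copy.Length );
        }
    }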
@@ -1115,20 +1184,45 @@ private void WorkerThread( bool runGraph )
            graph.AddFilter( videoGrabberBase, "grabber_video" );
            graph.AddFilter( snapshotGrabberBase, "grabber_snapshot" );
 
-            // check if we need and can do JPEG encoding
-            if ( preferJpegEncoding )
-            {
-                jpegEncodingEnabled = IsJpegEncodingAvailable( sourceBase );
-            }
-
            // set media types
            AMMediaType videoMediaType = new AMMediaType( );
            videoMediaType.MajorType = MediaType.Video;
-            videoMediaType.SubType = ( jpegEncodingEnabled ) ? MediaSubType.MJpeg : MediaSubType.RGB24;
+
+            Guid videoSubType = MediaSubType.FromImageDataFormat( videoDataFormat );
+            if ( videoSubType != Guid.Empty )
+            {
+                selectedVideoDataFormat = videoDataFormat;
+                videoMediaType.SubType = videoSubType;
+            }
+            else
+            {
+                // check if we need and can do JPEG encoding
+                if ( preferJpegEncoding && IsJpegEncodingAvailable( sourceBase ) )
+                {
+                    selectedVideoDataFormat = FrameDataFormat.MJPG;
+                    videoMediaType.SubType = MediaSubType.MJpeg;
+                }
+                else
+                {
+                    selectedVideoDataFormat = FrameDataFormat.RGB24;
+                    videoMediaType.SubType = MediaSubType.RGB24;
+                }
+            }
 
            AMMediaType snapshotMediaType = new AMMediaType( );
            snapshotMediaType.MajorType = MediaType.Video;
-            snapshotMediaType.SubType = MediaSubType.RGB24;
+
+            Guid snapshotSubType = MediaSubType.FromImageDataFormat( snapshotDataFormat );
+            if ( snapshotSubType != Guid.Empty )
+            {
+                selectedSnapshotDataFormat = snapshotDataFormat;
+                snapshotMediaType.SubType = snapshotSubType;
+            }
+            else
+            {
+                selectedSnapshotDataFormat = FrameDataFormat.RGB24;
+                snapshotMediaType.SubType = MediaSubType.RGB24;
+            }
 
            videoSampleGrabber.SetMediaType( videoMediaType );
            snapshotSampleGrabber.SetMediaType( snapshotMediaType );
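The selection above (with the snapshot branch using the snapshot fields) reduces to the following rule. This restatement is an illustration only, not code from the patch, and assumes access to the internal MediaSubType class from Uuids.cs.

    // Effective data format selection for the video pin (illustration only).
    static FrameDataFormat ResolveVideoFormat( FrameDataFormat requested, bool preferJpeg, bool jpegAvailable )
    {
        if ( MediaSubType.FromImageDataFormat( requested ) != Guid.Empty )
            return requested;                    // an explicitly requested, supported format wins

        if ( preferJpeg && jpegAvailable )
            return FrameDataFormat.MJPG;         // otherwise prefer MJPEG when the source offers it

        return FrameDataFormat.RGB24;            // final fallback
    }

    // The snapshot pin follows the same rule, but without the MJPEG step.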
@@ -1372,7 +1466,8 @@ private void WorkerThread( bool runGraph )
                PlayingFinished( this, reasonToStop );
            }
 
-            jpegEncodingEnabled = false;
+            selectedSnapshotDataFormat = FrameDataFormat.Unknown;
+            selectedVideoDataFormat = FrameDataFormat.Unknown;
        }
 
        // Check if the filter can provide JPEG encoded images
@@ -1698,13 +1793,25 @@ private void SetCurrentCrossbarInput( IAMCrossbar crossbar, VideoInput videoInpu
        /// 
        private void OnNewFrame( Bitmap image )
        {
-            framesReceived++;
-            bytesReceived += image.Width * image.Height * ( Bitmap.GetPixelFormatSize( image.PixelFormat ) >> 3 );
-
            if ( ( !stopEvent.WaitOne( 0, false ) ) && ( NewFrame != null ) )
                NewFrame( this, new NewFrameEventArgs( image ) );
        }
 
+        /// <summary>
+        /// Notifies clients about new raw frame data.
+        /// </summary>
+        /// 
+        /// <param name="data">New raw frame's data.</param>
+        /// <param name="format">New raw frame's data format.</param>
+        /// <param name="width">New raw frame's width.</param>
+        /// <param name="height">New raw frame's height.</param>
+        /// 
+        private void OnNewRawFrame( byte[] data, FrameDataFormat format, int width, int height )
+        {
+            if ( ( !stopEvent.WaitOne( 0, false ) ) && ( NewRawFrame != null ) )
+                NewRawFrame( this, new NewRawFrameEventArgs( data, format, width, height ) );
+        }
+
        /// <summary>
        /// Notifies clients about new snapshot frame.
        /// </summary>
@@ -1724,6 +1831,28 @@ private void OnSnapshotFrame( Bitmap image )
            }
        }
 
+        /// <summary>
+        /// Notifies clients about new snapshot raw frame data.
+        /// </summary>
+        /// 
+        /// <param name="data">New raw snapshot's data.</param>
+        /// <param name="format">New raw snapshot's data format.</param>
+        /// <param name="width">New raw snapshot's width.</param>
+        /// <param name="height">New raw snapshot's height.</param>
+        /// 
+        private void OnSnapshotRawFrame( byte[] data, FrameDataFormat format, int width, int height )
+        {
+            TimeSpan timeSinceStarted = DateTime.Now - startTime;
+
+            // TODO: need to find a better way to ignore the first snapshot, which is sent
+            // automatically (or better disable it)
+            if ( timeSinceStarted.TotalSeconds >= 4 )
+            {
+                if ( ( !stopEvent.WaitOne( 0, false ) ) && ( SnapshotRawFrame != null ) )
+                    SnapshotRawFrame( this, new NewRawFrameEventArgs( data, format, width, height ) );
+            }
+        }
+
        //
        // Video grabber
        //
@@ -1762,53 +1891,77 @@ public int SampleCB( double sampleTime, IntPtr sample )
            // Callback method that receives a pointer to the sample buffer
            public int BufferCB( double sampleTime, IntPtr buffer, int bufferLen )
            {
-                if ( parent.NewFrame != null )
+                if ( !snapshotMode )
                {
-                    System.Drawing.Bitmap image = null;
-
-                    if ( !parent.jpegEncodingEnabled )
-                    {
-                        // create new image
-                        image = new Bitmap( width, height, PixelFormat.Format24bppRgb );
-
-                        // lock bitmap data
-                        BitmapData imageData = image.LockBits(
-                            new Rectangle( 0, 0, width, height ),
-                            ImageLockMode.ReadWrite,
-                            PixelFormat.Format24bppRgb );
+                    parent.framesReceived++;
+                    parent.bytesReceived += bufferLen;
+                }
 
-                        // copy image data
-                        int srcStride = imageData.Stride;
-                        int dstStride = imageData.Stride;
+                if ( ( snapshotMode && parent.SnapshotFrame != null ) || ( !snapshotMode && parent.NewFrame != null ) )
+                {
+                    Bitmap image = null;
+                    FrameDataFormat dataFormat = snapshotMode ? parent.selectedSnapshotDataFormat : parent.selectedVideoDataFormat;
 
+                    if ( dataFormat == FrameDataFormat.MJPG )
+                    {
                        unsafe
                        {
-                            byte* dst = (byte*) imageData.Scan0.ToPointer( ) + dstStride * ( height - 1 );
-                            byte* src = (byte*) buffer.ToPointer( );
-
-                            for ( int y = 0; y < height; y++ )
-                            {
-                                Win32.memcpy( dst, src, srcStride );
-                                dst -= dstStride;
-                                src += srcStride;
-                            }
+                            image = (Bitmap) Image.FromStream( new UnmanagedMemoryStream( (byte*) buffer.ToPointer( ), bufferLen ) );
                        }
-
-                        // unlock bitmap data
-                        image.UnlockBits( imageData );
                    }
                    else
                    {
-                        unsafe
+                        PixelFormat pixelFormat;
+
+                        if ( dataFormat == FrameDataFormat.Unknown )
+                            pixelFormat = PixelFormat.Format24bppRgb;
+                        else
+                            pixelFormat = FrameDataFormatUtils.ToPixelFormat( dataFormat );
+
+                        if ( pixelFormat != PixelFormat.Undefined )
                        {
-                            image = (Bitmap) Bitmap.FromStream( new UnmanagedMemoryStream( (byte*) buffer.ToPointer( ), bufferLen ) );
+                            BitmapData imageData = null;
+
+                            try
+                            {
+                                // create new image
+                                image = new Bitmap( width, height, pixelFormat );
+
+                                // lock bitmap data
+                                imageData = image.LockBits(
+                                    new Rectangle( 0, 0, width, height ),
+                                    ImageLockMode.ReadWrite,
+                                    pixelFormat );
+
+                                // copy image data
+                                int srcStride = imageData.Stride;
+                                int dstStride = imageData.Stride;
+
+                                unsafe
+                                {
+                                    byte* dst = (byte*) imageData.Scan0.ToPointer( ) + dstStride * ( height - 1 );
+                                    byte* src = (byte*) buffer.ToPointer( );
+
+                                    for ( int y = 0; y < height; y++ )
+                                    {
+                                        Win32.memcpy( dst, src, srcStride );
+                                        dst -= dstStride;
+                                        src += srcStride;
+                                    }
+                                }
+                            }
+                            finally
+                            {
+                                // unlock bitmap data
+                                if ( imageData != null )
+                                    image.UnlockBits( imageData );
+                            }
                        }
                    }
 
+                    if ( image != null )
+                    {
                        // notify parent
-                        if (snapshotMode)
+                        if ( snapshotMode )
                        {
                            parent.OnSnapshotFrame( image );
                        }
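For the packed 4:2:2 subtypes (YUYV, UYVY, YUY2) the FrameDataFormatUtils.ToPixelFormat lookup added later in this patch returns PixelFormat.Undefined, so the code above creates no Bitmap and only the raw frame event fires. A client that wants displayable frames has to convert the payload itself. The sketch below is an illustration only (the helper names are not part of the library), assuming YUY2/YUYV byte order (Y0 U Y1 V), full-range BT.601 coefficients, and an even frame width.

    // Minimal YUY2/YUYV -> RGB24 conversion (illustration only).
    static class Yuv422
    {
        public static byte[] ToRgb24( byte[] yuy2, int width, int height )
        {
            byte[] rgb = new byte[width * height * 3];

            // each 4 input bytes (Y0 U Y1 V) describe two output pixels (6 bytes)
            for ( int i = 0, j = 0; i + 3 < yuy2.Length && j + 5 < rgb.Length; i += 4, j += 6 )
            {
                int y0 = yuy2[i];
                int u  = yuy2[i + 1] - 128;
                int y1 = yuy2[i + 2];
                int v  = yuy2[i + 3] - 128;

                WritePixel( rgb, j,     y0, u, v );
                WritePixel( rgb, j + 3, y1, u, v );
            }
            return rgb;
        }

        private static void WritePixel( byte[] rgb, int offset, int y, int u, int v )
        {
            // integer approximation of the BT.601 YUV -> RGB equations
            int r = y + ( ( 359 * v ) >> 8 );
            int g = y - ( ( 88 * u + 183 * v ) >> 8 );
            int b = y + ( ( 454 * u ) >> 8 );

            rgb[offset]     = Clamp( b );   // 24 bpp GDI+ memory layout is B, G, R
            rgb[offset + 1] = Clamp( g );
            rgb[offset + 2] = Clamp( r );
        }

        private static byte Clamp( int value )
        {
            return (byte) ( value < 0 ? 0 : ( value > 255 ? 255 : value ) );
        }
    }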
@@ -1818,7 +1971,26 @@ public int BufferCB( double sampleTime, IntPtr buffer, int bufferLen )
                    }
 
                    // release the image
-                    image.Dispose( );
+                    image.Dispose( );
+                    }
+                }
+
+                if ( ( snapshotMode && parent.SnapshotRawFrame != null ) || ( !snapshotMode && parent.NewRawFrame != null ) )
+                {
+                    // copy data
+                    byte[] data = new byte[bufferLen];
+
+                    Marshal.Copy( buffer, data, 0, bufferLen );
+
+                    // notify parent
+                    if ( snapshotMode )
+                    {
+                        parent.OnSnapshotRawFrame( data, parent.selectedSnapshotDataFormat, width, height );
+                    }
+                    else
+                    {
+                        parent.OnNewRawFrame( data, parent.selectedVideoDataFormat, width, height );
+                    }
                }
 
                return 0;
diff --git a/Sources/Video/FrameDataFormat.cs b/Sources/Video/FrameDataFormat.cs
new file mode 100644
index 00000000..c9f0704a
--- /dev/null
+++ b/Sources/Video/FrameDataFormat.cs
@@ -0,0 +1,147 @@
+// AForge Video Library
+// AForge.NET framework
+// http://www.aforgenet.com/framework/
+//
+// Copyright © AForge.NET, 2005-2011
+// contacts@aforgenet.com
+//
+
+namespace AForge.Video
+{
+    using System.Collections.Generic;
+    using ImagingPixelFormat = System.Drawing.Imaging.PixelFormat;
+
+    /// <summary>
+    /// Format for frame data representation.
+    /// </summary>
+    /// 
+    public enum FrameDataFormat
+    {
+        /// <summary>
+        /// Unknown.
+        /// </summary>
+        Unknown,
+
+        /// <summary>
+        /// RGB, 1 bit per pixel (bpp), palettized.
+        /// </summary>
+        RGB1,
+        /// <summary>
+        /// RGB, 4 bpp, palettized.
+        /// </summary>
+        RGB4,
+        /// <summary>
+        /// RGB, 8 bpp.
+        /// </summary>
+        RGB8,
+        /// <summary>
+        /// RGB 555, 16 bpp.
+        /// </summary>
+        RGB555,
+        /// <summary>
+        /// RGB 565, 16 bpp.
+        /// </summary>
+        RGB565,
+        /// <summary>
+        /// RGB, 24 bpp.
+        /// </summary>
+        RGB24,
+        /// <summary>
+        /// RGB, 32 bpp, no alpha channel.
+        /// </summary>
+        RGB32,
+
+        /// <summary>
+        /// YUV 4:2:2, packed (YUYV).
+        /// </summary>
+        YUYV,
+        /// <summary>
+        /// YUV 4:2:2, packed (UYVY).
+        /// </summary>
+        UYVY,
+        /// <summary>
+        /// YUV 4:2:2, packed (YUY2).
+        /// </summary>
+        YUY2,
+        /// <summary>
+        /// YUV 4:2:0, planar (IYUV).
+        /// </summary>
+        IYUV,
+
+        /// <summary>
+        /// Consumer DV.
+        /// </summary>
+        DVSD,
+        /// <summary>
+        /// Motion JPEG.
+        /// </summary>
+        MJPG
+    }
+
+    /// <summary>
+    /// Utilities for converting between frame data formats and pixel formats.
+    /// </summary>
+    /// 
+    public static class FrameDataFormatUtils
+    {
+        private static readonly Dictionary<ImagingPixelFormat, FrameDataFormat> lookup;
+        private static readonly Dictionary<FrameDataFormat, ImagingPixelFormat> reverseLookup;
+
+        static FrameDataFormatUtils()
+        {
+            lookup = new Dictionary<ImagingPixelFormat, FrameDataFormat>()
+            {
+                { ImagingPixelFormat.Format1bppIndexed, FrameDataFormat.RGB1 },
+                { ImagingPixelFormat.Format4bppIndexed, FrameDataFormat.RGB4 },
+                { ImagingPixelFormat.Format16bppRgb555, FrameDataFormat.RGB555 },
+                { ImagingPixelFormat.Format16bppRgb565, FrameDataFormat.RGB565 },
+                { ImagingPixelFormat.Format8bppIndexed, FrameDataFormat.RGB8 },
+                { ImagingPixelFormat.Format24bppRgb, FrameDataFormat.RGB24 },
+                { ImagingPixelFormat.Format32bppArgb, FrameDataFormat.RGB32 },
+            };
+
+            reverseLookup = new Dictionary<FrameDataFormat, ImagingPixelFormat>();
+
+            foreach (KeyValuePair<ImagingPixelFormat, FrameDataFormat> pair in lookup)
+            {
+                reverseLookup.Add(pair.Value, pair.Key);
+            }
+        }
+
+        /// <summary>
+        /// Converts System.Drawing.Imaging.PixelFormat to AForge.Video.FrameDataFormat.
+        /// </summary>
+        /// 
+        /// <param name="pixelFormat">Pixel format to convert.</param>
+        /// 
+        /// <returns>Returns the corresponding AForge.Video.FrameDataFormat if possible.
+        /// Otherwise it returns FrameDataFormat.Unknown.</returns>
+        /// 
+        public static FrameDataFormat FromPixelFormat(ImagingPixelFormat pixelFormat)
+        {
+            FrameDataFormat frameDataFormat;
+
+            if (lookup.TryGetValue(pixelFormat, out frameDataFormat))
+                return frameDataFormat;
+            else
+                return FrameDataFormat.Unknown;
+        }
+
+        /// <summary>
+        /// Converts AForge.Video.FrameDataFormat to System.Drawing.Imaging.PixelFormat.
+        /// </summary>
+        /// 
+        /// <param name="dataFormat">Data format to convert.</param>
+        /// 
+        /// <returns>Returns the corresponding System.Drawing.Imaging.PixelFormat if possible.
+        /// Otherwise it returns PixelFormat.Undefined.</returns>
+        /// 
+        public static ImagingPixelFormat ToPixelFormat(FrameDataFormat dataFormat)
+        {
+            ImagingPixelFormat pixelFormat;
+
+            if (reverseLookup.TryGetValue(dataFormat, out pixelFormat))
+                return pixelFormat;
+            else
+                return ImagingPixelFormat.Undefined;
+        }
+    }
+}
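A small example (not part of the patch) of the two lookups defined above, including their fallback values:

    using System.Drawing.Imaging;
    using AForge.Video;

    class FormatLookupExample
    {
        static void Main( )
        {
            // FrameDataFormat -> GDI+ pixel format (PixelFormat.Undefined when there is no match)
            PixelFormat pf = FrameDataFormatUtils.ToPixelFormat( FrameDataFormat.RGB24 );               // Format24bppRgb

            // GDI+ pixel format -> FrameDataFormat (FrameDataFormat.Unknown when there is no match)
            FrameDataFormat df = FrameDataFormatUtils.FromPixelFormat( PixelFormat.Format16bppRgb565 ); // RGB565

            System.Console.WriteLine( "{0} / {1}", pf, df );
        }
    }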
diff --git a/Sources/Video/Video.csproj b/Sources/Video/Video.csproj
index 360d6de2..7c91ba51 100644
--- a/Sources/Video/Video.csproj
+++ b/Sources/Video/Video.csproj
@@ -69,6 +69,7 @@
+    <Compile Include="FrameDataFormat.cs" />
diff --git a/Sources/Video/VideoEvents.cs b/Sources/Video/VideoEvents.cs
index c5729e54..f2da5818 100644
--- a/Sources/Video/VideoEvents.cs
+++ b/Sources/Video/VideoEvents.cs
@@ -19,6 +19,15 @@ namespace AForge.Video
    /// 
    public delegate void NewFrameEventHandler( object sender, NewFrameEventArgs eventArgs );
 
+    /// <summary>
+    /// Delegate for new raw frame event handler.
+    /// </summary>
+    /// 
+    /// <param name="sender">Sender object.</param>
+    /// <param name="eventArgs">Event arguments.</param>
+    /// 
+    public delegate void NewRawFrameEventHandler( object sender, NewRawFrameEventArgs eventArgs );
+
    /// <summary>
    /// Delegate for video source error event handler.
    /// </summary>
@@ -94,6 +103,71 @@ public System.Drawing.Bitmap Frame
        }
    }
 
+    /// <summary>
+    /// Arguments for new raw frame event from video source.
+    /// </summary>
+    /// 
+    public class NewRawFrameEventArgs : EventArgs
+    {
+        private byte[] rawFrame;
+        private FrameDataFormat format;
+        private int width;
+        private int height;
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="NewRawFrameEventArgs"/> class.
+        /// </summary>
+        /// 
+        /// <param name="rawFrame">New raw frame data.</param>
+        /// <param name="format">Frame data format.</param>
+        /// <param name="width">Frame width.</param>
+        /// <param name="height">Frame height.</param>
+        /// 
+        public NewRawFrameEventArgs( byte[] rawFrame, FrameDataFormat format = FrameDataFormat.Unknown, int width = -1, int height = -1 )
+        {
+            this.rawFrame = rawFrame;
+            this.format = format;
+            this.width = width;
+            this.height = height;
+        }
+
+        /// <summary>
+        /// Raw frame data from the video source.
+        /// </summary>
+        /// 
+        public byte[] RawFrame
+        {
+            get { return rawFrame; }
+        }
+
+        /// <summary>
+        /// Format of the raw frame data.
+        /// </summary>
+        /// 
+        public FrameDataFormat Format
+        {
+            get { return format; }
+        }
+
+        /// <summary>
+        /// Width of the raw frame (-1 when not known).
+        /// </summary>
+        /// 
+        public int Width
+        {
+            get { return width; }
+        }
+
+        /// <summary>
+        /// Height of the raw frame (-1 when not known).
+        /// </summary>
+        /// 
+        public int Height
+        {
+            get { return height; }
+        }
+    }
+
    /// <summary>
    /// Arguments for video source error event from video source.
    /// </summary>