//----------------------------------------------------------------------------
// Copyright (C) 2004-2021 by EMGU Corporation. All rights reserved.
//----------------------------------------------------------------------------
#if !(__ANDROID__ || __UNIFIED__ || UNITY_WSA || NETSTANDARD || UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL || UNITY_EDITOR || UNITY_STANDALONE)
#define WITH_SERVICE_MODEL
#endif
//#define TEST_CAPTURE
using System;
using System.Diagnostics;
#if WITH_SERVICE_MODEL
using System.ServiceModel;
#endif
using System.Runtime.InteropServices;
using System.Drawing;
using System.Threading;
using System.Threading.Tasks;
using Emgu.Util;
using Emgu.CV.Structure;
using Emgu.CV.Util;
namespace Emgu.CV
{
/// <summary>
/// Capture images from either a camera or a video file.
/// </summary>
/// <remarks>The VideoCapture class is NOT implemented in Open CV for the Android, iOS or UWP platforms.</remarks>
#if WITH_SERVICE_MODEL
[ServiceBehavior(InstanceContextMode = InstanceContextMode.Single)]
#endif
public partial class VideoCapture :
UnmanagedObject,
#if WITH_SERVICE_MODEL
IDuplexCapture,
#endif
ICapture
{
/// <summary>
/// VideoCapture API backends identifier.
/// </summary>
public enum API
{
/// <summary>
/// Auto detect
/// </summary>
Any = 0,
/// <summary>
/// Video For Windows (obsolete, removed)
/// </summary>
Vfw = 200,
/// <summary>
/// V4L/V4L2 capturing support
/// </summary>
V4L = 200,
/// <summary>
/// Same as CAP_V4L
/// </summary>
V4L2 = V4L,
/// <summary>
/// IEEE 1394 drivers
/// </summary>
Firewire = 300,
/// <summary>
/// IEEE 1394 drivers
/// </summary>
IEEE1394 = Firewire,
/// <summary>
/// IEEE 1394 drivers
/// </summary>
DC1394 = Firewire,
/// <summary>
/// IEEE 1394 drivers
/// </summary>
CMU1394 = Firewire,
/// <summary>
/// QuickTime (obsolete, removed)
/// </summary>
QT = 500,
/// <summary>
/// Unicap drivers (obsolete, removed)
/// </summary>
Unicap = 600,
/// <summary>
/// DirectShow (via videoInput)
/// </summary>
DShow = 700,
/// <summary>
/// PvAPI, Prosilica GigE SDK
/// </summary>
Pvapi = 800,
/// <summary>
/// OpenNI (for Kinect)
/// </summary>
OpenNI = 900,
/// <summary>
/// OpenNI (for Asus Xtion)
/// </summary>
OpenNIAsus = 910,
/// <summary>
/// Android - not used
/// </summary>
Android = 1000,
/// <summary>
/// XIMEA Camera API
/// </summary>
XiApi = 1100,
/// <summary>
/// AVFoundation framework for iOS (OS X Lion will have the same API)
/// </summary>
AVFoundation = 1200,
/// <summary>
/// Smartek Giganetix GigEVisionSDK
/// </summary>
Giganetix = 1300,
/// <summary>
/// Microsoft Media Foundation (via videoInput)
/// </summary>
Msmf = 1400,
/// <summary>
/// Microsoft Windows Runtime using Media Foundation
/// </summary>
Winrt = 1410,
/// <summary>
/// Intel Perceptual Computing SDK
/// </summary>
IntelPerc = 1500,
/// <summary>
/// OpenNI2 (for Kinect)
/// </summary>
Openni2 = 1600,
/// <summary>
/// OpenNI2 (for Asus Xtion and Occipital Structure sensors)
/// </summary>
Openni2Asus = 1610,
/// <summary>
/// gPhoto2 connection
/// </summary>
Gphoto2 = 1700,
/// <summary>
/// GStreamer
/// </summary>
Gstreamer = 1800,
/// <summary>
/// Open and record video file or stream using the FFMPEG library
/// </summary>
Ffmpeg = 1900,
/// <summary>
/// OpenCV Image Sequence (e.g. img_%02d.jpg)
/// </summary>
Images = 2000,
/// <summary>
/// Aravis SDK
/// </summary>
Aravis = 2100,
/// <summary>
/// Built-in OpenCV MotionJPEG codec
/// </summary>
OpencvMjpeg = 2200,
/// <summary>
/// Intel MediaSDK
/// </summary>
IntelMfx = 2300,
/// <summary>
/// XINE engine (Linux)
/// </summary>
Xine = 2400,
}
AutoResetEvent _pauseEvent = new AutoResetEvent(false);
/// <summary>
/// The type of flipping
/// </summary>
private CvEnum.FlipType? _flipType = null;
/// <summary>
/// The type of capture source
/// </summary>
public enum CaptureModuleType
{
/// <summary>
/// Capture from camera
/// </summary>
Camera,
/// <summary>
/// Capture from file using HighGUI
/// </summary>
Highgui,
}
private CaptureModuleType _captureModuleType;
private readonly bool _needDispose;
#region Properties
/// <summary>
/// Get the type of the capture module
/// </summary>
public CaptureModuleType CaptureSource
{
get
{
return _captureModuleType;
}
}
/// <summary>
/// Get and set the flip type. If null, no flipping will be done.
/// </summary>
public CvEnum.FlipType? FlipType
{
get
{
return _flipType;
}
set
{
_flipType = value;
}
}
/// <summary>
/// Get or Set if the captured image should be flipped horizontally
/// </summary>
public bool FlipHorizontal
{
get
{
if (_flipType == null)
return false;
return (_flipType.Value == CvEnum.FlipType.Horizontal) || (_flipType.Value == CvEnum.FlipType.Both);
}
set
{
if (_flipType == null)
{
if (value)
_flipType = CvEnum.FlipType.Horizontal;
}
else
{
switch (_flipType.Value)
{
case CvEnum.FlipType.Both:
if (!value)
_flipType = CvEnum.FlipType.Vertical;
break;
case CvEnum.FlipType.Horizontal:
if (!value)
_flipType = null;
break;
case CvEnum.FlipType.Vertical:
if (value)
_flipType = CvEnum.FlipType.Both;
break;
}
}
}
}
/// <summary>
/// Get or Set if the captured image should be flipped vertically
/// </summary>
public bool FlipVertical
{
get
{
if (_flipType == null)
return false;
return (_flipType.Value == CvEnum.FlipType.Vertical) || (_flipType.Value == CvEnum.FlipType.Both);
}
set
{
if (_flipType == null)
{
if (value)
_flipType = CvEnum.FlipType.Vertical;
}
else
{
switch (FlipType.Value)
{
case CvEnum.FlipType.Vertical:
if (!value)
_flipType = null;
break;
case CvEnum.FlipType.Horizontal:
if (value)
_flipType = CvEnum.FlipType.Both;
break;
case CvEnum.FlipType.Both:
if (!value)
_flipType = CvEnum.FlipType.Horizontal;
break;
}
}
}
}
/// <summary> The width of this capture </summary>
public int Width
{
get
{
return Convert.ToInt32(Get(CvEnum.CapProp.FrameWidth));
}
}
/// <summary> The height of this capture </summary>
public int Height
{
get
{
return Convert.ToInt32(Get(CvEnum.CapProp.FrameHeight));
}
}
#endregion
#region constructors
/*
///
/// Create a capture using the specific camera
///
/// The capture type
public VideoCapture(API captureType)
: this((int)captureType)
{
}*/
internal VideoCapture(IntPtr ptr, bool needDispose)
{
_ptr = ptr;
_needDispose = needDispose;
}
private static VectorOfInt ConvertCaptureProperties(Tuple<CvEnum.CapProp, int>[] captureProperties)
{
VectorOfInt vectInt = new VectorOfInt();
if (captureProperties != null)
{
foreach (Tuple<CvEnum.CapProp, int> cp in captureProperties)
{
vectInt.Push(new int[] { (int)cp.Item1, cp.Item2 });
}
}
return vectInt;
}
/// <summary>
/// Create a capture using the specific camera
/// </summary>
/// <param name="camIndex">The index of the camera to create the capture from, starting from 0</param>
/// <param name="captureApi">The preferred Capture API backend to use. Can be used to enforce a specific reader implementation if multiple are available.</param>
/// <param name="captureProperties">Optional capture properties. e.g. new Tuple&lt;CvEnum.CapProp, int&gt;(CvEnum.CapProp.HwAcceleration, (int) VideoAccelerationType.Any)</param>
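/// <example>
/// A minimal usage sketch (illustrative only; the file name "snapshot.png" is a placeholder):
/// open the default camera, request a 1280x720 frame size, and grab a single frame.
/// <code>
/// using (VideoCapture capture = new VideoCapture(0, VideoCapture.API.Any,
///     new Tuple&lt;CvEnum.CapProp, int&gt;(CvEnum.CapProp.FrameWidth, 1280),
///     new Tuple&lt;CvEnum.CapProp, int&gt;(CvEnum.CapProp.FrameHeight, 720)))
/// using (Mat frame = capture.QueryFrame())
/// {
///     // frame is null if grabbing failed; otherwise it holds a Bgr image
///     if (frame != null &amp;&amp; !frame.IsEmpty)
///         CvInvoke.Imwrite("snapshot.png", frame);
/// }
/// </code>
/// </example>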
public VideoCapture(int camIndex = 0, API captureApi = API.Any, params Tuple<CvEnum.CapProp, int>[] captureProperties)
{
_captureModuleType = CaptureModuleType.Camera;
#if TEST_CAPTURE
#else
using (VectorOfInt vectInt = ConvertCaptureProperties(captureProperties))
{
_ptr = CvInvoke.cveVideoCaptureCreateFromDevice(camIndex, captureApi, vectInt);
}
if (_ptr == IntPtr.Zero)
{
throw new NullReferenceException(String.Format("Error: Unable to create capture from camera {0}", camIndex));
}
#endif
}
/// <summary>
/// Create a capture from a file or a video stream
/// </summary>
/// <param name="fileName">The name of a file, or a URL pointing to a stream</param>
/// <param name="captureApi">The preferred Capture API backend to use. Can be used to enforce a specific reader implementation if multiple are available.</param>
/// <param name="captureProperties">Optional capture properties. e.g. new Tuple&lt;CvEnum.CapProp, int&gt;(CvEnum.CapProp.HwAcceleration, (int) VideoAccelerationType.Any)</param>
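/// <example>
/// A minimal sketch (the file name "test.avi" is a placeholder): open a video file with the FFMPEG backend,
/// asking for any available hardware acceleration, and grab the first frame.
/// <code>
/// using (VideoCapture capture = new VideoCapture("test.avi", VideoCapture.API.Ffmpeg,
///     new Tuple&lt;CvEnum.CapProp, int&gt;(CvEnum.CapProp.HwAcceleration, (int) VideoAccelerationType.Any)))
/// using (Mat frame = capture.QueryFrame())
/// {
///     // frame is null when no frame could be decoded
/// }
/// </code>
/// </example>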
public VideoCapture(String fileName, API captureApi = API.Any, params Tuple<CvEnum.CapProp, int>[] captureProperties)
{
using (CvString s = new CvString(fileName))
{
using (VectorOfInt vectInt = ConvertCaptureProperties(captureProperties))
{
_captureModuleType = CaptureModuleType.Highgui;
_ptr = CvInvoke.cveVideoCaptureCreateFromFile(s, captureApi, vectInt);
}
if (_ptr == IntPtr.Zero)
throw new NullReferenceException(String.Format("Unable to create capture from {0}", fileName));
}
}
#endregion
#region implement UnmanagedObject
/// <summary>
/// Release the resource for this capture
/// </summary>
protected override void DisposeObject()
{
#if TEST_CAPTURE
#else
if (_needDispose && _ptr != IntPtr.Zero)
{
Stop();
CvInvoke.cveVideoCaptureRelease(ref _ptr);
}
#endif
}
#endregion
/// <summary>
/// Obtain the capture property
/// </summary>
/// <param name="index">The index for the property</param>
/// <returns>The value of the specified property. A value of 0 is returned when querying a property that is
/// not supported by the backend used by the VideoCapture instance.</returns>
/// <remarks>Reading / writing properties involves many layers. Some unexpected results might happen
/// along this chain: "VideoCapture -> API Backend -> Operating System -> Device Driver -> Device Hardware".
/// The returned value might be different from what is really used by the device, or it could be encoded
/// using device-dependent rules (e.g. steps or percentage). The effective behaviour depends on the device
/// driver and the API backend.
/// </remarks>
public double Get(CvEnum.CapProp index)
{
return CvInvoke.cveVideoCaptureGet(_ptr, index);
}
/// <summary>
/// Sets the specified property of the video capture
/// </summary>
/// <param name="property">Property identifier</param>
/// <param name="value">Value of the property</param>
/// <returns>True if the property is supported by the backend used by the VideoCapture instance.
/// Even if it returns true, this doesn't ensure that the property value has been accepted by the capture device.</returns>
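/// <example>
/// A short sketch (assumes an existing VideoCapture instance named "capture"): request a capture resolution
/// and read back what the backend actually accepted, since the device may substitute the nearest supported value.
/// <code>
/// capture.Set(CvEnum.CapProp.FrameWidth, 640);
/// capture.Set(CvEnum.CapProp.FrameHeight, 480);
/// double actualWidth = capture.Get(CvEnum.CapProp.FrameWidth);
/// double actualHeight = capture.Get(CvEnum.CapProp.FrameHeight);
/// </code>
/// </example>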
public bool Set(CvEnum.CapProp property, double value)
{
return CvInvoke.cveVideoCaptureSet(Ptr, property, value);
}
/// <summary>
/// Grab a frame
/// </summary>
/// <returns>True on success</returns>
public virtual bool Grab()
{
if (_ptr == IntPtr.Zero)
return false;
bool grabbed = CvInvoke.cveVideoCaptureGrab(Ptr);
if (grabbed && ImageGrabbed != null)
ImageGrabbed(this, new EventArgs());
return grabbed;
}
#region Grab process
/// <summary>
/// The event to be called when an image is grabbed
/// </summary>
public event EventHandler ImageGrabbed;
private enum GrabState
{
Stopped,
Running,
Pause,
Stopping,
}
private volatile GrabState _grabState = GrabState.Stopped;
private void Run(Emgu.Util.ExceptionHandler eh = null)
{
try
{
while (_grabState == GrabState.Running || _grabState == GrabState.Pause)
{
if (_grabState == GrabState.Pause)
{
_pauseEvent.WaitOne();
}
else if (IntPtr.Zero.Equals(_ptr) || !Grab())
{
//capture has been released, or
//no more frames to grab, this is the end of the stream.
//We should stop.
_grabState = GrabState.Stopping;
}
}
}
catch (Exception e)
{
if (eh != null && eh.HandleException(e))
return;
Trace.WriteLine(e.StackTrace);
throw new Exception("Capture error", e);
}
finally
{
_grabState = GrabState.Stopped;
}
}
private Task _captureTask = null;
/// <summary>
/// Start the grab process in a separate thread. Once started, use the ImageGrabbed event handler and Retrieve() to obtain the grabbed frames.
/// </summary>
/// <param name="eh">An exception handler. If provided, it will be used to handle exceptions in the capture thread.</param>
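/// <example>
/// A minimal event-driven sketch (illustrative only): retrieve each grabbed frame from the ImageGrabbed handler.
/// <code>
/// VideoCapture capture = new VideoCapture(0);
/// Mat frame = new Mat();
/// capture.ImageGrabbed += (sender, e) =>
/// {
///     // Grab() has already been called by the capture thread; Retrieve decodes the grabbed frame
///     if (capture.Retrieve(frame))
///     {
///         // process the frame here
///     }
/// };
/// capture.Start();
/// </code>
/// </example>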
public void Start(Emgu.Util.ExceptionHandler eh = null)
{
if (_grabState == GrabState.Pause)
{
_grabState = GrabState.Running;
_pauseEvent.Set();
}
else if (_grabState == GrabState.Stopped || _grabState == GrabState.Stopping)
{
_grabState = GrabState.Running;
_captureTask = new Task(delegate { Run(eh); });
_captureTask.Start();
}
}
/// <summary>
/// Pause the grab process if it is running.
/// </summary>
public void Pause()
{
if (_grabState == GrabState.Running)
_grabState = GrabState.Pause;
}
/// <summary>
/// Stop the grabbing thread
/// </summary>
public void Stop()
{
if (_grabState == GrabState.Pause)
{
_grabState = GrabState.Stopping;
_pauseEvent.Set();
}
else
if (_grabState == GrabState.Running)
_grabState = GrabState.Stopping;
if (_captureTask != null)
{
_captureTask.Wait(100);
_captureTask = null;
}
}
#endregion
/// <summary>
/// Decodes and returns the grabbed video frame.
/// </summary>
/// <param name="image">The video frame is returned here. If no frame has been grabbed the image will be empty.</param>
/// <param name="flag">It could be a frame index or a driver specific flag</param>
/// <returns>False if no frame has been grabbed</returns>
public virtual bool Retrieve(IOutputArray image, int flag = 0)
{
bool success;
using (OutputArray oaImage = image.GetOutputArray())
{
success = CvInvoke.cveVideoCaptureRetrieve(Ptr, oaImage, flag);
}
if (success && (FlipType != null))
CvInvoke.Flip(image, image, FlipType.Value);
return success;
}
/// <summary>
/// Calls Grab() followed by Retrieve(): grabs the next frame and decodes it.
/// </summary>
/// <param name="m">The output array where the image will be read into.</param>
/// <returns>False if no frame has been grabbed</returns>
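/// <example>
/// A simple decoding-loop sketch (assumes an existing VideoCapture instance named "capture"): Read grabs and
/// decodes in one call, so counting successful reads gives the number of frames in a finite stream.
/// <code>
/// int frameCount = 0;
/// using (Mat frame = new Mat())
/// {
///     while (capture.Read(frame) &amp;&amp; !frame.IsEmpty)
///         frameCount++;
/// }
/// </code>
/// </example>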
public bool Read(IOutputArray m)
{
using (OutputArray oaM = m.GetOutputArray())
return CvInvoke.cveVideoCaptureRead(Ptr, oaM);
}
/// <summary>
/// The name of the backend used by this VideoCapture
/// </summary>
public String BackendName
{
get
{
using (CvString s = new CvString())
{
CvInvoke.cveVideoCaptureGetBackendName(Ptr, s);
return s.ToString();
}
}
}
/// <summary>
/// Wait for ready frames from VideoCapture.
/// </summary>
/// <param name="streams">Input video streams</param>
/// <param name="readyIndex">Stream indexes with grabbed frames (ready to use .Retrieve() to fetch the actual frame)</param>
/// <param name="timeoutNs">Number of nanoseconds (0 - infinite)</param>
/// <returns>True if readyIndex is not empty</returns>
/// <remarks>The primary use of the function is in multi-camera environments. The method fills the ready state vector and grabs a video frame from each camera that is ready.
/// After this call, use VideoCapture.Retrieve() to decode and fetch the frame data.</remarks>
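/// <example>
/// A multi-camera polling sketch (camera indices are placeholders; assumes VectorOfVideoCapture exposes a
/// Push(T[]) overload like the other VectorOf* containers):
/// <code>
/// using (VideoCapture cam0 = new VideoCapture(0))
/// using (VideoCapture cam1 = new VideoCapture(1))
/// using (VectorOfVideoCapture streams = new VectorOfVideoCapture())
/// using (VectorOfInt readyIndex = new VectorOfInt())
/// using (Mat frame = new Mat())
/// {
///     streams.Push(new VideoCapture[] { cam0, cam1 }); // assumed Push(T[]) overload
///     if (VideoCapture.WaitAny(streams, readyIndex))
///     {
///         foreach (int idx in readyIndex.ToArray())
///         {
///             // idx is the position of the ready stream within "streams"
///             VideoCapture ready = (idx == 0) ? cam0 : cam1;
///             ready.Retrieve(frame);
///         }
///     }
/// }
/// </code>
/// </example>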
public static bool WaitAny(VectorOfVideoCapture streams, VectorOfInt readyIndex, int timeoutNs = 0)
{
return CvInvoke.cveVideoCaptureWaitAny(streams, readyIndex, timeoutNs);
}
#region implement ICapture
/// <summary>
/// Capture a Bgr image frame
/// </summary>
/// <returns>A Bgr image frame. If no more frames are available, null will be returned.</returns>
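/// <example>
/// A pull-model sketch (assumes an existing VideoCapture instance named "capture"): keep querying frames
/// until the stream is exhausted.
/// <code>
/// Mat frame;
/// while ((frame = capture.QueryFrame()) != null)
/// {
///     using (frame)
///     {
///         // process the frame here
///     }
/// }
/// </code>
/// </example>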
public virtual Mat QueryFrame()
{
if (Grab())
{
Mat image = new Mat();
Retrieve(image);
return image;
}
else
{
return null;
}
}
/// <summary>
/// Capture a Bgr image frame that is half width and half height.
/// Mainly used by WCF when sending images to remote locations in a bandwidth-conservative scenario.
/// </summary>
/// <remarks>Internally, this is a cvQueryFrame operation followed by a cvPyrDown</remarks>
/// <returns>A Bgr image frame that is half width and half height</returns>
public virtual Mat QuerySmallFrame()
{
Mat tmp = QueryFrame();
if (tmp != null)
{
if (!tmp.IsEmpty)
{
CvInvoke.PyrDown(tmp, tmp);
return tmp;
}
else
{
tmp.Dispose();
}
}
return null;
}
#endregion
/*
/// Capture Bgr image frame with timestamp
/// A timestamped Bgr image frame
public TimedImage QueryTimedFrame()
{
IntPtr img = CvInvoke.cvQueryFrame(_ptr);
TimedImage res = new TimedImage(Width, Height);
res.Timestamp = System.DateTime.Now;
if (FlipType == Emgu.CV.CvEnum.FLIP.None)
{
CvInvoke.cvCopy(img, res.Ptr, IntPtr.Zero);
return res;
}
else
{
//code = 0 indicates vertical flip only
int code = 0;
//code = -1 indicates vertical and horizontal flip
if (FlipType == (Emgu.CV.CvEnum.FLIP.HORIZONTAL | Emgu.CV.CvEnum.FLIP.VERTICAL)) code = -1;
//code = 1 indicates horizontal flip only
else if (FlipType == Emgu.CV.CvEnum.FLIP.HORIZONTAL) code = 1;
CvInvoke.cvFlip(img, res.Ptr, code);
return res;
}
}*/
#if WITH_SERVICE_MODEL
/// <summary>
/// Query a frame duplexly over WCF
/// </summary>
public virtual void DuplexQueryFrame()
{
IDuplexCaptureCallback callback = OperationContext.Current.GetCallbackChannel<IDuplexCaptureCallback>();
using (Mat img = QueryFrame())
{
callback.ReceiveFrame(img);
}
}
/// <summary>
/// Query a small frame duplexly over WCF
/// </summary>
public virtual void DuplexQuerySmallFrame()
{
IDuplexCaptureCallback callback = OperationContext.Current.GetCallbackChannel<IDuplexCaptureCallback>();
using (Mat img = QuerySmallFrame())
{
callback.ReceiveFrame(img);
}
}
#endif
}
/// <summary>
/// The backend for video
/// </summary>
public class Backend
{
private int _id;
/// <summary>
/// Create a backend given its id
/// </summary>
/// <param name="id">The id of the backend</param>
public Backend(int id)
{
_id = id;
}
/// <summary>
/// The ID of the backend.
/// </summary>
public int ID
{
get
{
return _id;
}
}
/// <summary>
/// The name of the backend
/// </summary>
public String Name
{
get
{
using (CvString cvs = new CvString())
{
CvInvoke.cveGetBackendName(_id, cvs);
return cvs.ToString();
}
}
}
}
partial class CvInvoke
{
//[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
//internal static extern void cveVideoCaptureReadToMat(IntPtr capture, IntPtr mat);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
internal static extern IntPtr cveVideoCaptureCreateFromDevice(int index, VideoCapture.API apiPreference, IntPtr captureProperties);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
internal static extern IntPtr cveVideoCaptureCreateFromFile(IntPtr filename, VideoCapture.API api, IntPtr captureProperties);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
internal static extern void cveVideoCaptureRelease(ref IntPtr capture);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
[return: MarshalAs(CvInvoke.BoolToIntMarshalType)]
internal static extern bool cveVideoCaptureRead(IntPtr capture, IntPtr frame);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
[return: MarshalAs(CvInvoke.BoolToIntMarshalType)]
internal static extern bool cveVideoCaptureGrab(IntPtr capture);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
[return: MarshalAs(CvInvoke.BoolToIntMarshalType)]
internal static extern bool cveVideoCaptureRetrieve(IntPtr capture, IntPtr image, int flag);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
internal static extern double cveVideoCaptureGet(IntPtr capture, CvEnum.CapProp prop);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
[return: MarshalAs(CvInvoke.BoolToIntMarshalType)]
internal static extern bool cveVideoCaptureSet(IntPtr capture, CvEnum.CapProp propertyId, double value);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
internal static extern void cveVideoCaptureGetBackendName(IntPtr capture, IntPtr backendName);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
internal static extern void cveGetBackendName(int api, IntPtr name);
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
[return: MarshalAs(CvInvoke.BoolToIntMarshalType)]
internal static extern bool cveVideoCaptureWaitAny(IntPtr streams, IntPtr readyIndex, int timeoutNs);
/// <summary>
/// Returns the list of all built-in backends
/// </summary>
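/// <example>
/// A short sketch: print the id and name of every built-in backend.
/// <code>
/// foreach (Backend backend in CvInvoke.Backends)
///     Console.WriteLine("{0}: {1}", backend.ID, backend.Name);
/// </code>
/// </example>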
public static Backend[] Backends
{
get
{
using (VectorOfInt vi = new VectorOfInt())
{
cveGetBackends(vi);
int[] ids = vi.ToArray();
Backend[] backends = new Backend[ids.Length];
for (int i = 0; i < ids.Length; i++)
{
backends[i] = new Backend(ids[i]);
}
return backends;
}
}
}
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
internal static extern void cveGetBackends(IntPtr backends);
/// <summary>
/// Returns the list of available backends which work via cv::VideoCapture(int index)
/// </summary>
public static Backend[] CameraBackends
{
get
{
using (VectorOfInt vi = new VectorOfInt())
{
cveGetCameraBackends(vi);
int[] ids = vi.ToArray();
Backend[] backends = new Backend[ids.Length];
for (int i = 0; i < ids.Length; i++)
{
backends[i] = new Backend(ids[i]);
}
return backends;
}
}
}
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
internal static extern void cveGetCameraBackends(IntPtr backends);
/// <summary>
/// Returns the list of available backends which work via cv::VideoCapture(filename)
/// </summary>
public static Backend[] StreamBackends
{
get
{
using (VectorOfInt vi = new VectorOfInt())
{
cveGetStreamBackends(vi);
int[] ids = vi.ToArray();
Backend[] backends = new Backend[ids.Length];
for (int i = 0; i < ids.Length; i++)
{
backends[i] = new Backend(ids[i]);
}
return backends;
}
}
}
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
internal static extern void cveGetStreamBackends(IntPtr backends);
/// <summary>
/// Returns the list of available backends which work via cv::VideoWriter()
/// </summary>
public static Backend[] WriterBackends
{
get
{
using (VectorOfInt vi = new VectorOfInt())
{
cveGetWriterBackends(vi);
int[] ids = vi.ToArray();
Backend[] backends = new Backend[ids.Length];
for (int i = 0; i < ids.Length; i++)
{
backends[i] = new Backend(ids[i]);
}
return backends;
}
}
}
[DllImport(ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
internal static extern void cveGetWriterBackends(IntPtr backends);
}
}