  1. //----------------------------------------------------------------------------
  2. // Copyright (C) 2004-2021 by EMGU Corporation. All rights reserved.
  3. //----------------------------------------------------------------------------
  4. using System;
  5. using System.Collections.Generic;
  6. using System.Diagnostics;
  7. using System.Drawing;
  8. using System.IO;
  9. using Emgu.CV.CvEnum;
  10. using System.Runtime.InteropServices;
  11. using System.Runtime.Serialization;
  12. using Emgu.CV.Reflection;
  13. using Emgu.CV.Structure;
  14. using Emgu.CV.Util;
  15. using Emgu.Util;
  16. namespace Emgu.CV
  17. {
  18. /// <summary>
  19. /// An Image is a wrapper to IplImage of OpenCV.
  20. /// </summary>
  21. /// <typeparam name="TColor">Color type of this image (either Gray, Bgr, Bgra, Hsv, Hls, Lab, Luv, Xyz, Ycc, Rgb or Rgba)</typeparam>
  22. /// <typeparam name="TDepth">Depth of this image (either Byte, SByte, Single, double, UInt16, Int16 or Int32)</typeparam>
  23. [Serializable]
  24. public partial class Image<TColor, TDepth>
  25. : CvArray<TDepth>, IEquatable<Image<TColor, TDepth>>, IInputOutputArray
  26. where TColor : struct, IColor
  27. where TDepth : new()
  28. {
  29. private ImageDataReleaseMode _imageDataReleaseMode;
  30. private TDepth[,,] _array;
  31. /// <summary>
  32. /// The dimension of color
  33. /// </summary>
  34. private static readonly int _numberOfChannels = new TColor().Dimension;
  35. #region constructors
  36. /// <summary>
  37. /// Create an empty Image
  38. /// </summary>
  39. protected Image()
  40. {
  41. }
  42. /// <summary>
  43. /// Create image from the specific multi-dimensional data, where the 1st dimension is # of rows (height), the 2nd dimension is # cols (width) and the 3rd dimension is the channel
  44. /// </summary>
  45. /// <param name="data">The multi-dimensional data where the 1st dimension is # of rows (height), the 2nd dimension is # cols (width) and the 3rd dimension is the channel </param>
  46. public Image(TDepth[,,] data)
  47. {
  48. Data = data;
  49. }
  50. /// <summary>
  51. /// Create an Image from unmanaged data.
  52. /// </summary>
  53. /// <param name="width">The width of the image</param>
  54. /// <param name="height">The height of the image</param>
  55. /// <param name="stride">Size of aligned image row in bytes</param>
  56. /// <param name="scan0">Pointer to aligned image data, <b>where each row should be 4-align</b> </param>
  57. /// <remarks>The caller is responsible for allocating and freeing the block of memory specified by the scan0 parameter, however, the memory should not be released until the related Image is released. </remarks>
  58. public Image(int width, int height, int stride, IntPtr scan0)
  59. {
  60. MapDataToImage(width, height, stride, scan0);
  61. }
  62. /// <summary>
  63. /// Let this Image object use the specific Image data.
  64. /// </summary>
  65. /// <param name="width">The width of the image</param>
  66. /// <param name="height">The height of the image</param>
  67. /// <param name="stride">The data stride (bytes per row)</param>
  68. /// <param name="scan0">The origin of the data</param>
  69. protected void MapDataToImage(int width, int height, int stride, IntPtr scan0)
  70. {
  71. _ptr = CvInvoke.cvCreateImageHeader(new Size(width, height), CvDepth, NumberOfChannels);
  72. _imageDataReleaseMode = ImageDataReleaseMode.ReleaseHeaderOnly;
  73. GC.AddMemoryPressure(StructSize.MIplImage);
  74. MIplImage iplImage = MIplImage;
  75. iplImage.ImageData = scan0;
  76. iplImage.WidthStep = stride;
  77. Marshal.StructureToPtr(iplImage, _ptr, false);
  78. }
  79. /// <summary>
  80. /// Allocate the image from the image header.
  81. /// </summary>
  82. /// <param name="ptr">This should be only a header to the image. When the image is disposed, the cvReleaseImageHeader will be called on the pointer.</param>
  83. internal Image(IntPtr ptr)
  84. {
  85. _ptr = ptr;
  86. }
  87. /// <summary>
  88. /// Read image from a file
  89. /// </summary>
  90. /// <param name="fileName">the name of the file that contains the image</param>
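/// <example>
/// A minimal usage sketch (the file name "input.jpg" is a placeholder, not part of this class):
/// <code>
/// using (Image&lt;Bgr, Byte&gt; img = new Image&lt;Bgr, Byte&gt;("input.jpg"))
/// {
///    // img now holds the decoded pixels as Bgr color with Byte depth
/// }
/// </code>
/// </example>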
  91. public Image(String fileName)
  92. {
  93. if (!File.Exists(fileName))
  94. throw new ArgumentException(String.Format("File {0} does not exist", fileName));
  95. try
  96. {
  97. using (Mat m = CvInvoke.Imread(fileName, CvEnum.ImreadModes.AnyColor | CvEnum.ImreadModes.AnyDepth))
  98. {
  99. if (m.IsEmpty)
  100. throw new NullReferenceException(String.Format("Unable to load image from file \"{0}\".", fileName));
  101. LoadImageFromMat(m);
  102. }
  103. }
  104. catch (TypeInitializationException e)
  105. {
  106. //possibly Exception in CvInvoke's static constructor.
  107. throw e;
  108. }
  109. catch (Exception e)
  110. {
  111. throw new ArgumentException(String.Format("Unable to decode file: {0}", fileName), e);
  112. }
  113. }
  114. /// <summary>
  115. /// Create a blank Image of the specified width, height and color.
  116. /// </summary>
  117. /// <param name="width">The width of the image</param>
  118. /// <param name="height">The height of the image</param>
  119. /// <param name="value">The initial color of the image</param>
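/// <example>
/// A minimal usage sketch; the size and color below are arbitrary placeholder values:
/// <code>
/// // a 320x240 image filled with red (Bgr stores blue, green, red)
/// Image&lt;Bgr, Byte&gt; red = new Image&lt;Bgr, Byte&gt;(320, 240, new Bgr(0, 0, 255));
/// </code>
/// </example>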
  120. public Image(int width, int height, TColor value)
  121. : this(width, height)
  122. {
  123. //int n1 = MIplImage.nSize;
  124. SetValue(value);
  125. //int n2 = MIplImage.nSize;
  126. //int nDiff = n2 - n1;
  127. }
  128. /// <summary>
  129. /// Create a blank Image of the specified width and height.
  130. /// </summary>
  131. /// <param name="width">The width of the image</param>
  132. /// <param name="height">The height of the image</param>
  133. public Image(int width, int height)
  134. {
  135. AllocateData(height, width, NumberOfChannels);
  136. }
  137. /// <summary>
  138. /// Create a blank Image of the specific size
  139. /// </summary>
  140. /// <param name="size">The size of the image</param>
  141. public Image(Size size)
  142. : this(size.Width, size.Height)
  143. {
  144. }
  145. /// <summary>
  146. /// Get or Set the data for this image. The Get function has O(1) complexity. The Set function makes a copy of the data.
  147. /// </summary>
  148. /// <remarks>
  149. /// If the image contains Byte data and the width is not a multiple of 4, the second dimension of the array might be larger than the Width of this image.
  150. /// This is necessary since the length of a row needs to be 4-aligned for OpenCV optimization.
  151. /// The Set function always makes a copy of the specified value. If the image contains Byte data and the width is not a multiple of 4, the second dimension of the array created might be larger than the Width of this image.
  152. /// </remarks>
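/// <example>
/// A minimal sketch of the [row, column, channel] layout; the image size and coordinates are arbitrary placeholders:
/// <code>
/// Image&lt;Bgr, Byte&gt; img = new Image&lt;Bgr, Byte&gt;(100, 100);
/// byte blue = img.Data[10, 20, 0]; // read the blue channel at row 10, column 20
/// img.Data[10, 20, 2] = 255;       // write the red channel of the same pixel
/// </code>
/// </example>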
  153. public TDepth[,,] Data
  154. {
  155. get
  156. {
  157. return _array;
  158. }
  159. set
  160. {
  161. Debug.Assert(value != null, "Data cannot be set to null");
  162. Debug.Assert(value.GetLength(2) == NumberOfChannels, "The number of channels must equal");
  163. AllocateData(value.GetLength(0), value.GetLength(1), NumberOfChannels);
  164. int rows = value.GetLength(0);
  165. int valueRowLength = value.GetLength(1) * value.GetLength(2);
  166. int arrayRowLength = _array.GetLength(1) * _array.GetLength(2);
  167. for (int i = 0; i < rows; i++)
  168. Array.Copy(value, i * valueRowLength, _array, i * arrayRowLength, valueRowLength);
  169. }
  170. }
  171. /// <summary>
  172. /// Re-allocate data for the array
  173. /// </summary>
  174. /// <param name="rows">The number of rows</param>
  175. /// <param name="cols">The number of columns</param>
  176. /// <param name="numberOfChannels">The number of channels of this image</param>
  177. protected override void AllocateData(int rows, int cols, int numberOfChannels)
  178. {
  179. DisposeObject();
  180. Debug.Assert(!_dataHandle.IsAllocated, "Handle should be free");
  181. _ptr = CvInvoke.cvCreateImageHeader(new Size(cols, rows), CvDepth, numberOfChannels);
  182. _imageDataReleaseMode = ImageDataReleaseMode.ReleaseHeaderOnly;
  183. GC.AddMemoryPressure(StructSize.MIplImage);
  184. Debug.Assert(MIplImage.Align == 4, "Only 4 align is supported at this moment");
  185. if (typeof(TDepth) == typeof(Byte) && (cols & 3) != 0 && (numberOfChannels & 3) != 0)
  186. { //if the managed data isn't 4 aligned, make it so
  187. _array = new TDepth[rows, (cols & (~3)) + 4, numberOfChannels];
  188. }
  189. else
  190. {
  191. _array = new TDepth[rows, cols, numberOfChannels];
  192. }
  193. _dataHandle = GCHandle.Alloc(_array, GCHandleType.Pinned);
  194. //int n1 = MIplImage.nSize;
  195. CvInvoke.cvSetData(_ptr, _dataHandle.AddrOfPinnedObject(), _array.GetLength(1) * _array.GetLength(2) * SizeOfElement);
  196. //int n2 = MIplImage.nSize;
  197. //int nDiff = n2 - n1;
  198. }
  199. /// <summary>
  200. /// Create a multi-channel image from multiple gray scale images
  201. /// </summary>
  202. /// <param name="channels">The image channels to be merged into a single image</param>
  203. public Image(Image<Gray, TDepth>[] channels)
  204. {
  205. Debug.Assert(NumberOfChannels == channels.Length);
  206. AllocateData(channels[0].Height, channels[0].Width, NumberOfChannels);
  207. if (NumberOfChannels == 1)
  208. {
  209. //if this image only have a single channel
  210. CvInvoke.cvCopy(channels[0].Ptr, Ptr, IntPtr.Zero);
  211. }
  212. else
  213. {
  214. using (VectorOfMat mv = new VectorOfMat())
  215. {
  216. for (int i = 0; i < channels.Length; i++)
  217. {
  218. mv.Push(channels[i].Mat);
  219. }
  220. CvInvoke.Merge(mv, this);
  221. }
  222. }
  223. }
  224. #endregion
  225. #region Implement ISerializable interface
  226. /// <summary>
  227. /// Constructor used to deserialize runtime serialized object
  228. /// </summary>
  229. /// <param name="info">The serialization info</param>
  230. /// <param name="context">The streaming context</param>
  231. public Image(SerializationInfo info, StreamingContext context)
  232. {
  233. DeserializeObjectData(info, context);
  234. ROI = (Rectangle)info.GetValue("Roi", typeof(Rectangle));
  235. }
  236. /// <summary>
  237. /// A function used for runtime serialization of the object
  238. /// </summary>
  239. /// <param name="info">Serialization info</param>
  240. /// <param name="context">streaming context</param>
  241. public override void GetObjectData(SerializationInfo info, StreamingContext context)
  242. {
  243. if (IsROISet)
  244. {
  245. Rectangle roi = ROI;
  246. ROI = Rectangle.Empty;
  247. base.GetObjectData(info, context);
  248. ROI = roi;
  249. info.AddValue("Roi", roi);
  250. }
  251. else
  252. {
  253. base.GetObjectData(info, context);
  254. info.AddValue("Roi", ROI);
  255. }
  256. }
  257. #endregion
  258. #region Image Properties
  259. /// <summary>
  260. /// The IplImage structure
  261. /// </summary>
  262. public MIplImage MIplImage
  263. {
  264. get
  265. {
  266. return (MIplImage)Marshal.PtrToStructure(Ptr, typeof(MIplImage));
  267. }
  268. }
  269. /// <summary>
  270. /// Get or Set the region of interest for this image. To clear the ROI, set it to System.Drawing.Rectangle.Empty
  271. /// </summary>
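/// <example>
/// A minimal usage sketch, assuming img is a Bgr, Byte image created elsewhere; the rectangle values are placeholders:
/// <code>
/// img.ROI = new Rectangle(10, 10, 50, 50);   // subsequent operations act on the 50x50 region
/// Image&lt;Bgr, Byte&gt; region = img.Copy();      // copies only the ROI
/// img.ROI = Rectangle.Empty;                 // clear the ROI
/// </code>
/// </example>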
  272. public Rectangle ROI
  273. {
  274. set
  275. {
  276. if (value.Equals(Rectangle.Empty))
  277. {
  278. //reset the image ROI
  279. CvInvoke.cvResetImageROI(Ptr);
  280. }
  281. else
  282. { //set the image ROI to the specific value
  283. CvInvoke.cvSetImageROI(Ptr, value);
  284. }
  285. if (_cvMat != null)
  286. {
  287. _cvMat.Dispose();
  288. _cvMat = null;
  289. }
  290. _cvMat = CvInvoke.CvArrToMat(Ptr);
  291. }
  292. get
  293. {
  294. //return the image ROI
  295. return CvInvoke.cvGetImageROI(Ptr);
  296. }
  297. }
  298. /// <summary>
  299. /// Get the number of channels for this image
  300. /// </summary>
  301. public override int NumberOfChannels
  302. {
  303. get
  304. {
  305. return _numberOfChannels;
  306. }
  307. }
  308. /// <summary>
  309. /// Get the underneath managed array
  310. /// </summary>
  311. public override Array ManagedArray
  312. {
  313. get { return _array; }
  314. set
  315. {
  316. TDepth[,,] data = value as TDepth[,,];
  317. if (data == null)
  318. throw new InvalidCastException(String.Format("Cannot convert ManagedArray to type of {0}[,,].", typeof(TDepth).ToString()));
  319. Data = data;
  320. }
  321. }
  322. /// <summary>
  323. /// Get the equivalent opencv depth type for this image
  324. /// </summary>
  325. public static CvEnum.IplDepth CvDepth
  326. {
  327. get
  328. {
  329. Type typeOfDepth = typeof(TDepth);
  330. if (typeOfDepth == typeof(Single))
  331. return CvEnum.IplDepth.IplDepth32F;
  332. else if (typeOfDepth == typeof(Byte))
  333. return CvEnum.IplDepth.IplDepth_8U;
  334. else if (typeOfDepth == typeof(Double))
  335. return CvEnum.IplDepth.IplDepth64F;
  336. else if (typeOfDepth == typeof(SByte))
  337. return Emgu.CV.CvEnum.IplDepth.IplDepth_8S;
  338. else if (typeOfDepth == typeof(UInt16))
  339. return Emgu.CV.CvEnum.IplDepth.IplDepth16U;
  340. else if (typeOfDepth == typeof(Int16))
  341. return Emgu.CV.CvEnum.IplDepth.IplDepth16S;
  342. else if (typeOfDepth == typeof(Int32))
  343. return Emgu.CV.CvEnum.IplDepth.IplDepth32S;
  344. else
  345. throw new NotImplementedException("Unsupported image depth");
  346. }
  347. }
  348. /// <summary>
  349. /// Indicates if the region of interest has been set
  350. /// </summary>
  351. public bool IsROISet
  352. {
  353. get
  354. {
  355. return Marshal.ReadIntPtr(Ptr, ImageConstants.RoiOffset) != IntPtr.Zero;
  356. }
  357. }
  358. /// <summary>
  359. /// Get the average value on this image
  360. /// </summary>
  361. /// <returns>The average color of the image</returns>
  362. public TColor GetAverage()
  363. {
  364. return GetAverage(null);
  365. }
  366. /// <summary>
  367. /// Get the average value on this image, using the specific mask
  368. /// </summary>
  369. /// <param name="mask">The mask used to find the average value</param>
  370. /// <returns>The average color of the masked area</returns>
  371. public TColor GetAverage(Image<Gray, Byte> mask)
  372. {
  373. TColor res = new TColor();
  374. res.MCvScalar = CvInvoke.Mean(this, mask);
  375. return res;
  376. }
  377. /// <summary>Get the sum for each color channel </summary>
  378. /// <returns>The sum for each color channel</returns>
  379. public TColor GetSum()
  380. {
  381. TColor res = new TColor();
  382. res.MCvScalar = CvInvoke.Sum(this);
  383. return res;
  384. }
  385. #endregion
  386. #region Coping and Filling
  387. /// <summary>
  388. /// Set every pixel of the image to the specific color
  389. /// </summary>
  390. /// <param name="color">The color to be set</param>
  391. public void SetValue(TColor color)
  392. {
  393. SetValue(color.MCvScalar);
  394. }
  395. /// <summary>
  396. /// Set every pixel of the image to the specific color, using a mask
  397. /// </summary>
  398. /// <param name="color">The color to be set</param>
  399. /// <param name="mask">The mask for setting color</param>
  400. public void SetValue(TColor color, Image<Gray, Byte> mask)
  401. {
  402. SetValue(color.MCvScalar, mask);
  403. }
  404. /// <summary>
  405. /// Copy the masked area of this image to destination
  406. /// </summary>
  407. /// <param name="dest">the destination to copy to</param>
  408. /// <param name="mask">the mask for copy</param>
  409. public void Copy(Image<TColor, TDepth> dest, Image<Gray, Byte> mask)
  410. {
  411. CvInvoke.cvCopy(Ptr, dest.Ptr, mask == null ? IntPtr.Zero : mask.Ptr);
  412. }
  413. /// <summary>
  414. /// Make a copy of the image using a mask. If ROI is set, only the ROI is copied.
  415. /// </summary>
  416. /// <param name="mask">the mask for copying</param>
  417. /// <returns> A copy of the image</returns>
  418. public Image<TColor, TDepth> Copy(Image<Gray, Byte> mask)
  419. {
  420. Image<TColor, TDepth> res = new Image<TColor, TDepth>(Size);
  421. Copy(res, mask);
  422. return res;
  423. }
  424. /// <summary>
  425. /// Make a copy of the specific ROI (Region of Interest) from the image
  426. /// </summary>
  427. /// <param name="roi">The roi to be copied</param>
  428. /// <returns>The region of interest</returns>
  429. public Image<TColor, TDepth> Copy(Rectangle roi)
  430. {
  431. /*
  432. Rectangle currentRoi = ROI; //cache the current roi
  433. Image<TColor, TDepth> res = new Image<TColor, TDepth>(roi.Size);
  434. ROI = roi;
  435. CvInvoke.cvCopy(Ptr, res.Ptr, IntPtr.Zero);
  436. ROI = currentRoi; //reset the roi
  437. return res;*/
  438. using (Image<TColor, TDepth> subrect = GetSubRect(roi))
  439. {
  440. return subrect.Copy();
  441. }
  442. }
  443. /// <summary>
  444. /// Get a copy of the boxed region of the image
  445. /// </summary>
  446. /// <param name="box">The boxed region of the image</param>
  447. /// <returns>A copy of the boxed region of the image</returns>
  448. public Image<TColor, TDepth> Copy(RotatedRect box)
  449. {
  450. PointF[] srcCorners = box.GetVertices();
  451. PointF[] destCorners = new PointF[] {
  452. new PointF(0, box.Size.Height - 1),
  453. new PointF(0, 0),
  454. new PointF(box.Size.Width - 1, 0),
  455. new PointF(box.Size.Width - 1, box.Size.Height - 1)};
  456. using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
  457. {
  458. Image<TColor, TDepth> res = new Image<TColor, TDepth>((int)box.Size.Width, (int)box.Size.Height);
  459. CvInvoke.WarpAffine(this, res, rot, res.Size);
  460. return res;
  461. }
  462. }
  463. /// <summary> Make a copy of the image. If ROI is set, only the ROI is copied.</summary>
  464. /// <returns> A copy of the image</returns>
  465. public Image<TColor, TDepth> Copy()
  466. {
  467. return Copy(null);
  468. }
  469. /// <summary>
  470. /// Create an image of the same size
  471. /// </summary>
  472. /// <remarks>All pixel values in the new image are initialized to zero</remarks>
  473. /// <returns> The image of the same size</returns>
  474. public Image<TColor, TDepth> CopyBlank()
  475. {
  476. return new Image<TColor, TDepth>(Size);
  477. }
  478. /// <summary>
  479. /// Make a clone of the current image. All image data as well as the COI and ROI are cloned
  480. /// </summary>
  481. /// <returns>A clone of the current image. All image data as well as the COI and ROI are cloned</returns>
  482. public Image<TColor, TDepth> Clone()
  483. {
  484. int coi = CvInvoke.cvGetImageCOI(Ptr); //get the COI for current image
  485. Rectangle roi = ROI; //get the ROI for current image
  486. CvInvoke.cvSetImageCOI(Ptr, 0); //clear COI for current image
  487. ROI = Rectangle.Empty; // clear ROI for current image
  488. #region create a clone of the current image with the same COI and ROI
  489. Image<TColor, TDepth> res = Copy();
  490. CvInvoke.cvSetImageCOI(res.Ptr, coi);
  491. res.ROI = roi;
  492. #endregion
  493. CvInvoke.cvSetImageCOI(Ptr, coi); //reset the COI for the current image
  494. ROI = roi; // reset the ROI for the current image
  495. return res;
  496. }
  497. /// <summary>
  498. /// Get a subimage whose image data is shared with the current image.
  499. /// </summary>
  500. /// <param name="rect">The rectangle area of the sub-image</param>
  501. /// <returns>A subimage whose image data is shared with the current image</returns>
  502. public Image<TColor, TDepth> GetSubRect(Rectangle rect)
  503. {
  504. Image<TColor, TDepth> subRect = new Image<TColor, TDepth>();
  505. subRect._array = _array;
  506. GC.AddMemoryPressure(StructSize.MIplImage); //This pressure will be released once the result image is disposed.
  507. subRect._ptr = CvInvoke.cvGetImageSubRect(_ptr, ref rect);
  508. return subRect;
  509. }
  510. #endregion
  511. #region Drawing functions
  512. /// <summary>Draw a Rectangle of the specific color and thickness </summary>
  513. /// <param name="rect">The rectangle to be drawn</param>
  514. /// <param name="color">The color of the rectangle </param>
  515. /// <param name="thickness">If thickness is less than 1, the rectangle is filled up </param>
  516. /// <param name="lineType">Line type</param>
  517. /// <param name="shift">Number of fractional bits in the center coordinates and radius value</param>
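/// <example>
/// A minimal usage sketch, assuming img is a Bgr, Byte image; the rectangle, color and thickness are placeholders:
/// <code>
/// img.Draw(new Rectangle(10, 10, 80, 40), new Bgr(0, 255, 0), 2);  // 2-pixel green outline
/// img.Draw(new Rectangle(10, 10, 80, 40), new Bgr(0, 255, 0), -1); // filled rectangle
/// </code>
/// </example>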
  518. public virtual void Draw(Rectangle rect, TColor color, int thickness = 1, CvEnum.LineType lineType = CvEnum.LineType.EightConnected, int shift = 0)
  519. {
  520. CvInvoke.Rectangle(this, rect, color.MCvScalar, thickness, lineType, shift);
  521. }
  522. /// <summary>Draw a 2D Cross using the specific color and thickness </summary>
  523. /// <param name="cross">The 2D Cross to be drawn</param>
  524. /// <param name="color">The color of the cross </param>
  525. /// <param name="thickness">Must be &gt; 0 </param>
  526. public void Draw(Cross2DF cross, TColor color, int thickness)
  527. {
  528. Debug.Assert(thickness > 0, "Thickness should be > 0");
  529. if (thickness > 0)
  530. {
  531. Draw(cross.Horizontal, color, thickness);
  532. Draw(cross.Vertical, color, thickness);
  533. }
  534. }
  535. /// <summary>Draw a line segment using the specific color and thickness </summary>
  536. /// <param name="line">The line segment to be drawn</param>
  537. /// <param name="color">The color of the line segment </param>
  538. /// <param name="thickness">The thickness of the line segment </param>
  539. /// <param name="lineType">Line type</param>
  540. /// <param name="shift">Number of fractional bits in the center coordinates and radius value</param>
  541. public virtual void Draw(LineSegment2DF line, TColor color, int thickness, CvEnum.LineType lineType = CvEnum.LineType.EightConnected, int shift = 0)
  542. {
  543. Debug.Assert(thickness > 0, "Thickness should be > 0");
  544. if (thickness > 0)
  545. CvInvoke.Line(
  546. this,
  547. Point.Round(line.P1),
  548. Point.Round(line.P2),
  549. color.MCvScalar,
  550. thickness,
  551. lineType,
  552. shift);
  553. }
  554. /// <summary> Draw a line segment using the specific color and thickness </summary>
  555. /// <param name="line"> The line segment to be drawn</param>
  556. /// <param name="color"> The color of the line segment </param>
  557. /// <param name="thickness"> The thickness of the line segment </param>
  558. /// <param name="lineType">Line type</param>
  559. /// <param name="shift">Number of fractional bits in the center coordinates and radius value</param>
  560. public virtual void Draw(LineSegment2D line, TColor color, int thickness, CvEnum.LineType lineType = CvEnum.LineType.EightConnected, int shift = 0)
  561. {
  562. Debug.Assert(thickness > 0, "Thickness should be > 0");
  563. if (thickness > 0)
  564. CvInvoke.Line(
  565. this,
  566. line.P1,
  567. line.P2,
  568. color.MCvScalar,
  569. thickness,
  570. lineType,
  571. shift);
  572. }
  573. /// <summary> Draw a convex polygon using the specific color and thickness </summary>
  574. /// <param name="polygon"> The convex polygon to be drawn</param>
  575. /// <param name="color"> The color of the polygon </param>
  576. /// <param name="thickness"> If thickness is less than 1, the polygon is filled up </param>
  577. public virtual void Draw(IConvexPolygonF polygon, TColor color, int thickness)
  578. {
  579. PointF[] polygonVertices = polygon.GetVertices();
  580. Point[] vertices = new Point[polygonVertices.Length];
  581. for (int i = 0; i < polygonVertices.Length; i++)
  582. vertices[i] = Point.Round(polygonVertices[i]);
  583. if (thickness > 0)
  584. DrawPolyline(vertices, true, color, thickness);
  585. else
  586. {
  587. FillConvexPoly(vertices, color);
  588. }
  589. }
  590. /// <summary>
  591. /// Fill the convex polygon with the specific color
  592. /// </summary>
  593. /// <param name="pts">The array of points that define the convex polygon</param>
  594. /// <param name="color">The color to fill the polygon with</param>
  595. /// <param name="lineType">Line type</param>
  596. /// <param name="shift">Number of fractional bits in the center coordinates and radius value</param>
  597. public void FillConvexPoly(Point[] pts, TColor color, Emgu.CV.CvEnum.LineType lineType = CvEnum.LineType.EightConnected, int shift = 0)
  598. {
  599. using (VectorOfPoint vp = new VectorOfPoint(pts))
  600. CvInvoke.FillConvexPoly(this, vp, color.MCvScalar, lineType, shift);
  601. }
  602. /// <summary>
  603. /// Draw the polyline defined by the array of 2D points
  604. /// </summary>
  605. /// <param name="pts">A polyline defined by its points</param>
  606. /// <param name="isClosed">if true, the last line segment is defined by the last point of the array and the first point of the array</param>
  607. /// <param name="color">the color used for drawing</param>
  608. /// <param name="thickness">the thickness of the line</param>
  609. /// <param name="lineType">Line type</param>
  610. /// <param name="shift">Number of fractional bits in the center coordinates and radius value</param>
  611. public void DrawPolyline(Point[] pts, bool isClosed, TColor color, int thickness = 1, CvEnum.LineType lineType = CvEnum.LineType.EightConnected, int shift = 0)
  612. {
  613. DrawPolyline(new Point[][] { pts }, isClosed, color, thickness, lineType, shift);
  614. }
  615. /// <summary>
  616. /// Draw the polylines defined by the array of array of 2D points
  617. /// </summary>
  618. /// <param name="pts">An array of polylines each represented by an array of points</param>
  619. /// <param name="isClosed">if true, the last line segment is defined by the last point of the array and the first point of the array</param>
  620. /// <param name="color">the color used for drawing</param>
  621. /// <param name="thickness">the thickness of the line</param>
  622. /// <param name="lineType">Line type</param>
  623. /// <param name="shift">Number of fractional bits in the center coordinates and radius value</param>
  624. public void DrawPolyline(Point[][] pts, bool isClosed, TColor color, int thickness = 1, CvEnum.LineType lineType = CvEnum.LineType.EightConnected, int shift = 0)
  625. {
  626. if (thickness > 0)
  627. {
  628. using (VectorOfVectorOfPoint vvp = new VectorOfVectorOfPoint(pts))
  629. {
  630. CvInvoke.Polylines(this, vvp, isClosed, color.MCvScalar, thickness, lineType, shift);
  631. }
  632. }
  633. }
  634. /// <summary> Draw a Circle of the specific color and thickness </summary>
  635. /// <param name="circle"> The circle to be drawn</param>
  636. /// <param name="color"> The color of the circle </param>
  637. /// <param name="thickness"> If thickness is less than 1, the circle is filled up </param>
  638. /// <param name="lineType">Line type</param>
  639. /// <param name="shift">Number of fractional bits in the center coordinates and radius value</param>
  640. public virtual void Draw(CircleF circle, TColor color, int thickness = 1, CvEnum.LineType lineType = CvEnum.LineType.EightConnected, int shift = 0)
  641. {
  642. CvInvoke.Circle(
  643. this,
  644. Point.Round(circle.Center),
  645. (int)circle.Radius,
  646. color.MCvScalar,
  647. (thickness <= 0) ? -1 : thickness,
  648. lineType,
  649. shift);
  650. }
  651. /// <summary> Draw an Ellipse of the specific color and thickness </summary>
  652. /// <param name="ellipse"> The ellipse to be drawn</param>
  653. /// <param name="color"> The color of the ellipse </param>
  654. /// <param name="thickness"> If thickness is less than 1, the ellipse is filled up </param>
  655. /// <param name="lineType">Line type</param>
  656. /// <param name="shift">Number of fractional bits in the center coordinates and radius value</param>
  657. public void Draw(Ellipse ellipse, TColor color, int thickness = 1, CvEnum.LineType lineType = CvEnum.LineType.EightConnected, int shift = 0)
  658. {
  659. CvInvoke.Ellipse(this, ellipse.RotatedRect, color.MCvScalar, thickness, lineType, shift);
  660. }
  661. /// <summary>
  662. /// Draw the text using the specific font on the image
  663. /// </summary>
  664. /// <param name="message">The text message to be drawn</param>
  665. /// <param name="fontFace">Font type.</param>
  666. /// <param name="fontScale">Font scale factor that is multiplied by the font-specific base size.</param>
  667. /// <param name="bottomLeft">The location of the bottom left corner of the font</param>
  668. /// <param name="color">The color of the text</param>
  669. /// <param name="thickness">Thickness of the lines used to draw a text.</param>
  670. /// <param name="lineType">Line type</param>
  671. /// <param name="bottomLeftOrigin">When true, the image data origin is at the bottom-left corner. Otherwise, it is at the top-left corner.</param>
  672. public virtual void Draw(String message, Point bottomLeft, CvEnum.FontFace fontFace, double fontScale, TColor color, int thickness = 1, CvEnum.LineType lineType = CvEnum.LineType.EightConnected, bool bottomLeftOrigin = false)
  673. {
  674. CvInvoke.PutText(this,
  675. message,
  676. bottomLeft,
  677. fontFace,
  678. fontScale,
  679. color.MCvScalar,
  680. thickness,
  681. lineType,
  682. bottomLeftOrigin);
  683. }
  684. /// <summary>
  685. /// Draws contour outlines in the image if thickness&gt;=0 or fills area bounded by the contours if thickness&lt;0
  686. /// </summary>
  687. /// <param name="contours">All the input contours. Each contour is stored as a point vector.</param>
  688. /// <param name="contourIdx">Parameter indicating a contour to draw. If it is negative, all the contours are drawn.</param>
  689. /// <param name="color">Color of the contours </param>
  690. /// <param name="maxLevel">Maximal level for drawn contours. If 0, only contour is drawn. If 1, the contour and all contours after it on the same level are drawn. If 2, all contours after and all contours one level below the contours are drawn, etc. If the value is negative, the function does not draw the contours following after contour but draws child contours of contour up to abs(maxLevel)-1 level. </param>
  691. /// <param name="thickness">Thickness of lines the contours are drawn with. If it is negative the contour interiors are drawn</param>
  692. /// <param name="lineType">Type of the contour segments</param>
  693. /// <param name="hierarchy">Optional information about hierarchy. It is only needed if you want to draw only some of the contours</param>
  694. /// <param name="offset">Shift all the point coordinates by the specified value. It is useful when the contours were retrieved from an image ROI and the ROI offset needs to be taken into account during the rendering. </param>
  695. public void Draw(
  696. IInputArrayOfArrays contours,
  697. int contourIdx,
  698. TColor color,
  699. int thickness = 1,
  700. CvEnum.LineType lineType = CvEnum.LineType.EightConnected,
  701. IInputArray hierarchy = null,
  702. int maxLevel = int.MaxValue,
  703. Point offset = new Point())
  704. {
  705. CvInvoke.DrawContours(
  706. this,
  707. contours,
  708. contourIdx,
  709. color.MCvScalar,
  710. thickness,
  711. lineType,
  712. hierarchy,
  713. maxLevel,
  714. offset);
  715. }
  716. /// <summary>
  717. /// Draws contour outlines in the image if thickness&gt;=0 or fills area bounded by the contours if thickness&lt;0
  718. /// </summary>
  719. /// <param name="contours">The input contour stored as a point vector.</param>
  720. /// <param name="color">Color of the contours </param>
  721. /// <param name="thickness">Thickness of lines the contours are drawn with. If it is negative the contour interiors are drawn</param>
  722. /// <param name="lineType">Type of the contour segments</param>
  723. /// <param name="offset">Shift all the point coordinates by the specified value. It is useful when the contours were retrieved from an image ROI and the ROI offset needs to be taken into account during the rendering. </param>
  724. public void Draw(
  725. Point[] contours,
  726. TColor color,
  727. int thickness = 1,
  728. CvEnum.LineType lineType = CvEnum.LineType.EightConnected,
  729. Point offset = new Point())
  730. {
  731. using (VectorOfPoint vp = new VectorOfPoint(contours))
  732. using (VectorOfVectorOfPoint vvp = new VectorOfVectorOfPoint())
  733. {
  734. vvp.Push(vp);
  735. Draw(vvp, 0, color, thickness, lineType, null, int.MaxValue);
  736. }
  737. }
  738. #endregion
  739. #region Hough line and circles
  740. /// <summary>
  741. /// Apply Probabilistic Hough transform to find line segments.
  742. /// The current image must be a binary image (eg. the edges as a result of the Canny edge detector)
  743. /// </summary>
  744. /// <param name="rhoResolution">Distance resolution in pixel-related units.</param>
  745. /// <param name="thetaResolution">Angle resolution measured in radians</param>
  746. /// <param name="threshold">A line is returned by the function if the corresponding accumulator value is greater than threshold</param>
  747. /// <param name="minLineWidth">Minimum length of a line; line segments shorter than this are rejected</param>
  748. /// <param name="gapBetweenLines">Maximum allowed gap between points on the same line to link them</param>
  749. /// <returns>The line segments detected for each of the channels</returns>
  750. public LineSegment2D[][] HoughLinesBinary(double rhoResolution, double thetaResolution, int threshold, double minLineWidth, double gapBetweenLines)
  751. {
  752. return this.ForEachDuplicateChannel<LineSegment2D[]>(
  753. delegate (IInputArray img, int channel)
  754. {
  755. return CvInvoke.HoughLinesP(this, rhoResolution, thetaResolution, threshold, minLineWidth, gapBetweenLines);
  756. });
  757. }
  758. /// <summary>
  759. /// Apply Canny Edge Detector follows by Probabilistic Hough transform to find line segments in the image
  760. /// </summary>
  761. /// <param name="cannyThreshold">The threshold to find initial segments of strong edges</param>
  762. /// <param name="cannyThresholdLinking">The threshold used for edge Linking</param>
  763. /// <param name="rhoResolution">Distance resolution in pixel-related units.</param>
  764. /// <param name="thetaResolution">Angle resolution measured in radians</param>
  765. /// <param name="threshold">A line is returned by the function if the corresponding accumulator value is greater than threshold</param>
  766. /// <param name="minLineWidth">Minimum length of a line; line segments shorter than this are rejected</param>
  767. /// <param name="gapBetweenLines">Maximum allowed gap between points on the same line to link them</param>
  768. /// <returns>The line segments detected for each of the channels</returns>
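/// <example>
/// A minimal usage sketch, assuming img is a Bgr, Byte image; the parameter values are arbitrary starting points, not recommended defaults:
/// <code>
/// LineSegment2D[][] lines = img.HoughLines(
///    120, 60,             // Canny thresholds
///    1, Math.PI / 180.0,  // rhoResolution, thetaResolution
///    50,                  // accumulator threshold
///    30, 10);             // minLineWidth, gapBetweenLines
/// foreach (LineSegment2D line in lines[0])
///    img.Draw(line, new Bgr(0, 0, 255), 1);
/// </code>
/// </example>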
  769. public LineSegment2D[][] HoughLines(double cannyThreshold, double cannyThresholdLinking, double rhoResolution, double thetaResolution, int threshold, double minLineWidth, double gapBetweenLines)
  770. {
  771. using (Image<Gray, Byte> canny = Canny(cannyThreshold, cannyThresholdLinking))
  772. {
  773. return canny.HoughLinesBinary(
  774. rhoResolution,
  775. thetaResolution,
  776. threshold,
  777. minLineWidth,
  778. gapBetweenLines);
  779. }
  780. }
  781. /// <summary>
  782. /// First apply Canny Edge Detector on the current image,
  783. /// then apply Hough transform to find circles
  784. /// </summary>
  785. /// <param name="cannyThreshold">The higher threshold of the two passed to the Canny edge detector (the lower one will be half of it).</param>
  786. /// <param name="accumulatorThreshold">Accumulator threshold at the center detection stage. The smaller it is, the more false circles may be detected. Circles, corresponding to the larger accumulator values, will be returned first</param>
  787. /// <param name="dp">Resolution of the accumulator used to detect centers of the circles. For example, if it is 1, the accumulator will have the same resolution as the input image; if it is 2, the accumulator will have half the width and height, etc.</param>
  788. /// <param name="minRadius">Minimal radius of the circles to search for</param>
  789. /// <param name="maxRadius">Maximal radius of the circles to search for</param>
  790. /// <param name="minDist">Minimum distance between centers of the detected circles. If the parameter is too small, multiple neighbor circles may be falsely detected in addition to a true one. If it is too large, some circles may be missed</param>
  791. /// <returns>The circle detected for each of the channels</returns>
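/// <example>
/// A minimal usage sketch on a single channel image; the threshold and geometry values are arbitrary placeholders:
/// <code>
/// Image&lt;Gray, Byte&gt; gray = img.Convert&lt;Gray, Byte&gt;();
/// CircleF[] circles = gray.HoughCircles(
///    new Gray(180),   // Canny threshold
///    new Gray(60),    // accumulator threshold
///    2.0,             // dp
///    20.0,            // minDist
///    5, 0)[0];        // radius range; [0] selects the single channel
/// </code>
/// </example>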
  792. public CircleF[][] HoughCircles(TColor cannyThreshold, TColor accumulatorThreshold, double dp, double minDist, int minRadius = 0, int maxRadius = 0)
  793. {
  794. double[] cannyThresh = cannyThreshold.MCvScalar.ToArray();
  795. double[] accumulatorThresh = accumulatorThreshold.MCvScalar.ToArray();
  796. return this.ForEachDuplicateChannel(
  797. delegate (IInputArray img, int channel)
  798. {
  799. return CvInvoke.HoughCircles(img, CvEnum.HoughModes.Gradient, dp, minDist, cannyThresh[channel], accumulatorThresh[channel], minRadius, maxRadius);
  800. });
  801. }
  802. #endregion
  803. #region Indexer
  804. /// <summary>
  805. /// Get or Set the specific channel of the current image.
  806. /// For Get operation, a copy of the specific channel is returned.
  807. /// For Set operation, the specific channel is copied to this image.
  808. /// </summary>
  809. /// <param name="channel">The channel to get from the current image, zero based index</param>
  810. /// <returns>The specific channel of the current image</returns>
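/// <example>
/// A minimal usage sketch, assuming img is a three channel Bgr, Byte image:
/// <code>
/// Image&lt;Gray, Byte&gt; red = img[2]; // extract the red channel (index 2 in Bgr)
/// img[0] = red;                    // overwrite the blue channel with it
/// </code>
/// </example>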
  811. public Image<Gray, TDepth> this[int channel]
  812. {
  813. get
  814. {
  815. Image<Gray, TDepth> imageChannel = new Image<Gray, TDepth>(Size);
  816. CvInvoke.MixChannels(this, imageChannel, new int[] { channel, 0 });
  817. return imageChannel;
  818. }
  819. set
  820. {
  821. CvInvoke.MixChannels(value, this, new int[] { 0, channel });
  822. }
  823. }
  824. /// <summary>
  825. /// Get or Set the color in the <paramref name="row"/>th row (y direction) and <paramref name="column"/>th column (x direction)
  826. /// </summary>
  827. /// <param name="row">The zero-based row (y direction) of the pixel </param>
  828. /// <param name="column">The zero-based column (x direction) of the pixel</param>
  829. /// <returns>The color in the specific <paramref name="row"/> and <paramref name="column"/></returns>
  830. public TColor this[int row, int column]
  831. {
  832. get
  833. {
  834. TColor res = new TColor();
  835. res.MCvScalar = CvInvoke.cvGet2D(Ptr, row, column);
  836. return res;
  837. }
  838. set
  839. {
  840. CvInvoke.cvSet2D(Ptr, row, column, value.MCvScalar);
  841. }
  842. }
  843. /// <summary>
  844. /// Get or Set the color in the <paramref name="location"/>
  845. /// </summary>
  846. /// <param name="location">the location of the pixel </param>
  847. /// <returns>the color in the <paramref name="location"/></returns>
  848. public TColor this[Point location]
  849. {
  850. get
  851. {
  852. return this[location.Y, location.X];
  853. }
  854. set
  855. {
  856. this[location.Y, location.X] = value;
  857. }
  858. }
  859. #endregion
  860. #region utilities
  861. /// <summary>
  862. /// Return parameters based on ROI
  863. /// </summary>
  864. /// <param name="ptr">The Pointer to the IplImage</param>
  865. /// <param name="start">The address of the first byte of image data, taking the ROI into account</param>
  866. /// <param name="elementCount">The number of elements in a row (ROI.Width * number of channels)</param>
  867. /// <param name="byteWidth">The number of bytes in a row, taking the ROI into account</param>
  868. /// <param name="rows">The number of rows, taking the ROI into account</param>
  869. /// <param name="widthStep">The width step required to jump to the next row</param>
  870. protected static void RoiParam(IntPtr ptr, out Int64 start, out int rows, out int elementCount, out int byteWidth, out int widthStep)
  871. {
  872. MIplImage ipl = (MIplImage)Marshal.PtrToStructure(ptr, typeof(MIplImage));
  873. start = ipl.ImageData.ToInt64();
  874. widthStep = ipl.WidthStep;
  875. if (ipl.Roi != IntPtr.Zero)
  876. {
  877. Rectangle rec = CvInvoke.cvGetImageROI(ptr);
  878. elementCount = rec.Width * ipl.NChannels;
  879. byteWidth = ((int)ipl.Depth >> 3) * elementCount;
  880. start += rec.Y * widthStep
  881. + ((int)ipl.Depth >> 3) * rec.X;
  882. rows = rec.Height;
  883. }
  884. else
  885. {
  886. byteWidth = widthStep;
  887. elementCount = ipl.Width * ipl.NChannels;
  888. rows = ipl.Height;
  889. }
  890. }
  891. /// <summary>
  892. /// Apply the converter and compute the result for each channel of the image.
  893. /// </summary>
  894. /// <remarks>
  895. /// For a single channel image, apply the converter directly.
  896. /// For a multiple channel image, set the COI for the specific channel before applying the converter
  897. /// </remarks>
  898. /// <typeparam name="TResult">The return type</typeparam>
  899. /// <param name="conv">The converter that accepts the IntPtr of a single channel IplImage and the image channel index, and returns a result of type TResult</param>
  900. /// <returns>An array which contains result for each channel</returns>
  901. private TResult[] ForEachChannel<TResult>(Func<IntPtr, int, TResult> conv)
  902. {
  903. TResult[] res = new TResult[NumberOfChannels];
  904. if (NumberOfChannels == 1)
  905. res[0] = conv(Ptr, 0);
  906. else
  907. {
  908. for (int i = 0; i < NumberOfChannels; i++)
  909. {
  910. CvInvoke.cvSetImageCOI(Ptr, i + 1);
  911. res[i] = conv(Ptr, i);
  912. }
  913. CvInvoke.cvSetImageCOI(Ptr, 0);
  914. }
  915. return res;
  916. }
  917. /// <summary>
  918. /// If the image has only one channel, apply the action directly on the IntPtr of this image and <paramref name="dest"/>,
  919. /// otherwise, copy each channel of this image to a temporary one, apply the action on it and another temporary image, and copy the resulting channel back to <paramref name="dest"/>
  920. /// </summary>
  921. /// <typeparam name="TOtherDepth">The type of the depth of the <paramref name="dest"/> image</typeparam>
  922. /// <param name="act">The function which accepts the src IntPtr, dest IntPtr and the index of the channel as input</param>
  923. /// <param name="dest">The destination image</param>
  924. private void ForEachDuplicateChannel<TOtherDepth>(Action<IInputArray, IOutputArray, int> act, Image<TColor, TOtherDepth> dest)
  925. where TOtherDepth : new()
  926. {
  927. if (NumberOfChannels == 1)
  928. act(this, dest, 0);
  929. else
  930. {
  931. using (Mat tmp1 = new Mat())
  932. using (Mat tmp2 = new Mat())
  933. {
  934. for (int i = 0; i < NumberOfChannels; i++)
  935. {
  936. CvInvoke.ExtractChannel(this, tmp1, i);
  937. act(tmp1, tmp2, i);
  938. CvInvoke.InsertChannel(tmp2, dest, i);
  939. }
  940. }
  941. }
  942. }
  943. #endregion
  944. #region Gradient, Edges and Features
  945. /// <summary>
  946. /// Calculates the image derivative by convolving the image with the appropriate kernel
  947. /// The Sobel operators combine Gaussian smoothing and differentiation so the result is more or less robust to the noise. Most often, the function is called with (xorder=1, yorder=0, aperture_size=3) or (xorder=0, yorder=1, aperture_size=3) to calculate first x- or y- image derivative.
  948. /// </summary>
  949. /// <param name="xorder">Order of the derivative x</param>
  950. /// <param name="yorder">Order of the derivative y</param>
  951. /// <param name="apertureSize">Size of the extended Sobel kernel, must be 1, 3, 5 or 7. In all cases except 1, an aperture_size x aperture_size separable kernel will be used to calculate the derivative.</param>
  952. /// <returns>The result of the sobel edge detector</returns>
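/// <example>
/// A minimal usage sketch, assuming gray is a Gray, Byte image; the aperture size of 3 is a common choice, not a required value:
/// <code>
/// Image&lt;Gray, float&gt; gradX = gray.Sobel(1, 0, 3); // first derivative in x
/// Image&lt;Gray, float&gt; gradY = gray.Sobel(0, 1, 3); // first derivative in y
/// </code>
/// </example>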
  953. [ExposableMethod(Exposable = true, Category = "Gradients, Edges")]
  954. public Image<TColor, Single> Sobel(int xorder, int yorder, int apertureSize)
  955. {
  956. Image<TColor, Single> res = new Image<TColor, float>(Size);
  957. CvInvoke.Sobel(this, res, CvInvoke.GetDepthType(typeof(Single)), xorder, yorder, apertureSize, 1.0, 0.0, CvEnum.BorderType.Default);
  958. return res;
  959. }
  960. /// <summary>
  961. /// Calculates Laplacian of the source image by summing second x- and y- derivatives calculated using Sobel operator.
  962. /// Specifying aperture_size=1 gives the fastest variant that is equal to convolving the image with the following kernel:
  963. ///
  964. /// |0 1 0|
  965. /// |1 -4 1|
  966. /// |0 1 0|
  967. /// </summary>
  968. /// <param name="apertureSize">Aperture size </param>
  969. /// <returns>The Laplacian of the image</returns>
  970. [ExposableMethod(Exposable = true, Category = "Gradients, Edges")]
  971. public Image<TColor, Single> Laplace(int apertureSize)
  972. {
  973. Image<TColor, Single> res = new Image<TColor, float>(Size);
  974. CvInvoke.Laplacian(this, res, CvInvoke.GetDepthType(typeof(Single)), apertureSize, 1.0, 0.0, CvEnum.BorderType.Default);
  975. return res;
  976. }
  977. /// <summary> Find the edges on this image and mark them in the returned image.</summary>
  978. /// <param name="thresh"> The threshold to find initial segments of strong edges</param>
  979. /// <param name="threshLinking"> The threshold used for edge linking</param>
  980. /// <returns> The edges found by the Canny edge detector</returns>
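/// <example>
/// A minimal usage sketch, assuming gray is a Gray, Byte image; the two thresholds are placeholder values that usually need tuning:
/// <code>
/// Image&lt;Gray, Byte&gt; edges = gray.Canny(120, 60); // strong-edge threshold, linking threshold
/// </code>
/// </example>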
  981. [ExposableMethod(Exposable = true, Category = "Gradients, Edges")]
  982. public Image<Gray, Byte> Canny(double thresh, double threshLinking)
  983. {
  984. return Canny(thresh, threshLinking, 3, false);
  985. }
  986. /// <summary> Find the edges on this image and mark them in the returned image.</summary>
  987. /// <param name="thresh"> The threshold to find initial segments of strong edges</param>
  988. /// <param name="threshLinking"> The threshold used for edge linking</param>
  989. /// <param name="apertureSize">The aperture size, use 3 for default</param>
  990. /// <param name="l2Gradient">a flag, indicating whether a more accurate norm should be used to calculate the image gradient magnitude ( L2gradient=true ), or whether the default norm is enough ( L2gradient=false ).</param>
  991. /// <returns> The edges found by the Canny edge detector</returns>
  992. public Image<Gray, Byte> Canny(double thresh, double threshLinking, int apertureSize, bool l2Gradient)
  993. {
  994. Image<Gray, Byte> res = new Image<Gray, Byte>(Size);
  995. CvInvoke.Canny(this, res, thresh, threshLinking, apertureSize, l2Gradient);
  996. return res;
  997. }
  998. /// <summary>
  999. /// Iterates to find the sub-pixel accurate location of corners, or radial saddle points
  1000. /// </summary>
  1001. /// <param name="corners">Coordinates of the input corners, the values will be modified by this function call</param>
  1002. /// <param name="win">Half sizes of the search window. For example, if win=(5,5) then 5*2+1 x 5*2+1 = 11 x 11 search window is used</param>
  1003. /// <param name="zeroZone">Half size of the dead region in the middle of the search zone over which the summation in formulae below is not done. It is used sometimes to avoid possible singularities of the autocorrelation matrix. The value of (-1,-1) indicates that there is no such size</param>
  1004. /// <param name="criteria">Criteria for termination of the iterative process of corner refinement. That is, the process of corner position refinement stops either after certain number of iteration or when a required accuracy is achieved. The criteria may specify either of or both the maximum number of iteration and the required accuracy</param>
  1005. /// <returns>Refined corner coordinates</returns>
  1006. public void FindCornerSubPix(
  1007. PointF[][] corners,
  1008. Size win,
  1009. Size zeroZone,
  1010. MCvTermCriteria criteria)
  1011. {
  1012. this.ForEachDuplicateChannel(delegate (IInputArray img, int channel)
  1013. {
  1014. using (VectorOfPointF vec = new VectorOfPointF())
  1015. {
  1016. vec.Push(corners[channel]);
  1017. CvInvoke.CornerSubPix(
  1018. img,
  1019. vec,
  1020. win,
  1021. zeroZone,
  1022. criteria);
  1023. Array.Copy(vec.ToArray(), corners[channel], corners[channel].Length);
  1024. }
  1025. });
  1026. }
  1027. #endregion
  1028. #region Matching
  1029. /// <summary>
  1030. /// The function slides through the image, compares overlapped patches of size w x h with the template using the specified method and returns the comparison results
  1031. /// </summary>
  1032. /// <param name="template">The searched template; it must be no greater than the source image and must have the same data type as the image</param>
  1033. /// <param name="method">Specifies the way the template must be compared with image regions </param>
  1034. /// <returns>The comparison result: width = this.Width - template.Width + 1; height = this.Height - template.Height + 1 </returns>
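/// <example>
/// A minimal sketch locating the best match, assuming template is another image of the same type; CcoeffNormed is one reasonable choice of matching method:
/// <code>
/// Image&lt;Gray, Single&gt; result = img.MatchTemplate(template, CvEnum.TemplateMatchingType.CcoeffNormed);
/// double minVal = 0, maxVal = 0;
/// Point minLoc = Point.Empty, maxLoc = Point.Empty;
/// CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
/// // for CcoeffNormed, maxLoc is the top-left corner of the best match
/// </code>
/// </example>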
  1035. public Image<Gray, Single> MatchTemplate(Image<TColor, TDepth> template, CvEnum.TemplateMatchingType method)
  1036. {
  1037. Image<Gray, Single> res = new Image<Gray, Single>(Width - template.Width + 1, Height - template.Height + 1);
  1038. CvInvoke.MatchTemplate(this, template, res, method);
  1039. return res;
  1040. }
  1041. #endregion
  1042. #region Logic
  1043. #region And Methods
  1044. /// <summary> Perform an elementwise AND operation with another image and return the result</summary>
  1045. /// <param name="img2">The second image for the AND operation</param>
  1046. /// <returns> The result of the AND operation</returns>
  1047. public Image<TColor, TDepth> And(Image<TColor, TDepth> img2)
  1048. {
  1049. Image<TColor, TDepth> res = new Image<TColor, TDepth>(Size);
  1050. CvInvoke.BitwiseAnd(this, img2, res, null);
  1051. return res;
  1052. }
  1053. /// <summary>
  1054. /// Perform an elementwise AND operation with another image, using a mask, and return the result
  1055. /// </summary>
  1056. /// <param name="img2">The second image for the AND operation</param>
  1057. /// <param name="mask">The mask for the AND operation</param>
  1058. /// <returns> The result of the AND operation</returns>
  1059. public Image<TColor, TDepth> And(Image<TColor, TDepth> img2, Image<Gray, Byte> mask)
  1060. {
  1061. Image<TColor, TDepth> res = new Image<TColor, TDepth>(Size);
  1062. CvInvoke.BitwiseAnd(this, img2, res, mask);
  1063. return res;
  1064. }
  1065. /// <summary> Perform a binary AND operation with some color</summary>
  1066. /// <param name="val">The color for the AND operation</param>
  1067. /// <returns> The result of the AND operation</returns>
  1068. public Image<TColor, TDepth> And(TColor val)
  1069. {
  1070. return And(val, null);
  1071. }
  1072. /// <summary> Perform a binary AND operation with some color using a mask</summary>
  1073. /// <param name="val">The color for the AND operation</param>
  1074. /// <param name="mask">The mask for the AND operation</param>
  1075. /// <returns> The result of the AND operation</returns>
  1076. public Image<TColor, TDepth> And(TColor val, Image<Gray, Byte> mask)
  1077. {
  1078. Image<TColor, TDepth> res = new Image<TColor, TDepth>(Size);
  1079. using (ScalarArray ia = new ScalarArray(val.MCvScalar))
  1080. {
  1081. CvInvoke.BitwiseAnd(this, ia, res, mask);
  1082. }
  1083. return res;
  1084. }
  1085. #endregion
  1086. #region Or Methods
  1087. /// <summary> Perform an elementwise OR operation with another image and return the result</summary>
  1088. /// <param name="img2">The second image for the OR operation</param>
  1089. /// <returns> The result of the OR operation</returns>
  1090. public Image<TColor, TDepth> Or(Image<TColor, TDepth> img2)
  1091. {
  1092. return Or(img2, null);
  1093. }
  1094. /// <summary> Perform an elementwise OR operation with another image, using a mask, and return the result</summary>
  1095. /// <param name="img2">The second image for the OR operation</param>
  1096. /// <param name="mask">The mask for the OR operation</param>
  1097. /// <returns> The result of the OR operation</returns>
  1098. public Image<TColor, TDepth> Or(Image<TColor, TDepth> img2, Image<Gray, Byte> mask)
  1099. {
  1100. Image<TColor, TDepth> res = CopyBlank();
  1101. CvInvoke.BitwiseOr(this, img2, res, mask);
  1102. return res;
  1103. }
  1104. /// <summary> Perform an elementwise OR operation with some color</summary>
  1105. /// <param name="val">The value for the OR operation</param>
  1106. /// <returns> The result of the OR operation</returns>
  1107. [ExposableMethod(Exposable = true, Category = "Logic")]
  1108. public Image<TColor, TDepth> Or(TColor val)
  1109. {
  1110. return Or(val, null);
  1111. }
  1112. /// <summary> Perform an elementwise OR operation with some color using a mask</summary>
  1113. /// <param name="val">The color for the OR operation</param>
  1114. /// <param name="mask">The mask for the OR operation</param>
  1115. /// <returns> The result of the OR operation</returns>
  1116. public Image<TColor, TDepth> Or(TColor val, Image<Gray, Byte> mask)
  1117. {
  1118. Image<TColor, TDepth> res = CopyBlank();
  1119. using (ScalarArray ia = new ScalarArray(val.MCvScalar))
  1120. {
  1121. CvInvoke.BitwiseOr(this, ia, res, mask);
  1122. }
  1123. return res;
  1124. }
  1125. #endregion
  1126. #region Xor Methods
  1127. /// <summary> Perform an elementwise XOR operation with another image and return the result</summary>
  1128. /// <param name="img2">The second image for the XOR operation</param>
  1129. /// <returns> The result of the XOR operation</returns>
  1130. public Image<TColor, TDepth> Xor(Image<TColor, TDepth> img2)
  1131. {
  1132. return Xor(img2, null);
  1133. }
  1134. /// <summary>
  1135. /// Perform an elementwise XOR operation with another image, using a mask, and return the result
  1136. /// </summary>
  1137. /// <param name="img2">The second image for the XOR operation</param>
  1138. /// <param name="mask">The mask for the XOR operation</param>
  1139. /// <returns>The result of the XOR operation</returns>
  1140. public Image<TColor, TDepth> Xor(Image<TColor, TDepth> img2, Image<Gray, Byte> mask)
  1141. {
  1142. Image<TColor, TDepth> res = CopyBlank();
  1143. CvInvoke.BitwiseXor(this, img2, res, mask);
  1144. return res;
  1145. }
  1146. /// <summary>
  1147. /// Perform a binary XOR operation with some color
  1148. /// </summary>
  1149. /// <param name="val">The value for the XOR operation</param>
  1150. /// <returns> The result of the XOR operation</returns>
  1151. [ExposableMethod(Exposable = true, Category = "Logic")]
  1152. public Image<TColor, TDepth> Xor(TColor val)
  1153. {
  1154. return Xor(val, null);
  1155. }
  1156. /// <summary>
  1157. /// Perform a binary XOR operation with some color using a mask
  1158. /// </summary>
  1159. /// <param name="val">The color for the XOR operation</param>
  1160. /// <param name="mask">The mask for the XOR operation</param>
  1161. /// <returns> The result of the XOR operation</returns>
  1162. public Image<TColor, TDepth> Xor(TColor val, Image<Gray, Byte> mask)
  1163. {
  1164. Image<TColor, TDepth> res = CopyBlank();
  1165. using (ScalarArray ia = new ScalarArray(val.MCvScalar))
  1166. {
  1167. CvInvoke.BitwiseXor(this, ia, res, mask);
  1168. }
  1169. return res;
  1170. }
  1171. #endregion
  1172. /// <summary>
  1173. /// Compute the complement image
  1174. /// </summary>
  1175. /// <returns> The complement image</returns>
  1176. public Image<TColor, TDepth> Not()
  1177. {
  1178. Image<TColor, TDepth> res = CopyBlank();
  1179. CvInvoke.BitwiseNot(this, res, null);
  1180. return res;
  1181. }
  1182. #endregion
  1183. #region Comparison
  1184. /// <summary> Find the elementwise maximum value </summary>
  1185. /// <param name="img2">The second image for the Max operation</param>
  1186. /// <returns> An image where each pixel is the maximum of <i>this</i> image and the parameter image</returns>
  1187. public Image<TColor, TDepth> Max(Image<TColor, TDepth> img2)
  1188. {
  1189. Image<TColor, TDepth> res = CopyBlank();
  1190. CvInvoke.Max(this, img2, res);
  1191. return res;
  1192. }
  1193. /// <summary> Find the elementwise maximum value </summary>
  1194. /// <param name="value">The value to compare with</param>
  1195. /// <returns> An image where each pixel is the maximum of <i>this</i> image and <paramref name="value"/></returns>
  1196. public Image<TColor, TDepth> Max(double value)
  1197. {
  1198. Image<TColor, TDepth> res = CopyBlank();
  1199. using (ScalarArray ia = new ScalarArray(value))
  1200. {
  1201. CvInvoke.Max(this, ia, res);
  1202. }
  1203. return res;
  1204. }
  1205. /// <summary> Find the elementwise minimum value </summary>
  1206. /// <param name="img2">The second image for the Min operation</param>
  1207. /// <returns> An image where each pixel is the minimum of <i>this</i> image and the parameter image</returns>
  1208. public Image<TColor, TDepth> Min(Image<TColor, TDepth> img2)
  1209. {
  1210. Image<TColor, TDepth> res = CopyBlank();
  1211. CvInvoke.Min(this, img2, res);
  1212. return res;
  1213. }
  1214. /// <summary> Find the elementwise minimum value </summary>
  1215. /// <param name="value">The value to compare with</param>
  1216. /// <returns> An image where each pixel is the minimum of <i>this</i> image and <paramref name="value"/></returns>
  1217. public Image<TColor, TDepth> Min(double value)
  1218. {
  1219. Image<TColor, TDepth> res = CopyBlank();
  1220. using (ScalarArray ia = new ScalarArray(value))
  1221. {
  1222. CvInvoke.Min(this, ia, res);
  1223. }
  1224. return res;
  1225. }
  1226. /// <summary>Checks that image elements lie between two scalars</summary>
  1227. /// <param name="lower"> The inclusive lower limit of color value</param>
  1228. /// <param name="higher"> The inclusive upper limit of color value</param>
  1229. /// <returns> res[i,j] = 255 if <paramref name="lower"/> &lt;= this[i,j] &lt;= <paramref name="higher"/>, 0 otherwise</returns>
  1230. [ExposableMethod(Exposable = true, Category = "Logic")]
  1231. public Image<Gray, Byte> InRange(TColor lower, TColor higher)
  1232. {
  1233. Image<Gray, Byte> res = new Image<Gray, Byte>(Size);
  1234. using (ScalarArray ialower = new ScalarArray(lower.MCvScalar))
  1235. using (ScalarArray iaupper = new ScalarArray(higher.MCvScalar))
  1236. CvInvoke.InRange(this, ialower, iaupper, res);
  1237. return res;
  1238. }
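// Illustrative usage sketch (not part of the original source): InRange is the usual building
// block for color thresholding. Assuming an existing Image<Hsv, Byte> named "hsv", a
// hue/saturation/value window can be turned into a binary mask:
//
//     Image<Gray, Byte> mask = hsv.InRange(new Hsv(100, 80, 50), new Hsv(130, 255, 255));
//     int matchingPixels = CvInvoke.CountNonZero(mask);   // mask is 255 where the pixel lies inside the range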
1239. /// <summary>Checks that image elements lie between values defined by two images of the same size and type</summary>
  1240. /// <param name="lower"> The inclusive lower limit of color value</param>
  1241. /// <param name="higher"> The inclusive upper limit of color value</param>
  1242. /// <returns> res[i,j] = 255 if <paramref name="lower"/>[i,j] &lt;= this[i,j] &lt;= <paramref name="higher"/>[i,j], 0 otherwise</returns>
  1243. public Image<Gray, Byte> InRange(Image<TColor, TDepth> lower, Image<TColor, TDepth> higher)
  1244. {
  1245. Image<Gray, Byte> res = new Image<Gray, Byte>(Size);
  1246. CvInvoke.InRange(this, lower, higher, res);
  1247. return res;
  1248. }
  1249. /// <summary>
1250. /// Compare the current image with <paramref name="img2"/> and return the comparison mask
  1251. /// </summary>
  1252. /// <param name="img2">The other image to compare with</param>
  1253. /// <param name="cmpType">The comparison type</param>
  1254. /// <returns>The result of the comparison as a mask</returns>
  1255. public Image<TColor, Byte> Cmp(Image<TColor, TDepth> img2, CvEnum.CmpType cmpType)
  1256. {
  1257. Size size = Size;
  1258. Image<TColor, Byte> res = new Image<TColor, byte>(size);
  1259. if (NumberOfChannels == 1)
  1260. {
  1261. CvInvoke.Compare(this, img2, res, cmpType);
  1262. }
  1263. else
  1264. {
  1265. using (Image<Gray, TDepth> src1 = new Image<Gray, TDepth>(size))
  1266. using (Image<Gray, TDepth> src2 = new Image<Gray, TDepth>(size))
  1267. using (Image<Gray, Byte> dest = new Image<Gray, Byte>(size))
  1268. for (int i = 0; i < NumberOfChannels; i++)
  1269. {
  1270. CvInvoke.cvSetImageCOI(Ptr, i + 1);
  1271. CvInvoke.cvSetImageCOI(img2.Ptr, i + 1);
  1272. CvInvoke.cvCopy(Ptr, src1.Ptr, IntPtr.Zero);
  1273. CvInvoke.cvCopy(img2.Ptr, src2.Ptr, IntPtr.Zero);
  1274. CvInvoke.Compare(src1, src2, dest, cmpType);
  1275. CvInvoke.cvSetImageCOI(res.Ptr, i + 1);
  1276. CvInvoke.cvCopy(dest.Ptr, res.Ptr, IntPtr.Zero);
  1277. }
  1278. CvInvoke.cvSetImageCOI(Ptr, 0);
  1279. CvInvoke.cvSetImageCOI(img2.Ptr, 0);
  1280. CvInvoke.cvSetImageCOI(res.Ptr, 0);
  1281. }
  1282. return res;
  1283. }
  1284. /// <summary>
1285. /// Compare the current image with <paramref name="value"/> and return the comparison mask
  1286. /// </summary>
  1287. /// <param name="value">The value to compare with</param>
  1288. /// <param name="comparisonType">The comparison type</param>
  1289. /// <returns>The result of the comparison as a mask</returns>
  1290. [ExposableMethod(Exposable = true, Category = "Logic")]
  1291. public Image<TColor, Byte> Cmp(double value, CvEnum.CmpType comparisonType)
  1292. {
  1293. Size size = Size;
  1294. Image<TColor, Byte> res = new Image<TColor, byte>(size);
  1295. using (ScalarArray ia = new ScalarArray(value))
  1296. {
  1297. if (NumberOfChannels == 1)
  1298. {
  1299. CvInvoke.Compare(this, ia, res, comparisonType);
  1300. }
  1301. else
  1302. {
  1303. this.ForEachDuplicateChannel<Byte>(
  1304. delegate (IInputArray img1, IOutputArray img2, int channel)
  1305. {
  1306. CvInvoke.Compare(img1, ia, img2, comparisonType);
  1307. },
  1308. res);
  1309. }
  1310. }
  1311. return res;
  1312. }
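// Illustrative usage sketch (not part of the original source): Cmp produces a per-pixel mask
// (255 where the comparison holds, 0 elsewhere). Assuming an existing Image<Gray, Byte>
// named "gray" and a second image "otherGray" of the same size:
//
//     Image<Gray, Byte> brightMask = gray.Cmp(200, CvEnum.CmpType.GreaterThan);
//     Image<Gray, Byte> equalMask = gray.Cmp(otherGray, CvEnum.CmpType.Equal);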
  1313. /// <summary>
1314. /// Compare two images, returns true if all of the pixels are equal, false otherwise
  1315. /// </summary>
  1316. /// <param name="img2">The other image to compare with</param>
1317. /// <returns>true if all of the pixels of the two images are equal, false otherwise</returns>
  1318. public bool Equals(Image<TColor, TDepth> img2)
  1319. {
  1320. //true if the references are equal
  1321. if (Object.ReferenceEquals(this, img2)) return true;
1322. //false if the sizes are not equal
  1323. if (!Size.Equals(img2.Size)) return false;
  1324. using (Image<TColor, TDepth> neqMask = new Image<TColor, TDepth>(Size))
  1325. {
  1326. CvInvoke.BitwiseXor(this, img2, neqMask, null);
  1327. if (NumberOfChannels == 1)
  1328. return CvInvoke.CountNonZero(neqMask) == 0;
  1329. else
  1330. {
  1331. IntPtr singleChannel = Marshal.AllocHGlobal(StructSize.MCvMat);
  1332. try
  1333. {
  1334. CvInvoke.cvReshape(neqMask, singleChannel, 1, 0);
  1335. using (Mat m = CvInvoke.CvArrToMat(singleChannel))
  1336. {
  1337. return CvInvoke.CountNonZero(m) == 0;
  1338. }
  1339. }
  1340. finally
  1341. {
  1342. Marshal.FreeHGlobal(singleChannel);
  1343. }
  1344. }
  1345. }
  1346. }
  1347. #endregion
  1348. #region Segmentation
  1349. /// <summary>
  1350. /// Use grabcut to perform background foreground segmentation.
  1351. /// </summary>
  1352. /// <param name="rect">The initial rectangle region for the foreground</param>
  1353. /// <param name="iteration">The number of iterations to run GrabCut</param>
  1354. /// <returns>The background foreground mask where 2 indicates background and 3 indicates foreground</returns>
  1355. public Image<Gray, Byte> GrabCut(Rectangle rect, int iteration)
  1356. {
  1357. Image<Gray, Byte> mask = new Image<Gray, byte>(Size);
  1358. using (Matrix<double> bgdModel = new Matrix<double>(1, 13 * 5))
  1359. using (Matrix<double> fgdModel = new Matrix<double>(1, 13 * 5))
  1360. {
  1361. CvInvoke.GrabCut(this, mask, rect, bgdModel, fgdModel, 0, Emgu.CV.CvEnum.GrabcutInitType.InitWithRect);
  1362. CvInvoke.GrabCut(this, mask, rect, bgdModel, fgdModel, iteration, Emgu.CV.CvEnum.GrabcutInitType.Eval);
  1363. }
  1364. return mask;
  1365. }
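// Illustrative usage sketch (not part of the original source): the GrabCut mask uses the
// values documented above (2 = background, 3 = foreground), so a binary foreground mask can
// be derived with Cmp. Assuming an existing Image<Bgr, Byte> named "img" and a user supplied
// Rectangle "roi" that bounds the object:
//
//     Image<Gray, Byte> grabCutMask = img.GrabCut(roi, 5);
//     Image<Gray, Byte> foreground = grabCutMask.Cmp(3, CvEnum.CmpType.Equal);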
  1366. #endregion
1367. #region Arithmetic
  1368. #region Subtraction methods
  1369. /// <summary> Elementwise subtract another image from the current image </summary>
  1370. /// <param name="img2">The second image to be subtracted from the current image</param>
  1371. /// <returns> The result of elementwise subtracting img2 from the current image</returns>
  1372. public Image<TColor, TDepth> Sub(Image<TColor, TDepth> img2)
  1373. {
  1374. Image<TColor, TDepth> res = CopyBlank();
  1375. CvInvoke.Subtract(this, img2, res, null, CvInvoke.GetDepthType(typeof(TDepth)));
  1376. return res;
  1377. }
  1378. /// <summary> Elementwise subtract another image from the current image, using a mask</summary>
  1379. /// <param name="img2">The image to be subtracted from the current image</param>
  1380. /// <param name="mask">The mask for the subtract operation</param>
  1381. /// <returns> The result of elementwise subtracting img2 from the current image, using the specific mask</returns>
  1382. public Image<TColor, TDepth> Sub(Image<TColor, TDepth> img2, Image<Gray, Byte> mask)
  1383. {
  1384. Image<TColor, TDepth> res = CopyBlank();
  1385. CvInvoke.Subtract(this, img2, res, mask, CvInvoke.GetDepthType(typeof(TDepth)));
  1386. return res;
  1387. }
  1388. /// <summary> Elementwise subtract a color from the current image</summary>
  1389. /// <param name="val">The color value to be subtracted from the current image</param>
  1390. /// <returns> The result of elementwise subtracting color 'val' from the current image</returns>
  1391. [ExposableMethod(Exposable = true, Category = "Math")]
  1392. public Image<TColor, TDepth> Sub(TColor val)
  1393. {
  1394. Image<TColor, TDepth> res = CopyBlank();
  1395. using (ScalarArray ia = new ScalarArray(val.MCvScalar))
  1396. {
  1397. CvInvoke.Subtract(this, ia, res, null, CvInvoke.GetDepthType(typeof(TDepth)));
  1398. }
  1399. return res;
  1400. }
  1401. /// <summary>
  1402. /// result = val - this
  1403. /// </summary>
  1404. /// <param name="val">the value which subtract this image</param>
  1405. /// <returns>val - this</returns>
  1406. [ExposableMethod(Exposable = true, Category = "Math")]
  1407. public Image<TColor, TDepth> SubR(TColor val)
  1408. {
  1409. return SubR(val, null);
  1410. }
  1411. /// <summary>
  1412. /// result = val - this, using a mask
  1413. /// </summary>
  1414. /// <param name="val">The value which subtract this image</param>
  1415. /// <param name="mask">The mask for subtraction</param>
  1416. /// <returns>val - this, with mask</returns>
  1417. public Image<TColor, TDepth> SubR(TColor val, Image<Gray, Byte> mask)
  1418. {
  1419. Image<TColor, TDepth> res = CopyBlank();
  1420. using (ScalarArray ia = new ScalarArray(val.MCvScalar))
  1421. {
  1422. CvInvoke.Subtract(ia, this, res, mask, CvInvoke.GetDepthType(typeof(TDepth)));
  1423. }
  1424. return res;
  1425. }
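// Illustrative usage sketch (not part of the original source): Sub and SubR differ only in
// operand order. Assuming an existing Image<Gray, Byte> named "gray":
//
//     Image<Gray, Byte> darker = gray.Sub(new Gray(50));     // this - 50, saturated at 0
//     Image<Gray, Byte> negative = gray.SubR(new Gray(255)); // 255 - this, i.e. an inverted image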
  1426. #endregion
  1427. #region Addition methods
  1428. /// <summary> Elementwise add another image with the current image </summary>
  1429. /// <param name="img2">The image to be added to the current image</param>
  1430. /// <returns> The result of elementwise adding img2 to the current image</returns>
  1431. public Image<TColor, TDepth> Add(Image<TColor, TDepth> img2)
  1432. {
  1433. return Add(img2, null);
  1434. }
  1435. /// <summary> Elementwise add <paramref name="img2"/> with the current image, using a mask</summary>
  1436. /// <param name="img2">The image to be added to the current image</param>
  1437. /// <param name="mask">The mask for the add operation</param>
  1438. /// <returns> The result of elementwise adding img2 to the current image, using the specific mask</returns>
  1439. public Image<TColor, TDepth> Add(Image<TColor, TDepth> img2, Image<Gray, Byte> mask)
  1440. {
  1441. Image<TColor, TDepth> res = CopyBlank();
  1442. CvInvoke.Add(this, img2, res, mask, CvInvoke.GetDepthType(typeof(TDepth)));
  1443. return res;
  1444. }
  1445. /// <summary> Elementwise add a color <paramref name="val"/> to the current image</summary>
  1446. /// <param name="val">The color value to be added to the current image</param>
1447. /// <returns> The result of elementwise adding color <paramref name="val"/> to the current image</returns>
  1448. [ExposableMethod(Exposable = true, Category = "Math")]
  1449. public Image<TColor, TDepth> Add(TColor val)
  1450. {
  1451. Image<TColor, TDepth> res = CopyBlank();
  1452. using (ScalarArray ia = new ScalarArray(val.MCvScalar))
  1453. {
  1454. CvInvoke.Add(this, ia, res, null, CvInvoke.GetDepthType(typeof(TDepth)));
  1455. }
  1456. return res;
  1457. }
  1458. #endregion
  1459. #region Multiplication methods
  1460. /// <summary> Elementwise multiply another image with the current image and the <paramref name="scale"/></summary>
  1461. /// <param name="img2">The image to be elementwise multiplied to the current image</param>
  1462. /// <param name="scale">The scale to be multiplied</param>
  1463. /// <returns> this .* img2 * scale </returns>
  1464. public Image<TColor, TDepth> Mul(Image<TColor, TDepth> img2, double scale)
  1465. {
  1466. Image<TColor, TDepth> res = CopyBlank();
  1467. CvInvoke.Multiply(this, img2, res, scale, CvInvoke.GetDepthType(typeof(TDepth)));
  1468. return res;
  1469. }
  1470. /// <summary> Elementwise multiply <paramref name="img2"/> with the current image</summary>
  1471. /// <param name="img2">The image to be elementwise multiplied to the current image</param>
  1472. /// <returns> this .* img2 </returns>
  1473. public Image<TColor, TDepth> Mul(Image<TColor, TDepth> img2)
  1474. {
  1475. return Mul(img2, 1.0);
  1476. }
  1477. /// <summary> Elementwise multiply the current image with <paramref name="scale"/></summary>
  1478. /// <param name="scale">The scale to be multiplied</param>
  1479. /// <returns> The scaled image </returns>
  1480. [ExposableMethod(Exposable = true, Category = "Math")]
  1481. public Image<TColor, TDepth> Mul(double scale)
  1482. {
  1483. Image<TColor, TDepth> res = CopyBlank();
  1484. CvInvoke.cvConvertScale(Ptr, res.Ptr, scale, 0.0);
  1485. return res;
  1486. }
  1487. #endregion
  1488. /// <summary>
  1489. /// Accumulate <paramref name="img2"/> to the current image using the specific mask
  1490. /// </summary>
  1491. /// <param name="img2">The image to be added to the current image</param>
  1492. /// <param name="mask">the mask</param>
  1493. public void Accumulate(Image<TColor, TDepth> img2, Image<Gray, Byte> mask)
  1494. {
  1495. CvInvoke.Accumulate(img2, this, mask);
  1496. }
  1497. /// <summary>
1498. /// Accumulate <paramref name="img2"/> to the current image
  1499. /// </summary>
  1500. /// <param name="img2">The image to be added to the current image</param>
  1501. public void Accumulate(Image<TColor, TDepth> img2)
  1502. {
  1503. CvInvoke.Accumulate(img2, this, null);
  1504. }
  1505. /// <summary>
  1506. /// Return the weighted sum such that: res = this * alpha + img2 * beta + gamma
  1507. /// </summary>
  1508. /// <param name="img2">img2 in: res = this * alpha + img2 * beta + gamma </param>
  1509. /// <param name="alpha">alpha in: res = this * alpha + img2 * beta + gamma</param>
  1510. /// <param name="beta">beta in: res = this * alpha + img2 * beta + gamma</param>
  1511. /// <param name="gamma">gamma in: res = this * alpha + img2 * beta + gamma</param>
  1512. /// <returns>this * alpha + img2 * beta + gamma</returns>
  1513. public Image<TColor, TDepth> AddWeighted(Image<TColor, TDepth> img2, double alpha, double beta, double gamma)
  1514. {
  1515. Image<TColor, TDepth> res = CopyBlank();
  1516. CvInvoke.AddWeighted(this, alpha, img2, beta, gamma, res, CvInvoke.GetDepthType(typeof(TDepth)));
  1517. return res;
  1518. }
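// Illustrative usage sketch (not part of the original source): AddWeighted is a convenient
// way to cross-fade two frames. Assuming two existing Image<Bgr, Byte> frames "frameA" and
// "frameB" of equal size and a blend factor t in [0, 1]:
//
//     double t = 0.25;
//     Image<Bgr, Byte> blended = frameA.AddWeighted(frameB, 1.0 - t, t, 0.0);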
  1519. /// <summary>
  1520. /// Update Running Average. <i>this</i> = (1-alpha)*<i>this</i> + alpha*img
  1521. /// </summary>
  1522. /// <param name="img">Input image, 1- or 3-channel, Byte or Single (each channel of multi-channel image is processed independently). </param>
  1523. /// <param name="alpha">the weight of <paramref name="img"/></param>
  1524. public void AccumulateWeighted(Image<TColor, TDepth> img, double alpha)
  1525. {
  1526. AccumulateWeighted(img, alpha, null);
  1527. }
  1528. /// <summary>
  1529. /// Update Running Average. <i>this</i> = (1-alpha)*<i>this</i> + alpha*img, using the mask
  1530. /// </summary>
  1531. /// <param name="img">Input image, 1- or 3-channel, Byte or Single (each channel of multi-channel image is processed independently). </param>
  1532. /// <param name="alpha">The weight of <paramref name="img"/></param>
  1533. /// <param name="mask">The mask for the running average</param>
  1534. public void AccumulateWeighted(Image<TColor, TDepth> img, double alpha, Image<Gray, Byte> mask)
  1535. {
  1536. CvInvoke.AccumulateWeighted(img, this, alpha, mask);
  1537. }
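// Illustrative usage sketch (not part of the original source): a simple running-average
// background model. The accumulator should be a floating point image, so the incoming frames
// are converted before being folded in. The names "firstFrame" and "frames" are assumptions:
//
//     Image<Gray, Single> background = firstFrame.Convert<Gray, Single>();
//     foreach (Image<Gray, Byte> frame in frames)
//     {
//         using (Image<Gray, Single> frameF = frame.Convert<Gray, Single>())
//             background.AccumulateWeighted(frameF, 0.05);   // background = 0.95*background + 0.05*frame
//     }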
  1538. /// <summary>
1539. /// Computes the absolute difference between <i>this</i> image and the other image
1540. /// </summary>
1541. /// <param name="img2">The other image to compute the absolute difference with</param>
1542. /// <returns> The image that contains the absolute difference values</returns>
  1543. public Image<TColor, TDepth> AbsDiff(Image<TColor, TDepth> img2)
  1544. {
  1545. Image<TColor, TDepth> res = CopyBlank();
  1546. CvInvoke.AbsDiff(this, img2, res);
  1547. return res;
  1548. }
  1549. /// <summary>
1550. /// Computes the absolute difference between <i>this</i> image and the specified color
1551. /// </summary>
1552. /// <param name="color">The color to compute the absolute difference with</param>
1553. /// <returns> The image that contains the absolute difference values</returns>
  1554. [ExposableMethod(Exposable = true, Category = "Math")]
  1555. public Image<TColor, TDepth> AbsDiff(TColor color)
  1556. {
  1557. Image<TColor, TDepth> res = new Image<TColor, TDepth>(Size);
  1558. using (ScalarArray ia = new ScalarArray(color.MCvScalar))
  1559. {
  1560. CvInvoke.AbsDiff(this, ia, res);
  1561. }
  1562. return res;
  1563. }
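// Illustrative usage sketch (not part of the original source): AbsDiff plus a comparison
// gives a basic frame-differencing change mask. Assuming two existing Image<Gray, Byte>
// frames "previous" and "current" of equal size:
//
//     using (Image<Gray, Byte> diff = current.AbsDiff(previous))
//     {
//         Image<Gray, Byte> changed = diff.Cmp(30, CvEnum.CmpType.GreaterThan);  // 255 where |current - previous| > 30
//     }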
  1564. #endregion
  1565. #region Math Functions
  1566. /// <summary>
  1567. /// Raises every element of input array to p
  1568. /// dst(I)=src(I)^p, if p is integer
  1569. /// dst(I)=abs(src(I))^p, otherwise
  1570. /// </summary>
  1571. /// <param name="power">The exponent of power</param>
  1572. /// <returns>The power image</returns>
  1573. [ExposableMethod(Exposable = true, Category = "Math")]
  1574. public Image<TColor, TDepth> Pow(double power)
  1575. {
  1576. Image<TColor, TDepth> res = CopyBlank();
  1577. CvInvoke.Pow(this, power, res);
  1578. return res;
  1579. }
  1580. /// <summary>
  1581. /// Calculates exponent of every element of input array:
  1582. /// dst(I)=exp(src(I))
  1583. /// </summary>
  1584. /// <remarks>Maximum relative error is ~7e-6. Currently, the function converts denormalized values to zeros on output.</remarks>
  1585. /// <returns>The exponent image</returns>
  1586. [ExposableMethod(Exposable = true, Category = "Math")]
  1587. public Image<TColor, TDepth> Exp()
  1588. {
  1589. Image<TColor, TDepth> res = CopyBlank();
  1590. CvInvoke.Exp(this, res);
  1591. return res;
  1592. }
  1593. /// <summary>
  1594. /// Calculates natural logarithm of absolute value of every element of input array
  1595. /// </summary>
  1596. /// <returns>Natural logarithm of absolute value of every element of input array</returns>
  1597. [ExposableMethod(Exposable = true, Category = "Math")]
  1598. public Image<TColor, TDepth> Log()
  1599. {
  1600. Image<TColor, TDepth> res = CopyBlank();
  1601. CvInvoke.Log(this, res);
  1602. return res;
  1603. }
  1604. #endregion
  1605. #region Sampling, Interpolation and Geometrical Transforms
  1606. /*
  1607. ///<summary> Sample the pixel values on the specific line segment </summary>
  1608. ///<param name="line"> The line to obtain samples</param>
  1609. ///<returns>The values on the (Eight-connected) line </returns>
  1610. public TDepth[,] Sample(LineSegment2D line)
  1611. {
  1612. return Sample(line, Emgu.CV.CvEnum.Connectivity.EightConnected);
  1613. }
  1614. /// <summary>
  1615. /// Sample the pixel values on the specific line segment
  1616. /// </summary>
  1617. /// <param name="line">The line to obtain samples</param>
  1618. /// <param name="type">The sampling type</param>
  1619. /// <returns>The values on the line, the first dimension is the index of the point, the second dimension is the index of color channel</returns>
  1620. public TDepth[,] Sample(LineSegment2D line, CvEnum.Connectivity type)
  1621. {
  1622. int size = type == Emgu.CV.CvEnum.Connectivity.EightConnected ?
  1623. Math.Max(Math.Abs(line.P2.X - line.P1.X), Math.Abs(line.P2.Y - line.P1.Y))
  1624. : Math.Abs(line.P2.X - line.P1.X) + Math.Abs(line.P2.Y - line.P1.Y);
  1625. TDepth[,] data = new TDepth[size, NumberOfChannels];
  1626. GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned);
  1627. Point p1 = line.P1;
  1628. Point p2 = line.P2;
  1629. CvInvoke.cvSampleLine(
  1630. Ptr,
  1631. ref p1,
  1632. ref p2,
  1633. handle.AddrOfPinnedObject(),
  1634. type);
  1635. handle.Free();
  1636. return data;
  1637. }*/
  1638. /// <summary>
  1639. /// Scale the image to the specific size
  1640. /// </summary>
  1641. /// <param name="width">The width of the returned image.</param>
  1642. /// <param name="height">The height of the returned image.</param>
  1643. /// <param name="interpolationType">The type of interpolation</param>
  1644. /// <returns>The resized image</returns>
  1645. [ExposableMethod(Exposable = true)]
  1646. public Image<TColor, TDepth> Resize(int width, int height, CvEnum.Inter interpolationType)
  1647. {
  1648. Image<TColor, TDepth> imgScale = new Image<TColor, TDepth>(width, height);
  1649. CvInvoke.Resize(this, imgScale, new Size(width, height), 0, 0, interpolationType);
  1650. return imgScale;
  1651. }
  1652. /// <summary>
  1653. /// Scale the image to the specific size
  1654. /// </summary>
  1655. /// <param name="width">The width of the returned image.</param>
  1656. /// <param name="height">The height of the returned image.</param>
  1657. /// <param name="interpolationType">The type of interpolation</param>
  1658. /// <param name="preserveScale">if true, the scale is preservered and the resulting image has maximum width(height) possible that is &lt;= <paramref name="width"/> (<paramref name="height"/>), if false, this function is equaivalent to Resize(int width, int height)</param>
  1659. /// <returns>The resized image</returns>
  1660. public Image<TColor, TDepth> Resize(int width, int height, CvEnum.Inter interpolationType, bool preserveScale)
  1661. {
  1662. return preserveScale ?
  1663. Resize(Math.Min((double)width / Width, (double)height / Height), interpolationType)
  1664. : Resize(width, height, interpolationType);
  1665. }
  1666. /// <summary>
  1667. /// Scale the image to the specific size: width *= scale; height *= scale
  1668. /// </summary>
  1669. /// <param name="scale">The scale to resize</param>
  1670. /// <param name="interpolationType">The type of interpolation</param>
  1671. /// <returns>The scaled image</returns>
  1672. [ExposableMethod(Exposable = true)]
  1673. public Image<TColor, TDepth> Resize(double scale, CvEnum.Inter interpolationType)
  1674. {
  1675. return Resize(
  1676. (int)(Width * scale),
  1677. (int)(Height * scale),
  1678. interpolationType);
  1679. }
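// Illustrative usage sketch (not part of the original source): the Resize overloads above
// cover fixed-size, aspect-preserving and uniform-scale resizing. Assuming an existing
// Image<Bgr, Byte> named "img":
//
//     Image<Bgr, Byte> thumbnail = img.Resize(160, 120, CvEnum.Inter.Area);        // exact size
//     Image<Bgr, Byte> fitted = img.Resize(160, 120, CvEnum.Inter.Linear, true);   // largest size that fits, aspect kept
//     Image<Bgr, Byte> half = img.Resize(0.5, CvEnum.Inter.Linear);                // uniform scale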
  1680. /// <summary>
1681. /// Rotate the image by the specified angle, cropping the result to the original size
  1682. /// </summary>
  1683. /// <param name="angle">The angle of rotation in degrees.</param>
  1684. /// <param name="background">The color with which to fill the background</param>
1685. /// <returns>The image rotated by the specified angle</returns>
  1686. public Image<TColor, TDepth> Rotate(double angle, TColor background)
  1687. {
  1688. return Rotate(angle, background, true);
  1689. }
  1690. /// <summary>
  1691. /// Transforms source image using the specified matrix
  1692. /// </summary>
  1693. /// <param name="mapMatrix">2x3 transformation matrix</param>
  1694. /// <param name="interpolationType">Interpolation type</param>
  1695. /// <param name="warpType">Warp type</param>
  1696. /// <param name="borderMode">Pixel extrapolation method</param>
  1697. /// <param name="backgroundColor">A value used to fill outliers</param>
  1698. /// <returns>The result of the transformation</returns>
  1699. public Image<TColor, TDepth> WarpAffine(Mat mapMatrix, CvEnum.Inter interpolationType, CvEnum.Warp warpType, CvEnum.BorderType borderMode, TColor backgroundColor)
  1700. {
  1701. return WarpAffine(mapMatrix, Width, Height, interpolationType, warpType, borderMode, backgroundColor);
  1702. }
  1703. /// <summary>
  1704. /// Transforms source image using the specified matrix
  1705. /// </summary>
  1706. /// <param name="mapMatrix">2x3 transformation matrix</param>
  1707. /// <param name="width">The width of the resulting image</param>
  1708. /// <param name="height">the height of the resulting image</param>
  1709. /// <param name="interpolationType">Interpolation type</param>
  1710. /// <param name="warpType">Warp type</param>
  1711. /// <param name="borderMode">Pixel extrapolation method</param>
  1712. /// <param name="backgroundColor">A value used to fill outliers</param>
  1713. /// <returns>The result of the transformation</returns>
  1714. public Image<TColor, TDepth> WarpAffine(Mat mapMatrix, int width, int height, CvEnum.Inter interpolationType, CvEnum.Warp warpType, CvEnum.BorderType borderMode, TColor backgroundColor)
  1715. {
  1716. Image<TColor, TDepth> res = new Image<TColor, TDepth>(width, height);
  1717. CvInvoke.WarpAffine(this, res, mapMatrix, res.Size, interpolationType, warpType, borderMode, backgroundColor.MCvScalar);
  1718. return res;
  1719. }
  1720. /// <summary>
  1721. /// Transforms source image using the specified matrix
  1722. /// </summary>
  1723. /// <param name="mapMatrix">3x3 transformation matrix</param>
  1724. /// <param name="interpolationType">Interpolation type</param>
  1725. /// <param name="warpType">Warp type</param>
  1726. /// <param name="borderMode">Pixel extrapolation method</param>
  1727. /// <param name="backgroundColor">A value used to fill outliers</param>
  1728. /// <typeparam name="TMapDepth">The depth type of <paramref name="mapMatrix"/>, should be either float or double</typeparam>
  1729. /// <returns>The result of the transformation</returns>
  1730. public Image<TColor, TDepth> WarpPerspective<TMapDepth>(Matrix<TMapDepth> mapMatrix, CvEnum.Inter interpolationType, CvEnum.Warp warpType, CvEnum.BorderType borderMode, TColor backgroundColor)
  1731. where TMapDepth : new()
  1732. {
  1733. return WarpPerspective(mapMatrix, Width, Height, interpolationType, warpType, borderMode, backgroundColor);
  1734. }
  1735. /// <summary>
  1736. /// Transforms source image using the specified matrix
  1737. /// </summary>
  1738. /// <param name="mapMatrix">3x3 transformation matrix</param>
  1739. /// <param name="width">The width of the resulting image</param>
  1740. /// <param name="height">the height of the resulting image</param>
  1741. /// <param name="interpolationType">Interpolation type</param>
  1742. /// <param name="warpType">Warp type</param>
  1743. /// <param name="borderType">Border type</param>
  1744. /// <param name="backgroundColor">A value used to fill outliers</param>
  1745. /// <typeparam name="TMapDepth">The depth type of <paramref name="mapMatrix"/>, should be either float or double</typeparam>
  1746. /// <returns>The result of the transformation</returns>
  1747. public Image<TColor, TDepth> WarpPerspective<TMapDepth>(
  1748. Matrix<TMapDepth> mapMatrix,
  1749. int width, int height,
  1750. CvEnum.Inter interpolationType,
  1751. CvEnum.Warp warpType,
  1752. CvEnum.BorderType borderType,
  1753. TColor backgroundColor)
  1754. where TMapDepth : new()
  1755. {
  1756. Image<TColor, TDepth> res = new Image<TColor, TDepth>(width, height);
  1757. CvInvoke.WarpPerspective(this, res, mapMatrix, res.Size, interpolationType, warpType, borderType, backgroundColor.MCvScalar);
  1758. return res;
  1759. }
  1760. /// <summary>
1761. /// Rotate this image by the specified <paramref name="angle"/>
  1762. /// </summary>
  1763. /// <param name="angle">The angle of rotation in degrees.</param>
  1764. /// <param name="background">The color with which to fill the background</param>
  1765. /// <param name="crop">If set to true the image is cropped to its original size, possibly losing corners information. If set to false the result image has different size than original and all rotation information is preserved</param>
  1766. /// <returns>The rotated image</returns>
  1767. [ExposableMethod(Exposable = true, Category = "Transform")]
  1768. public Image<TColor, TDepth> Rotate(double angle, TColor background, bool crop)
  1769. {
  1770. Size size = Size;
  1771. PointF center = new PointF(size.Width * 0.5f, size.Height * 0.5f);
  1772. return Rotate(angle, center, CvEnum.Inter.Cubic, background, crop);
  1773. }
  1774. /// <summary>
1775. /// Rotate this image by the specified <paramref name="angle"/>
  1776. /// </summary>
  1777. /// <param name="angle">The angle of rotation in degrees. Positive means clockwise.</param>
  1778. /// <param name="background">The color with with to fill the background</param>
  1779. /// <param name="crop">If set to true the image is cropped to its original size, possibly losing corners information. If set to false the result image has different size than original and all rotation information is preserved</param>
  1780. /// <param name="center">The center of rotation</param>
  1781. /// <param name="interpolationMethod">The interpolation method</param>
  1782. /// <returns>The rotated image</returns>
  1783. public Image<TColor, TDepth> Rotate(double angle, PointF center, CvEnum.Inter interpolationMethod, TColor background, bool crop)
  1784. {
  1785. if (crop)
  1786. {
  1787. using (Mat rotationMatrix = new Mat())
  1788. {
  1789. CvInvoke.GetRotationMatrix2D(center, -angle, 1, rotationMatrix);
  1790. return WarpAffine(rotationMatrix, interpolationMethod, Emgu.CV.CvEnum.Warp.FillOutliers,
  1791. CvEnum.BorderType.Constant, background);
  1792. }
  1793. }
  1794. else
  1795. {
  1796. Size dstImgSize;
  1797. using (Mat rotationMatrix = RotationMatrix2D.CreateRotationMatrix(center, -angle, Size, out dstImgSize))
  1798. {
  1799. //CvInvoke.GetRotationMatrix2D(center, -angle, 1.0, rotationMatrix);
  1800. return WarpAffine(rotationMatrix, dstImgSize.Width, dstImgSize.Height, interpolationMethod, Emgu.CV.CvEnum.Warp.FillOutliers, CvEnum.BorderType.Constant, background);
  1801. }
  1802. }
  1803. }
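// Illustrative usage sketch (not part of the original source): the crop flag decides whether
// the rotated result keeps the original frame size or grows to hold the whole rotated image.
// Assuming an existing Image<Bgr, Byte> named "img":
//
//     Image<Bgr, Byte> rotatedCropped = img.Rotate(30, new Bgr(0, 0, 0));      // same size, corners clipped
//     Image<Bgr, Byte> rotatedFull = img.Rotate(30, new Bgr(0, 0, 0), false);  // enlarged canvas, nothing lost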
  1804. /// <summary>
  1805. /// Convert the image to log polar, simulating the human foveal vision
  1806. /// </summary>
  1807. /// <param name="center">The transformation center, where the output precision is maximal</param>
  1808. /// <param name="magnitude">Magnitude scale parameter</param>
  1809. /// <param name="interpolationType">interpolation type</param>
  1810. /// <param name="warpType">Warp type</param>
  1811. /// <returns>The converted image</returns>
  1812. [ExposableMethod(Exposable = true, Category = "Transform")]
  1813. public Image<TColor, TDepth> LogPolar(
  1814. PointF center,
  1815. double magnitude,
  1816. CvEnum.Inter interpolationType = CvEnum.Inter.Linear,
  1817. CvEnum.Warp warpType = CvEnum.Warp.FillOutliers)
  1818. {
  1819. Image<TColor, TDepth> imgPolar = CopyBlank();
  1820. CvInvoke.LogPolar(this, imgPolar, center, magnitude, interpolationType, warpType);
  1821. return imgPolar;
  1822. }
  1823. #endregion
  1824. #region Image color and depth conversion
  1825. /// <summary> Convert the current image to the specific color and depth </summary>
  1826. /// <typeparam name="TOtherColor"> The type of color to be converted to </typeparam>
  1827. /// <typeparam name="TOtherDepth"> The type of pixel depth to be converted to </typeparam>
  1828. /// <returns> Image of the specific color and depth </returns>
  1829. [ExposableMethod(
  1830. Exposable = true,
  1831. Category = "Conversion",
  1832. GenericParametersOptions = new Type[] {
  1833. typeof(Bgr), typeof(Bgra), typeof(Gray), typeof(Hsv), typeof(Hls), typeof(Lab), typeof(Luv), typeof(Xyz), typeof(Ycc),
  1834. typeof(Single), typeof(Byte), typeof(Double)},
  1835. GenericParametersOptionSizes = new int[] { 9, 3 }
  1836. )]
  1837. public Image<TOtherColor, TOtherDepth> Convert<TOtherColor, TOtherDepth>()
  1838. where TOtherColor : struct, IColor
  1839. where TOtherDepth : new()
  1840. {
  1841. Image<TOtherColor, TOtherDepth> res = new Image<TOtherColor, TOtherDepth>(Size);
  1842. res.ConvertFrom(this);
  1843. return res;
  1844. }
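// Illustrative usage sketch (not part of the original source): Convert changes color space
// and/or channel depth in one call. Assuming an existing Image<Bgr, Byte> named "img":
//
//     Image<Gray, Byte> gray = img.Convert<Gray, Byte>();     // color conversion only
//     Image<Bgr, Single> bgrF = img.Convert<Bgr, Single>();   // depth conversion only
//     Image<Hsv, Byte> hsv = img.Convert<Hsv, Byte>();        // Bgr -> Hsv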
  1845. /// <summary>
1846. /// Convert the source image to the current image. If the sizes are different, the current image will be a resized version of the srcImage.
  1847. /// </summary>
  1848. /// <typeparam name="TSrcColor">The color type of the source image</typeparam>
  1849. /// <typeparam name="TSrcDepth">The color depth of the source image</typeparam>
  1850. /// <param name="srcImage">The sourceImage</param>
  1851. public void ConvertFrom<TSrcColor, TSrcDepth>(Image<TSrcColor, TSrcDepth> srcImage)
  1852. where TSrcColor : struct, IColor
  1853. where TSrcDepth : new()
  1854. {
  1855. if (!Size.Equals(srcImage.Size))
1856. { //if the size of the source image does not match the size of the current image
  1857. using (Image<TSrcColor, TSrcDepth> tmp = new Image<TSrcColor, TSrcDepth>(this.Size))
  1858. {
  1859. CvInvoke.Resize(srcImage, tmp, this.Size);
  1860. ConvertFrom(tmp);
  1861. return;
  1862. }
  1863. }
  1864. if (typeof(TColor) == typeof(TSrcColor))
  1865. {
  1866. #region same color
  1867. if (typeof(TDepth) == typeof(TSrcDepth))
  1868. { //same depth
  1869. srcImage.Mat.CopyTo(this);
  1870. //CvInvoke.cvCopy(srcImage.Ptr, Ptr, IntPtr.Zero);
  1871. }
  1872. else
  1873. {
  1874. //different depth
  1875. //int channelCount = NumberOfChannels;
  1876. {
  1877. if (typeof(TDepth) == typeof(Byte) && typeof(TSrcDepth) != typeof(Byte))
  1878. {
  1879. double[] minVal, maxVal;
  1880. Point[] minLoc, maxLoc;
  1881. srcImage.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);
  1882. double min = minVal[0];
  1883. double max = maxVal[0];
  1884. for (int i = 1; i < minVal.Length; i++)
  1885. {
  1886. min = Math.Min(min, minVal[i]);
  1887. max = Math.Max(max, maxVal[i]);
  1888. }
  1889. double scale = 1.0, shift = 0.0;
  1890. if (max > 255.0 || min < 0)
  1891. {
  1892. scale = (max.Equals(min)) ? 0.0 : 255.0 / (max - min);
  1893. shift = (scale.Equals(0)) ? min : -min * scale;
  1894. }
  1895. CvInvoke.ConvertScaleAbs(srcImage, this, scale, shift);
  1896. }
  1897. else
  1898. {
  1899. srcImage.Mat.ConvertTo(this, this.Mat.Depth, 1.0, 0.0);
  1900. //CvInvoke.cvConvertScale(srcImage, this, 1.0, 0.0);
  1901. }
  1902. }
  1903. }
  1904. #endregion
  1905. }
  1906. else
  1907. {
  1908. #region different color
  1909. if (typeof(TDepth) == typeof(TSrcDepth))
  1910. { //same depth
  1911. CvInvoke.CvtColor(srcImage, this, typeof(TSrcColor), typeof(TColor));
  1912. }
  1913. else
  1914. { //different depth
  1915. if (typeof(TSrcDepth) == typeof(Byte))
  1916. { //Do color conversion first, then depth conversion
  1917. using (Image<TColor, TSrcDepth> tmp = srcImage.Convert<TColor, TSrcDepth>())
  1918. {
  1919. this.ConvertFrom(tmp);
  1920. }
  1921. }
  1922. else
  1923. { //Do depth conversion first, then color conversion
  1924. using (Image<TSrcColor, TDepth> tmp = srcImage.Convert<TSrcColor, TDepth>()) //convert depth
  1925. //using (Mat tmp = new CV.Mat())
  1926. {
  1927. //srcImage.Mat.ConvertTo(tmp, CvInvoke.GetDepthType(typeof(TDepth),
  1928. CvInvoke.CvtColor(tmp, this, typeof(TSrcColor), typeof(TColor));
  1929. }
  1930. }
  1931. }
  1932. #endregion
  1933. }
  1934. }
  1935. /// <summary>
1936. /// Convert the source image to the current image. If the sizes are different, the current image will be a resized version of the srcImage.
  1937. /// </summary>
  1938. /// <param name="srcImage">The sourceImage</param>
  1939. public void ConvertFrom(IInputArray srcImage)
  1940. {
  1941. using (InputArray iaSrcImage = srcImage.GetInputArray())
  1942. {
  1943. Size srcImageSize = iaSrcImage.GetSize();
  1944. if (!Size.Equals(srcImageSize))
  1945. {
1946. //if the size of the source image does not match the size of the current image
  1947. using (Mat tmp = new Mat())
  1948. {
  1949. CvInvoke.Resize(srcImage, tmp, this.Size);
  1950. ConvertFrom(tmp);
  1951. return;
  1952. }
  1953. }
  1954. int srcImageNumberOfChannels = iaSrcImage.GetChannels();
  1955. if (NumberOfChannels == srcImageNumberOfChannels)
  1956. {
  1957. #region same color
  1958. DepthType srcImageDepth = iaSrcImage.GetDepth();
  1959. if (CvInvoke.GetDepthType(typeof(TDepth)) == srcImageDepth)
  1960. {
  1961. //same depth
  1962. iaSrcImage.CopyTo(this);
  1963. //srcImage.CopyTo(this);
  1964. }
  1965. else
  1966. {
  1967. //different depth
  1968. //int channelCount = NumberOfChannels;
  1969. {
1970. if (typeof(TDepth) == typeof(Byte) && srcImageDepth != DepthType.Cv8U)
  1971. {
  1972. double[] minVal, maxVal;
  1973. Point[] minLoc, maxLoc;
  1974. CvInvoke.MinMax(srcImage, out minVal, out maxVal, out minLoc, out maxLoc);
  1975. double min = minVal[0];
  1976. double max = maxVal[0];
  1977. for (int i = 1; i < minVal.Length; i++)
  1978. {
  1979. min = Math.Min(min, minVal[i]);
  1980. max = Math.Max(max, maxVal[i]);
  1981. }
  1982. double scale = 1.0, shift = 0.0;
  1983. if (max > 255.0 || min < 0)
  1984. {
  1985. scale = max.Equals(min) ? 0.0 : 255.0 / (max - min);
  1986. shift = scale.Equals(0) ? min : -min * scale;
  1987. }
  1988. CvInvoke.ConvertScaleAbs(srcImage, this, scale, shift);
  1989. }
  1990. else
  1991. {
  1992. using (Mat srcMat = iaSrcImage.GetMat())
  1993. {
  1994. srcMat.ConvertTo(this, this.Mat.Depth, 1.0, 0.0);
  1995. }
  1996. }
  1997. }
  1998. }
  1999. #endregion
  2000. }
  2001. else
  2002. {
  2003. if (!(srcImageNumberOfChannels == 1 || srcImageNumberOfChannels == 3 || srcImageNumberOfChannels == 4))
2004. throw new Exception("Color conversion not supported");
  2005. Type srcColorType =
  2006. srcImageNumberOfChannels == 1
  2007. ? typeof(Gray)
  2008. : srcImageNumberOfChannels == 3
  2009. ? typeof(Bgr)
  2010. : typeof(Bgra);
  2011. #region different color
  2012. DepthType srcImageDepth = iaSrcImage.GetDepth();
  2013. if (CvInvoke.GetDepthType(typeof(TDepth)) == srcImageDepth)
  2014. {
  2015. //same depth
  2016. CvInvoke.CvtColor(srcImage, this, srcColorType, typeof(TColor));
  2017. }
  2018. else
  2019. {
  2020. //different depth
  2021. //using (Image<TSrcColor, TDepth> tmp = srcImage.Convert<TSrcColor, TDepth>()) //convert depth
  2022. using (Mat tmp = new Mat())
  2023. using (Mat srcMat = iaSrcImage.GetMat())
  2024. {
  2025. srcMat.ConvertTo(tmp, CvInvoke.GetDepthType(typeof(TDepth)));
  2026. //srcImage.Mat.ConvertTo(tmp, CvInvoke.GetDepthType(typeof(TDepth),
  2027. CvInvoke.CvtColor(tmp, this, srcColorType, typeof(TColor));
  2028. }
  2029. }
  2030. #endregion
  2031. }
  2032. }
  2033. }
2034. /// <summary> Convert the current image to the specified depth, scaling and shifting the pixel values at the same time</summary>
  2035. /// <param name="scale"> The value to be multiplied with the pixel </param>
  2036. /// <param name="shift"> The value to be added to the pixel</param>
  2037. /// <typeparam name="TOtherDepth"> The type of depth to convert to</typeparam>
  2038. /// <returns> Image of the specific depth, val = val * scale + shift </returns>
  2039. public Image<TColor, TOtherDepth> ConvertScale<TOtherDepth>(double scale, double shift)
  2040. where TOtherDepth : new()
  2041. {
  2042. Image<TColor, TOtherDepth> res = new Image<TColor, TOtherDepth>(Width, Height);
  2043. if (typeof(TOtherDepth) == typeof(Byte))
  2044. CvInvoke.ConvertScaleAbs(this, res, scale, shift);
  2045. else
  2046. CvInvoke.cvConvertScale(this, res, scale, shift);
  2047. return res;
  2048. }
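// Illustrative usage sketch (not part of the original source): ConvertScale is handy for
// mapping a wider depth into a displayable 8-bit range. Assuming an existing 16-bit
// Image<Gray, UInt16> named "depth16" whose useful range is roughly 0..4095:
//
//     Image<Gray, Byte> depth8 = depth16.ConvertScale<Byte>(255.0 / 4095.0, 0.0);  // val = val * scale + shift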
  2049. #endregion
  2050. #region Pyramids
  2051. /// <summary>
  2052. /// Performs downsampling step of Gaussian pyramid decomposition.
  2053. /// First it convolves <i>this</i> image with the specified filter and then downsamples the image
  2054. /// by rejecting even rows and columns.
  2055. /// </summary>
  2056. /// <returns> The down-sampled image</returns>
  2057. [ExposableMethod(Exposable = true, Category = "Pyramids")]
  2058. public Image<TColor, TDepth> PyrDown()
  2059. {
  2060. Image<TColor, TDepth> res = new Image<TColor, TDepth>(Width >> 1, Height >> 1);
  2061. CvInvoke.PyrDown(this, res, CvEnum.BorderType.Default);
  2062. return res;
  2063. }
  2064. /// <summary>
  2065. /// Performs up-sampling step of Gaussian pyramid decomposition.
  2066. /// First it up-samples <i>this</i> image by injecting even zero rows and columns and then convolves
  2067. /// result with the specified filter multiplied by 4 for interpolation.
  2068. /// So the resulting image is four times larger than the source image.
  2069. /// </summary>
  2070. /// <returns> The up-sampled image</returns>
  2071. [ExposableMethod(Exposable = true, Category = "Pyramids")]
  2072. public Image<TColor, TDepth> PyrUp()
  2073. {
  2074. Image<TColor, TDepth> res = new Image<TColor, TDepth>(Width << 1, Height << 1);
  2075. CvInvoke.PyrUp(this, res, CvEnum.BorderType.Default);
  2076. return res;
  2077. }
  2078. /// <summary>
  2079. /// Compute the image pyramid
  2080. /// </summary>
  2081. /// <param name="maxLevel">The number of level's for the pyramid; Level 0 referes to the current image, level n is computed by calling the PyrDown() function on level n-1</param>
  2082. /// <returns>The image pyramid</returns>
  2083. public Image<TColor, TDepth>[] BuildPyramid(int maxLevel)
  2084. {
2085. Debug.Assert(maxLevel >= 0, "The pyramid should have at least a maxLevel of 0");
  2086. Image<TColor, TDepth>[] pyr = new Image<TColor, TDepth>[maxLevel + 1];
  2087. pyr[0] = this;
  2088. for (int i = 1; i <= maxLevel; i++)
  2089. pyr[i] = pyr[i - 1].PyrDown();
  2090. return pyr;
  2091. }
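// Illustrative usage sketch (not part of the original source): note that level 0 of the
// returned pyramid is this image itself (not a copy), so only the down-sampled levels need
// to be disposed separately. Assuming an existing Image<Gray, Byte> named "img":
//
//     Image<Gray, Byte>[] pyramid = img.BuildPyramid(3);   // pyramid[0] == img, pyramid[3] is 1/8 the size
//     for (int i = 1; i < pyramid.Length; i++)
//         pyramid[i].Dispose();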
  2092. #endregion
  2093. #region Special Image Transforms
2094. /// <summary> Use inpainting to recover the intensity of the pixels whose locations are defined by <paramref name="mask"/> on <i>this</i> image </summary>
  2095. /// <param name="mask">The inpainting mask. Non-zero pixels indicate the area that needs to be inpainted</param>
  2096. /// <param name="radius">The radius of circular neighborhood of each point inpainted that is considered by the algorithm</param>
  2097. /// <returns> The inpainted image </returns>
  2098. public Image<TColor, TDepth> InPaint(Image<Gray, Byte> mask, double radius)
  2099. {
  2100. Image<TColor, TDepth> res = CopyBlank();
  2101. CvInvoke.Inpaint(this, mask, res, radius, CvEnum.InpaintType.Telea);
  2102. return res;
  2103. }
  2104. #endregion
  2105. #region Morphological Operations
  2106. /// <summary>
  2107. /// Perform advanced morphological transformations using erosion and dilation as basic operations.
  2108. /// </summary>
  2109. /// <param name="kernel">Structuring element</param>
  2110. /// <param name="anchor">Anchor position with the kernel. Negative values mean that the anchor is at the kernel center.</param>
  2111. /// <param name="operation">Type of morphological operation</param>
  2112. /// <param name="iterations">Number of times erosion and dilation are applied</param>
  2113. /// <param name="borderType">Border type</param>
  2114. /// <param name="borderValue">Border value</param>
  2115. /// <returns>The result of the morphological operation</returns>
  2116. public Image<TColor, TDepth> MorphologyEx(CvEnum.MorphOp operation, IInputArray kernel, Point anchor, int iterations, CvEnum.BorderType borderType, MCvScalar borderValue)
  2117. {
  2118. Image<TColor, TDepth> res = CopyBlank();
  2119. CvInvoke.MorphologyEx(
  2120. this, res, operation,
  2121. kernel, anchor, iterations, borderType, borderValue);
  2122. return res;
  2123. }
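// Illustrative usage sketch (not part of the original source): a morphological opening that
// removes small speckles from a binary mask. The structuring element is built with
// CvInvoke.GetStructuringElement (assumed available); the name "mask" is an assumption:
//
//     using (Mat kernel = CvInvoke.GetStructuringElement(CvEnum.ElementShape.Ellipse, new Size(5, 5), new Point(-1, -1)))
//     {
//         Image<Gray, Byte> cleaned = mask.MorphologyEx(
//             CvEnum.MorphOp.Open, kernel, new Point(-1, -1), 1,
//             CvEnum.BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
//     }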
  2124. /// <summary>
  2125. /// Perform inplace advanced morphological transformations using erosion and dilation as basic operations.
  2126. /// </summary>
  2127. /// <param name="kernel">Structuring element</param>
  2128. /// <param name="anchor">Anchor position with the kernel. Negative values mean that the anchor is at the kernel center.</param>
  2129. /// <param name="operation">Type of morphological operation</param>
  2130. /// <param name="iterations">Number of times erosion and dilation are applied</param>
  2131. /// <param name="borderType">Border type</param>
  2132. /// <param name="borderValue">Border value</param>
  2133. public void _MorphologyEx(CvEnum.MorphOp operation, IInputArray kernel, Point anchor, int iterations, CvEnum.BorderType borderType, MCvScalar borderValue)
  2134. {
  2135. CvInvoke.MorphologyEx(
  2136. this, this, operation,
  2137. kernel, anchor, iterations, borderType, borderValue);
  2138. }
  2139. /// <summary>
  2140. /// Erodes <i>this</i> image using a 3x3 rectangular structuring element.
2141. /// Erosion is applied several (iterations) times
  2142. /// </summary>
  2143. /// <param name="iterations">The number of erode iterations</param>
  2144. /// <returns> The eroded image</returns>
  2145. public Image<TColor, TDepth> Erode(int iterations)
  2146. {
  2147. Image<TColor, TDepth> res = CopyBlank();
  2148. CvInvoke.Erode(this, res, null, new Point(-1, -1), iterations, CvEnum.BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
  2149. return res;
  2150. }
  2151. /// <summary>
  2152. /// Dilates <i>this</i> image using a 3x3 rectangular structuring element.
2153. /// Dilation is applied several (iterations) times
  2154. /// </summary>
  2155. /// <param name="iterations">The number of dilate iterations</param>
  2156. /// <returns> The dilated image</returns>
  2157. public Image<TColor, TDepth> Dilate(int iterations)
  2158. {
  2159. Image<TColor, TDepth> res = CopyBlank();
  2160. CvInvoke.Dilate(this, res, null, new Point(-1, -1), iterations, CvEnum.BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
  2161. return res;
  2162. }
  2163. /// <summary>
  2164. /// Erodes <i>this</i> image inplace using a 3x3 rectangular structuring element.
2165. /// Erosion is applied several (iterations) times
  2166. /// </summary>
  2167. /// <param name="iterations">The number of erode iterations</param>
  2168. [ExposableMethod(Exposable = true, Category = "Morphology")]
  2169. public void _Erode(int iterations)
  2170. {
  2171. CvInvoke.Erode(this, this, null, new Point(-1, -1), iterations, CvEnum.BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
  2172. }
  2173. /// <summary>
  2174. /// Dilates <i>this</i> image inplace using a 3x3 rectangular structuring element.
2175. /// Dilation is applied several (iterations) times
  2176. /// </summary>
  2177. /// <param name="iterations">The number of dilate iterations</param>
  2178. [ExposableMethod(Exposable = true, Category = "Morphology")]
  2179. public void _Dilate(int iterations)
  2180. {
  2181. CvInvoke.Dilate(this, this, null, new Point(-1, -1), iterations, CvEnum.BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
  2182. }
  2183. #endregion
  2184. #region generic operations
  2185. /// <summary>
2186. /// Perform a generic action based on each element of the image
  2187. /// </summary>
  2188. /// <param name="action">The action to be applied to each element of the image</param>
  2189. public void Action(Action<TDepth> action)
  2190. {
  2191. int cols1 = Width * new TColor().Dimension;
  2192. int step1;
  2193. IntPtr start;
  2194. Size roiSize;
  2195. CvInvoke.cvGetRawData(Ptr, out start, out step1, out roiSize);
  2196. Int64 data1 = start.ToInt64();
  2197. int width1 = SizeOfElement * cols1;
  2198. using (PinnedArray<TDepth> row1 = new PinnedArray<TDepth>(cols1))
  2199. for (int row = 0; row < Height; row++, data1 += step1)
  2200. {
  2201. CvToolbox.Memcpy(row1.AddrOfPinnedObject(), new IntPtr(data1), width1);
  2202. foreach (TDepth v in row1.Array)
  2203. action(v);
  2204. }
  2205. }
  2206. /// <summary>
2207. /// Perform a generic operation based on the elements of the two images
  2208. /// </summary>
  2209. /// <typeparam name="TOtherDepth">The depth of the second image</typeparam>
  2210. /// <param name="img2">The second image to perform action on</param>
  2211. /// <param name="action">An action such that the first parameter is the a single channel of a pixel from the first image, the second parameter is the corresponding channel of the correspondind pixel from the second image </param>
  2212. public void Action<TOtherDepth>(Image<TColor, TOtherDepth> img2, Action<TDepth, TOtherDepth> action)
  2213. where TOtherDepth : new()
  2214. {
  2215. Debug.Assert(Size.Equals(img2.Size));
  2216. Int64 data1;
  2217. int height1, cols1, width1, step1;
  2218. RoiParam(Ptr, out data1, out height1, out cols1, out width1, out step1);
  2219. Int64 data2;
  2220. int height2, cols2, width2, step2;
  2221. RoiParam(img2.Ptr, out data2, out height2, out cols2, out width2, out step2);
  2222. TDepth[] row1 = new TDepth[cols1];
  2223. TOtherDepth[] row2 = new TOtherDepth[cols1];
  2224. GCHandle handle1 = GCHandle.Alloc(row1, GCHandleType.Pinned);
  2225. GCHandle handle2 = GCHandle.Alloc(row2, GCHandleType.Pinned);
  2226. for (int row = 0; row < height1; row++, data1 += step1, data2 += step2)
  2227. {
  2228. CvToolbox.Memcpy(handle1.AddrOfPinnedObject(), (IntPtr)data1, width1);
  2229. CvToolbox.Memcpy(handle2.AddrOfPinnedObject(), (IntPtr)data2, width2);
  2230. for (int col = 0; col < cols1; action(row1[col], row2[col]), col++) ;
  2231. }
  2232. handle1.Free();
  2233. handle2.Free();
  2234. }
  2235. /// <summary>
  2236. /// Compute the element of a new image based on the value as well as the x and y positions of each pixel on the image
  2237. /// </summary>
  2238. /// <param name="converter">The function to be applied to the image pixels</param>
  2239. /// <typeparam name="TOtherDepth">The depth type to convert the image to.</typeparam>
  2240. /// <returns>The result image</returns>
  2241. public Image<TColor, TOtherDepth> Convert<TOtherDepth>(Func<TDepth, int, int, TOtherDepth> converter)
  2242. where TOtherDepth : new()
  2243. {
  2244. Image<TColor, TOtherDepth> res = new Image<TColor, TOtherDepth>(Width, Height);
  2245. int nchannel = MIplImage.NChannels;
  2246. Int64 data1;
  2247. int height1, cols1, width1, step1;
  2248. RoiParam(Ptr, out data1, out height1, out cols1, out width1, out step1);
  2249. Int64 data2;
  2250. int height2, cols2, width2, step2;
  2251. RoiParam(res.Ptr, out data2, out height2, out cols2, out width2, out step2);
  2252. TDepth[] row1 = new TDepth[cols1];
  2253. TOtherDepth[] row2 = new TOtherDepth[cols1];
  2254. GCHandle handle1 = GCHandle.Alloc(row1, GCHandleType.Pinned);
  2255. GCHandle handle2 = GCHandle.Alloc(row2, GCHandleType.Pinned);
  2256. for (int row = 0; row < height1; row++, data1 += step1, data2 += step2)
  2257. {
  2258. CvToolbox.Memcpy(handle1.AddrOfPinnedObject(), (IntPtr)data1, width1);
  2259. for (int col = 0; col < cols1; row2[col] = converter(row1[col], row, col / nchannel), col++) ;
  2260. CvToolbox.Memcpy((IntPtr)data2, handle2.AddrOfPinnedObject(), width2);
  2261. }
  2262. handle1.Free();
  2263. handle2.Free();
  2264. return res;
  2265. }
2266. /// <summary> Compute the element of the new image based on the elements of this image</summary>
  2267. /// <typeparam name="TOtherDepth">The depth type of the result image</typeparam>
  2268. /// <param name="converter">The function to be applied to the image pixels</param>
  2269. /// <returns>The result image</returns>
  2270. public Image<TColor, TOtherDepth> Convert<TOtherDepth>(Func<TDepth, TOtherDepth> converter)
  2271. where TOtherDepth : new()
  2272. {
  2273. Image<TColor, TOtherDepth> res = new Image<TColor, TOtherDepth>(Size);
  2274. Int64 data1;
  2275. int height1, cols1, width1, step1;
  2276. RoiParam(Ptr, out data1, out height1, out cols1, out width1, out step1);
  2277. Int64 data2;
  2278. int height2, cols2, width2, step2;
  2279. RoiParam(res.Ptr, out data2, out height2, out cols2, out width2, out step2);
  2280. TDepth[] row1 = new TDepth[cols1];
  2281. TOtherDepth[] row2 = new TOtherDepth[cols1];
  2282. GCHandle handle1 = GCHandle.Alloc(row1, GCHandleType.Pinned);
  2283. GCHandle handle2 = GCHandle.Alloc(row2, GCHandleType.Pinned);
  2284. for (int row = 0; row < height1; row++, data1 += step1, data2 += step2)
  2285. {
  2286. CvToolbox.Memcpy(handle1.AddrOfPinnedObject(), (IntPtr)data1, width1);
  2287. for (int col = 0; col < cols1; row2[col] = converter(row1[col]), col++) ;
  2288. CvToolbox.Memcpy((IntPtr)data2, handle2.AddrOfPinnedObject(), width2);
  2289. }
  2290. handle1.Free();
  2291. handle2.Free();
  2292. return res;
  2293. }
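// Illustrative usage sketch (not part of the original source): the delegate based Convert
// runs a managed function over every channel value, which is flexible but much slower than
// the built-in CvInvoke routines. Assuming an existing Image<Gray, Byte> named "gray":
//
//     Image<Gray, Byte> inverted = gray.Convert<Byte>(v => (byte)(255 - v));
//     Image<Gray, Single> squared = gray.Convert<Single>(v => (float)(v * v));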
2294. /// <summary> Compute the element of the new image based on the elements of the two images</summary>
  2295. /// <typeparam name="TDepth2">The depth type of img2</typeparam>
  2296. /// <typeparam name="TDepth3">The depth type of the result image</typeparam>
  2297. /// <param name="img2">The second image</param>
  2298. /// <param name="converter">The function to be applied to the image pixels</param>
  2299. /// <returns>The result image</returns>
  2300. public Image<TColor, TDepth3> Convert<TDepth2, TDepth3>(Image<TColor, TDepth2> img2, Func<TDepth, TDepth2, TDepth3> converter)
  2301. where TDepth2 : new()
  2302. where TDepth3 : new()
  2303. {
2304. Debug.Assert(Size.Equals(img2.Size), "Image sizes do not match");
  2305. Image<TColor, TDepth3> res = new Image<TColor, TDepth3>(Width, Height);
  2306. Int64 data1;
  2307. int height1, cols1, width1, step1;
  2308. RoiParam(Ptr, out data1, out height1, out cols1, out width1, out step1);
  2309. Int64 data2;
  2310. int height2, cols2, width2, step2;
  2311. RoiParam(img2.Ptr, out data2, out height2, out cols2, out width2, out step2);
  2312. Int64 data3;
  2313. int height3, cols3, width3, step3;
  2314. RoiParam(res.Ptr, out data3, out height3, out cols3, out width3, out step3);
  2315. TDepth[] row1 = new TDepth[cols1];
  2316. TDepth2[] row2 = new TDepth2[cols1];
  2317. TDepth3[] row3 = new TDepth3[cols1];
  2318. GCHandle handle1 = GCHandle.Alloc(row1, GCHandleType.Pinned);
  2319. GCHandle handle2 = GCHandle.Alloc(row2, GCHandleType.Pinned);
  2320. GCHandle handle3 = GCHandle.Alloc(row3, GCHandleType.Pinned);
  2321. for (int row = 0; row < height1; row++, data1 += step1, data2 += step2, data3 += step3)
  2322. {
  2323. CvToolbox.Memcpy(handle1.AddrOfPinnedObject(), (IntPtr)data1, width1);
  2324. CvToolbox.Memcpy(handle2.AddrOfPinnedObject(), (IntPtr)data2, width2);
  2325. for (int col = 0; col < cols1; row3[col] = converter(row1[col], row2[col]), col++) ;
  2326. CvToolbox.Memcpy((IntPtr)data3, handle3.AddrOfPinnedObject(), width3);
  2327. }
  2328. handle1.Free();
  2329. handle2.Free();
  2330. handle3.Free();
  2331. return res;
  2332. }
2333. /// <summary> Compute the element of the new image based on the elements of the three images</summary>
  2334. /// <typeparam name="TDepth2">The depth type of img2</typeparam>
  2335. /// <typeparam name="TDepth3">The depth type of img3</typeparam>
  2336. /// <typeparam name="TDepth4">The depth type of the result image</typeparam>
  2337. /// <param name="img2">The second image</param>
  2338. /// <param name="img3">The third image</param>
  2339. /// <param name="converter">The function to be applied to the image pixels</param>
  2340. /// <returns>The result image</returns>
  2341. public Image<TColor, TDepth4> Convert<TDepth2, TDepth3, TDepth4>(Image<TColor, TDepth2> img2, Image<TColor, TDepth3> img3, Func<TDepth, TDepth2, TDepth3, TDepth4> converter)
  2342. where TDepth2 : new()
  2343. where TDepth3 : new()
  2344. where TDepth4 : new()
  2345. {
2346. Debug.Assert(Size.Equals(img2.Size) && Size.Equals(img3.Size), "Image sizes do not match");
  2347. Image<TColor, TDepth4> res = new Image<TColor, TDepth4>(Width, Height);
  2348. Int64 data1;
  2349. int height1, cols1, width1, step1;
  2350. RoiParam(Ptr, out data1, out height1, out cols1, out width1, out step1);
  2351. Int64 data2;
  2352. int height2, cols2, width2, step2;
  2353. RoiParam(img2.Ptr, out data2, out height2, out cols2, out width2, out step2);
  2354. Int64 data3;
  2355. int height3, cols3, width3, step3;
  2356. RoiParam(img3.Ptr, out data3, out height3, out cols3, out width3, out step3);
  2357. Int64 data4;
  2358. int height4, cols4, width4, step4;
  2359. RoiParam(res.Ptr, out data4, out height4, out cols4, out width4, out step4);
  2360. TDepth[] row1 = new TDepth[cols1];
  2361. TDepth2[] row2 = new TDepth2[cols1];
  2362. TDepth3[] row3 = new TDepth3[cols1];
  2363. TDepth4[] row4 = new TDepth4[cols1];
  2364. GCHandle handle1 = GCHandle.Alloc(row1, GCHandleType.Pinned);
  2365. GCHandle handle2 = GCHandle.Alloc(row2, GCHandleType.Pinned);
  2366. GCHandle handle3 = GCHandle.Alloc(row3, GCHandleType.Pinned);
  2367. GCHandle handle4 = GCHandle.Alloc(row4, GCHandleType.Pinned);
  2368. for (int row = 0; row < height1; row++, data1 += step1, data2 += step2, data3 += step3, data4 += step4)
  2369. {
  2370. CvToolbox.Memcpy(handle1.AddrOfPinnedObject(), (IntPtr)data1, width1);
  2371. CvToolbox.Memcpy(handle2.AddrOfPinnedObject(), (IntPtr)data2, width2);
  2372. CvToolbox.Memcpy(handle3.AddrOfPinnedObject(), (IntPtr)data3, width3);
2373. for (int col = 0; col < cols1; col++) row4[col] = converter(row1[col], row2[col], row3[col]);
  2374. CvToolbox.Memcpy((IntPtr)data4, handle4.AddrOfPinnedObject(), width4);
  2375. }
  2376. handle1.Free();
  2377. handle2.Free();
  2378. handle3.Free();
  2379. handle4.Free();
  2380. return res;
  2381. }
2382. /// <summary> Compute each element of the new image based on the corresponding elements of the four images</summary>
  2383. /// <typeparam name="TDepth2">The depth type of img2</typeparam>
  2384. /// <typeparam name="TDepth3">The depth type of img3</typeparam>
  2385. /// <typeparam name="TDepth4">The depth type of img4</typeparam>
  2386. /// <typeparam name="TDepth5">The depth type of the result image</typeparam>
  2387. /// <param name="img2">The second image</param>
  2388. /// <param name="img3">The third image</param>
  2389. /// <param name="img4">The fourth image</param>
  2390. /// <param name="converter">The function to be applied to the image pixels</param>
  2391. /// <returns>The result image</returns>
  2392. public Image<TColor, TDepth5> Convert<TDepth2, TDepth3, TDepth4, TDepth5>(Image<TColor, TDepth2> img2, Image<TColor, TDepth3> img3, Image<TColor, TDepth4> img4, Func<TDepth, TDepth2, TDepth3, TDepth4, TDepth5> converter)
  2393. where TDepth2 : new()
  2394. where TDepth3 : new()
  2395. where TDepth4 : new()
  2396. where TDepth5 : new()
  2397. {
2398. Debug.Assert(Size.Equals(img2.Size) && Size.Equals(img3.Size) && Size.Equals(img4.Size), "Image sizes do not match");
  2399. Image<TColor, TDepth5> res = new Image<TColor, TDepth5>(Width, Height);
  2400. Int64 data1;
  2401. int height1, cols1, width1, step1;
  2402. RoiParam(Ptr, out data1, out height1, out cols1, out width1, out step1);
  2403. Int64 data2;
  2404. int height2, cols2, width2, step2;
  2405. RoiParam(img2.Ptr, out data2, out height2, out cols2, out width2, out step2);
  2406. Int64 data3;
  2407. int height3, cols3, width3, step3;
  2408. RoiParam(img3.Ptr, out data3, out height3, out cols3, out width3, out step3);
  2409. Int64 data4;
  2410. int height4, cols4, width4, step4;
  2411. RoiParam(img4.Ptr, out data4, out height4, out cols4, out width4, out step4);
  2412. Int64 data5;
  2413. int height5, cols5, width5, step5;
  2414. RoiParam(res.Ptr, out data5, out height5, out cols5, out width5, out step5);
  2415. TDepth[] row1 = new TDepth[cols1];
  2416. TDepth2[] row2 = new TDepth2[cols1];
  2417. TDepth3[] row3 = new TDepth3[cols1];
  2418. TDepth4[] row4 = new TDepth4[cols1];
  2419. TDepth5[] row5 = new TDepth5[cols1];
  2420. GCHandle handle1 = GCHandle.Alloc(row1, GCHandleType.Pinned);
  2421. GCHandle handle2 = GCHandle.Alloc(row2, GCHandleType.Pinned);
  2422. GCHandle handle3 = GCHandle.Alloc(row3, GCHandleType.Pinned);
  2423. GCHandle handle4 = GCHandle.Alloc(row4, GCHandleType.Pinned);
  2424. GCHandle handle5 = GCHandle.Alloc(row5, GCHandleType.Pinned);
  2425. for (int row = 0; row < height1; row++, data1 += step1, data2 += step2, data3 += step3, data4 += step4, data5 += step5)
  2426. {
  2427. CvToolbox.Memcpy(handle1.AddrOfPinnedObject(), (IntPtr)data1, width1);
  2428. CvToolbox.Memcpy(handle2.AddrOfPinnedObject(), (IntPtr)data2, width2);
  2429. CvToolbox.Memcpy(handle3.AddrOfPinnedObject(), (IntPtr)data3, width3);
  2430. CvToolbox.Memcpy(handle4.AddrOfPinnedObject(), (IntPtr)data4, width4);
2431. for (int col = 0; col < cols1; col++) row5[col] = converter(row1[col], row2[col], row3[col], row4[col]);
  2432. CvToolbox.Memcpy((IntPtr)data5, handle5.AddrOfPinnedObject(), width5);
  2433. }
  2434. handle1.Free();
  2435. handle2.Free();
  2436. handle3.Free();
  2437. handle4.Free();
  2438. handle5.Free();
  2439. return res;
  2440. }
  2441. #endregion
2442. #region Implement UnmanagedObject
  2443. /// <summary>
2444. /// Release all unmanaged memory associated with the image
  2445. /// </summary>
  2446. protected override void DisposeObject()
  2447. {
  2448. base.DisposeObject();
  2449. if (_ptr != IntPtr.Zero)
  2450. {
  2451. if (_imageDataReleaseMode == ImageDataReleaseMode.ReleaseHeaderOnly)
  2452. {
  2453. CvInvoke.cvReleaseImageHeader(ref _ptr);
  2454. GC.RemoveMemoryPressure(StructSize.MIplImage);
  2455. }
  2456. else //ImageDataReleaseMode.ReleaseIplImage
  2457. {
  2458. CvInvoke.cvReleaseImage(ref _ptr);
  2459. }
  2460. Debug.Assert(_ptr == IntPtr.Zero);
  2461. }
  2462. _array = null;
  2463. }
  2464. #endregion
  2465. #region Operator overload
  2466. /// <summary>
  2467. /// Perform an element wise AND operation on the two images
  2468. /// </summary>
  2469. /// <param name="img1">The first image to AND</param>
  2470. /// <param name="img2">The second image to AND</param>
  2471. /// <returns>The result of the AND operation</returns>
  2472. public static Image<TColor, TDepth> operator &(Image<TColor, TDepth> img1, Image<TColor, TDepth> img2)
  2473. {
  2474. return img1.And(img2);
  2475. }
  2476. /// <summary>
2477. /// Perform an element wise AND operation using an image and a value
2478. /// </summary>
2479. /// <param name="img1">The image to AND</param>
2480. /// <param name="val">The value to AND with each channel</param>
  2481. /// <returns>The result of the AND operation</returns>
  2482. public static Image<TColor, TDepth> operator &(Image<TColor, TDepth> img1, double val)
  2483. {
  2484. TColor color = new TColor();
  2485. color.MCvScalar = new MCvScalar(val, val, val, val);
  2486. return img1.And(color);
  2487. }
  2488. /// <summary>
2489. /// Perform an element wise AND operation using an image and a value
2490. /// </summary>
2491. /// <param name="img1">The image to AND</param>
2492. /// <param name="val">The value to AND with each channel</param>
  2493. /// <returns>The result of the AND operation</returns>
  2494. public static Image<TColor, TDepth> operator &(double val, Image<TColor, TDepth> img1)
  2495. {
  2496. TColor color = new TColor();
  2497. color.MCvScalar = new MCvScalar(val, val, val, val);
  2498. return img1.And(color);
  2499. }
  2500. /// <summary>
2501. /// Perform an element wise AND operation using an image and a color
2502. /// </summary>
2503. /// <param name="img1">The image to AND</param>
  2504. /// <param name="val">The color to AND</param>
  2505. /// <returns>The result of the AND operation</returns>
  2506. public static Image<TColor, TDepth> operator &(Image<TColor, TDepth> img1, TColor val)
  2507. {
  2508. return img1.And(val);
  2509. }
  2510. /// <summary>
2511. /// Perform an element wise AND operation using an image and a color
2512. /// </summary>
2513. /// <param name="img1">The image to AND</param>
  2514. /// <param name="val">The color to AND</param>
  2515. /// <returns>The result of the AND operation</returns>
  2516. public static Image<TColor, TDepth> operator &(TColor val, Image<TColor, TDepth> img1)
  2517. {
  2518. return img1.And(val);
  2519. }
  2520. /// <summary> Perform an element wise OR operation with another image and return the result</summary>
  2521. /// <param name="img1">The first image to apply bitwise OR operation</param>
  2522. /// <param name="img2">The second image to apply bitwise OR operation</param>
  2523. /// <returns> The result of the OR operation</returns>
  2524. public static Image<TColor, TDepth> operator |(Image<TColor, TDepth> img1, Image<TColor, TDepth> img2)
  2525. {
  2526. return img1.Or(img2);
  2527. }
  2528. /// <summary>
2529. /// Perform a binary OR operation with a value
2530. /// </summary>
2531. /// <param name="img1">The image to OR</param>
2532. /// <param name="val">The value to OR with each channel</param>
  2533. /// <returns> The result of the OR operation</returns>
  2534. public static Image<TColor, TDepth> operator |(Image<TColor, TDepth> img1, double val)
  2535. {
  2536. TColor color = new TColor();
  2537. color.MCvScalar = new MCvScalar(val, val, val, val);
  2538. return img1.Or(color);
  2539. }
  2540. /// <summary>
2541. /// Perform a binary OR operation with a value
2542. /// </summary>
2543. /// <param name="img1">The image to OR</param>
2544. /// <param name="val">The value to OR with each channel</param>
  2545. /// <returns> The result of the OR operation</returns>
  2546. public static Image<TColor, TDepth> operator |(double val, Image<TColor, TDepth> img1)
  2547. {
  2548. return img1 | val;
  2549. }
  2550. /// <summary>
2551. /// Perform a binary OR operation with a color
  2552. /// </summary>
  2553. /// <param name="img1">The image to OR</param>
  2554. /// <param name="val"> The color to OR</param>
  2555. /// <returns> The result of the OR operation</returns>
  2556. public static Image<TColor, TDepth> operator |(Image<TColor, TDepth> img1, TColor val)
  2557. {
  2558. return img1.Or(val);
  2559. }
  2560. /// <summary>
2561. /// Perform a binary OR operation with a color
  2562. /// </summary>
  2563. /// <param name="img1">The image to OR</param>
  2564. /// <param name="val"> The color to OR</param>
  2565. /// <returns> The result of the OR operation</returns>
  2566. public static Image<TColor, TDepth> operator |(TColor val, Image<TColor, TDepth> img1)
  2567. {
  2568. return img1.Or(val);
  2569. }
  2570. /// <summary>Compute the complement image</summary>
  2571. /// <param name="image">The image to be inverted</param>
  2572. /// <returns>The complement image</returns>
  2573. public static Image<TColor, TDepth> operator ~(Image<TColor, TDepth> image)
  2574. {
  2575. return image.Not();
  2576. }
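// A minimal sketch of the bitwise operators defined above; "gray" and "mask" are hypothetical
// Image<Gray, Byte> instances of the same size:
//
//   Image<Gray, Byte> masked   = gray & mask;   //keep pixels where the mask bits are set
//   Image<Gray, Byte> lowBits  = gray & 0x0F;   //AND every pixel with a constant
//   Image<Gray, Byte> combined = gray | mask;   //element wise OR of two images
//   Image<Gray, Byte> inverted = ~gray;         //complement (255 - value for a Byte image)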
  2577. /// <summary>
  2578. /// Element wise add <paramref name="img1"/> with <paramref name="img2"/>
  2579. /// </summary>
  2580. /// <param name="img1">The first image to be added</param>
  2581. /// <param name="img2">The second image to be added</param>
  2582. /// <returns>The sum of the two images</returns>
  2583. public static Image<TColor, TDepth> operator +(Image<TColor, TDepth> img1, Image<TColor, TDepth> img2)
  2584. {
  2585. return img1.Add(img2);
  2586. }
  2587. /// <summary>
  2588. /// Element wise add <paramref name="img1"/> with <paramref name="val"/>
  2589. /// </summary>
  2590. /// <param name="img1">The image to be added</param>
  2591. /// <param name="val">The value to be added</param>
2592. /// <returns>The image plus the value</returns>
  2593. public static Image<TColor, TDepth> operator +(double val, Image<TColor, TDepth> img1)
  2594. {
  2595. return img1 + val;
  2596. }
  2597. /// <summary>
  2598. /// Element wise add <paramref name="image"/> with <paramref name="value"/>
  2599. /// </summary>
  2600. /// <param name="image">The image to be added</param>
  2601. /// <param name="value">The value to be added</param>
2602. /// <returns>The image plus the value</returns>
  2603. public static Image<TColor, TDepth> operator +(Image<TColor, TDepth> image, double value)
  2604. {
  2605. TColor color = new TColor();
  2606. color.MCvScalar = new MCvScalar(value, value, value, value);
  2607. return image.Add(color);
  2608. }
  2609. /// <summary>
  2610. /// Element wise add <paramref name="image"/> with <paramref name="value"/>
  2611. /// </summary>
  2612. /// <param name="image">The image to be added</param>
  2613. /// <param name="value">The color to be added</param>
2614. /// <returns>The image plus the color</returns>
  2615. public static Image<TColor, TDepth> operator +(Image<TColor, TDepth> image, TColor value)
  2616. {
  2617. return image.Add(value);
  2618. }
  2619. /// <summary>
  2620. /// Element wise add <paramref name="image"/> with <paramref name="value"/>
  2621. /// </summary>
  2622. /// <param name="image">The image to be added</param>
  2623. /// <param name="value">The color to be added</param>
2624. /// <returns>The image plus the color</returns>
  2625. public static Image<TColor, TDepth> operator +(TColor value, Image<TColor, TDepth> image)
  2626. {
  2627. return image.Add(value);
  2628. }
  2629. /// <summary>
  2630. /// Element wise subtract another image from the current image
  2631. /// </summary>
2632. /// <param name="image1">The image to subtract from</param>
2633. /// <param name="image2">The second image to be subtracted from <paramref name="image1"/></param>
2634. /// <returns> The result of element wise subtracting <paramref name="image2"/> from <paramref name="image1"/> </returns>
  2635. public static Image<TColor, TDepth> operator -(Image<TColor, TDepth> image1, Image<TColor, TDepth> image2)
  2636. {
  2637. return image1.Sub(image2);
  2638. }
  2639. /// <summary>
2640. /// Element wise subtract a color from the image
2641. /// </summary>
2642. /// <param name="image">The image to subtract the color from</param>
2643. /// <param name="value">The color to be subtracted</param>
  2644. /// <returns> The result of element wise subtracting <paramref name="value"/> from <paramref name="image"/> </returns>
  2645. public static Image<TColor, TDepth> operator -(Image<TColor, TDepth> image, TColor value)
  2646. {
  2647. return image.Sub(value);
  2648. }
  2649. /// <summary>
2650. /// Element wise subtract the image from a color: <paramref name="value"/> - <paramref name="image"/>
2651. /// </summary>
2652. /// <param name="image">The image to be subtracted</param>
2653. /// <param name="value">The color from which the image is subtracted</param>
  2654. /// <returns> <paramref name="value"/> - <paramref name="image"/> </returns>
  2655. public static Image<TColor, TDepth> operator -(TColor value, Image<TColor, TDepth> image)
  2656. {
  2657. return image.SubR(value);
  2658. }
  2659. /// <summary>
  2660. /// <paramref name="value"/> - <paramref name="image"/>
  2661. /// </summary>
  2662. /// <param name="image">The image to be subtracted</param>
2663. /// <param name="value">The value from which the image is subtracted</param>
  2664. /// <returns> <paramref name="value"/> - <paramref name="image"/> </returns>
  2665. public static Image<TColor, TDepth> operator -(double value, Image<TColor, TDepth> image)
  2666. {
  2667. TColor color = new TColor();
  2668. color.MCvScalar = new MCvScalar(value, value, value, value);
  2669. return image.SubR(color);
  2670. }
  2671. /// <summary>
2672. /// Element wise subtract a value from the image
2673. /// </summary>
2674. /// <param name="image">The image to subtract the value from</param>
  2675. /// <param name="value">The value to be subtracted</param>
  2676. /// <returns> <paramref name="image"/> - <paramref name="value"/> </returns>
  2677. public static Image<TColor, TDepth> operator -(Image<TColor, TDepth> image, double value)
  2678. {
  2679. TColor color = new TColor();
  2680. color.MCvScalar = new MCvScalar(value, value, value, value);
  2681. return image.Sub(color);
  2682. }
  2683. /// <summary>
  2684. /// <paramref name="image"/> * <paramref name="scale"/>
  2685. /// </summary>
  2686. /// <param name="image">The image</param>
  2687. /// <param name="scale">The multiplication scale</param>
  2688. /// <returns><paramref name="image"/> * <paramref name="scale"/></returns>
  2689. public static Image<TColor, TDepth> operator *(Image<TColor, TDepth> image, double scale)
  2690. {
  2691. return image.Mul(scale);
  2692. }
  2693. /// <summary>
  2694. /// <paramref name="scale"/>*<paramref name="image"/>
  2695. /// </summary>
  2696. /// <param name="image">The image</param>
  2697. /// <param name="scale">The multiplication scale</param>
  2698. /// <returns><paramref name="scale"/>*<paramref name="image"/></returns>
  2699. public static Image<TColor, TDepth> operator *(double scale, Image<TColor, TDepth> image)
  2700. {
  2701. return image.Mul(scale);
  2702. }
  2703. /// <summary>
  2704. /// Perform the convolution with <paramref name="kernel"/> on <paramref name="image"/>
  2705. /// </summary>
  2706. /// <param name="image">The image</param>
  2707. /// <param name="kernel">The kernel</param>
  2708. /// <returns>Result of the convolution</returns>
  2709. public static Image<TColor, Single> operator *(Image<TColor, TDepth> image, ConvolutionKernelF kernel)
  2710. {
  2711. return image.Convolution(kernel);
  2712. }
  2713. /// <summary>
  2714. /// <paramref name="image"/> / <paramref name="scale"/>
  2715. /// </summary>
  2716. /// <param name="image">The image</param>
  2717. /// <param name="scale">The division scale</param>
  2718. /// <returns><paramref name="image"/> / <paramref name="scale"/></returns>
  2719. public static Image<TColor, TDepth> operator /(Image<TColor, TDepth> image, double scale)
  2720. {
  2721. return image.Mul(1.0 / scale);
  2722. }
  2723. /// <summary>
  2724. /// <paramref name="scale"/> / <paramref name="image"/>
  2725. /// </summary>
  2726. /// <param name="image">The image</param>
  2727. /// <param name="scale">The scale</param>
  2728. /// <returns><paramref name="scale"/> / <paramref name="image"/></returns>
  2729. public static Image<TColor, TDepth> operator /(double scale, Image<TColor, TDepth> image)
  2730. {
  2731. Image<TColor, TDepth> res = image.CopyBlank();
  2732. using (ScalarArray ia = new ScalarArray(scale))
  2733. {
  2734. CvInvoke.Divide(ia, image, res, 1.0, CvInvoke.GetDepthType(typeof(TDepth)));
  2735. }
  2736. return res;
  2737. }
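// A minimal sketch of the arithmetic operators above; "imgA" and "imgB" are hypothetical
// Image<Gray, float> instances of the same size:
//
//   Image<Gray, float> sum    = imgA + imgB;   //element wise addition
//   Image<Gray, float> offset = imgA + 10.0;   //add a constant to every pixel
//   Image<Gray, float> diff   = imgA - imgB;   //element wise subtraction
//   Image<Gray, float> scaled = imgA * 0.5;    //multiply every pixel by a scale
//   Image<Gray, float> halved = imgA / 2.0;    //the same result via division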
  2738. #endregion
  2739. #region Filters
  2740. /// <summary>
2741. /// Summation over a <paramref name="width"/> x <paramref name="height"/> pixel neighborhood with subsequent scaling by 1/(<paramref name="width"/> x <paramref name="height"/>)
  2742. /// </summary>
  2743. /// <param name="width">The width of the window</param>
  2744. /// <param name="height">The height of the window</param>
  2745. /// <returns>The result of blur</returns>
  2746. public Image<TColor, TDepth> SmoothBlur(int width, int height)
  2747. {
  2748. return SmoothBlur(width, height, true);
  2749. }
  2750. /// <summary>
2751. /// Summation over a <paramref name="width"/> x <paramref name="height"/> pixel neighborhood. If <paramref name="scale"/> is true, the result is subsequently scaled by 1/(<paramref name="width"/> x <paramref name="height"/>)
  2752. /// </summary>
  2753. /// <param name="width">The width of the window</param>
  2754. /// <param name="height">The height of the window</param>
2755. /// <param name="scale">If true, the result is subsequently scaled by 1/(<paramref name="width"/> x <paramref name="height"/>)</param>
  2756. /// <returns>The result of blur</returns>
  2757. [ExposableMethod(Exposable = true, Category = "Smoothing")]
  2758. public Image<TColor, TDepth> SmoothBlur(int width, int height, bool scale)
  2759. {
  2760. Image<TColor, TDepth> res = CopyBlank();
  2761. CvInvoke.BoxFilter(this, res, CvInvoke.GetDepthType(typeof(TDepth)), new Size(width, height), new Point(-1, -1), scale);
  2762. return res;
  2763. }
  2764. /// <summary>
  2765. /// Finding median of <paramref name="size"/>x<paramref name="size"/> neighborhood
  2766. /// </summary>
  2767. /// <param name="size">The size (width &amp; height) of the window</param>
  2768. /// <returns>The result of median smooth</returns>
  2769. [ExposableMethod(Exposable = true, Category = "Smoothing")]
  2770. public Image<TColor, TDepth> SmoothMedian(int size)
  2771. {
  2772. Image<TColor, TDepth> res = CopyBlank();
  2773. CvInvoke.MedianBlur(this, res, size);
  2774. return res;
  2775. }
  2776. /// <summary>
2777. /// Applying bilateral filtering with the given kernel size
  2778. /// </summary>
  2779. /// <param name="colorSigma">Color sigma</param>
  2780. /// <param name="spaceSigma">Space sigma</param>
  2781. /// <param name="kernelSize">The size of the bilateral kernel</param>
  2782. /// <returns>The result of bilateral smooth</returns>
  2783. [ExposableMethod(Exposable = true, Category = "Smoothing")]
  2784. public Image<TColor, TDepth> SmoothBilateral(int kernelSize, int colorSigma, int spaceSigma)
  2785. {
  2786. Image<TColor, TDepth> res = CopyBlank();
  2787. CvInvoke.BilateralFilter(this, res, kernelSize, colorSigma, spaceSigma);
  2788. return res;
  2789. }
  2790. #region Gaussian Smooth
  2791. /// <summary> Perform Gaussian Smoothing in the current image and return the result </summary>
  2792. /// <param name="kernelSize"> The size of the Gaussian kernel (<paramref name="kernelSize"/> x <paramref name="kernelSize"/>)</param>
  2793. /// <returns> The smoothed image</returns>
  2794. public Image<TColor, TDepth> SmoothGaussian(int kernelSize)
  2795. {
  2796. return SmoothGaussian(kernelSize, kernelSize, 0, 0);
  2797. }
  2798. /// <summary> Perform Gaussian Smoothing in the current image and return the result </summary>
  2799. /// <param name="kernelWidth"> The width of the Gaussian kernel</param>
  2800. /// <param name="kernelHeight"> The height of the Gaussian kernel</param>
  2801. /// <param name="sigma1"> The standard deviation of the Gaussian kernel in the horizontal dimension</param>
  2802. /// <param name="sigma2"> The standard deviation of the Gaussian kernel in the vertical dimension</param>
  2803. /// <returns> The smoothed image</returns>
  2804. [ExposableMethod(Exposable = true, Category = "Smoothing")]
  2805. public Image<TColor, TDepth> SmoothGaussian(int kernelWidth, int kernelHeight, double sigma1, double sigma2)
  2806. {
  2807. Image<TColor, TDepth> res = CopyBlank();
  2808. CvInvoke.GaussianBlur(this, res, new Size(kernelWidth, kernelHeight), sigma1, sigma2);
  2809. return res;
  2810. }
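// A minimal smoothing sketch; "noisy" is a hypothetical Image<Bgr, Byte> supplied by the caller:
//
//   Image<Bgr, Byte> blurred = noisy.SmoothGaussian(5);              //5x5 kernel, sigma derived from the kernel size
//   Image<Bgr, Byte> custom  = noisy.SmoothGaussian(7, 7, 1.5, 1.5); //explicit kernel size and sigmas
//   noisy._SmoothGaussian(3);                                        //inplace variant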
  2811. /// <summary> Perform Gaussian Smoothing inplace for the current image </summary>
  2812. /// <param name="kernelSize"> The size of the Gaussian kernel (<paramref name="kernelSize"/> x <paramref name="kernelSize"/>)</param>
  2813. public void _SmoothGaussian(int kernelSize)
  2814. {
  2815. _SmoothGaussian(kernelSize, kernelSize, 0, 0);
  2816. }
  2817. /// <summary> Perform Gaussian Smoothing inplace for the current image </summary>
  2818. /// <param name="kernelWidth"> The width of the Gaussian kernel</param>
  2819. /// <param name="kernelHeight"> The height of the Gaussian kernel</param>
  2820. /// <param name="sigma1"> The standard deviation of the Gaussian kernel in the horizontal dimension</param>
  2821. /// <param name="sigma2"> The standard deviation of the Gaussian kernel in the vertical dimension</param>
  2822. public void _SmoothGaussian(int kernelWidth, int kernelHeight, double sigma1, double sigma2)
  2823. {
  2824. CvInvoke.GaussianBlur(this, this, new Size(kernelWidth, kernelHeight), sigma1, sigma2);
  2825. }
  2826. /// <summary>
  2827. /// Performs a convolution using the specific <paramref name="kernel"/>
  2828. /// </summary>
  2829. /// <param name="kernel">The convolution kernel</param>
  2830. /// <param name="delta">The optional value added to the filtered pixels before storing them in dst</param>
  2831. /// <param name="borderType">The pixel extrapolation method.</param>
  2832. /// <returns>The result of the convolution</returns>
  2833. public Image<TColor, Single> Convolution(ConvolutionKernelF kernel, double delta = 0,
  2834. Emgu.CV.CvEnum.BorderType borderType = CvEnum.BorderType.Default)
  2835. {
  2836. Image<TColor, Single> floatImage =
  2837. (typeof(TDepth) == typeof(Single)) ?
  2838. this as Image<TColor, Single>
  2839. : Convert<TColor, Single>();
  2840. try
  2841. {
  2842. Size s = Size;
  2843. Image<TColor, Single> res = new Image<TColor, Single>(s);
  2844. int numberOfChannels = NumberOfChannels;
  2845. if (numberOfChannels == 1)
  2846. CvInvoke.Filter2D(floatImage, res, kernel, kernel.Center, delta, borderType);
  2847. else
  2848. {
  2849. using (Mat m1 = new Mat(s, DepthType.Cv32F, 1))
  2850. using (Mat m2 = new Mat(s, DepthType.Cv32F, 1))
  2851. {
  2852. for (int i = 0; i < numberOfChannels; i++)
  2853. {
  2854. CvInvoke.ExtractChannel(floatImage, m1, i);
  2855. CvInvoke.Filter2D(m1, m2, kernel, kernel.Center, delta, borderType);
  2856. CvInvoke.InsertChannel(m2, res, i);
  2857. }
  2858. }
  2859. }
  2860. return res;
  2861. }
  2862. finally
  2863. {
  2864. if (!object.ReferenceEquals(floatImage, this))
  2865. floatImage.Dispose();
  2866. }
  2867. }
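// A convolution sketch, assuming ConvolutionKernelF can be constructed from a float[,];
// "img" is a hypothetical Image<Gray, Byte>:
//
//   float[,] laplacian = new float[,]
//   {
//       { 0,  1, 0 },
//       { 1, -4, 1 },
//       { 0,  1, 0 }
//   };
//   using (ConvolutionKernelF kernel = new ConvolutionKernelF(laplacian))
//   {
//       Image<Gray, float> edges = img.Convolution(kernel);   //equivalent to: img * kernel
//   }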
  2868. /// <summary>
  2869. /// Calculates integral images for the source image
  2870. /// </summary>
  2871. /// <returns>The integral image</returns>
  2872. public Image<TColor, double> Integral()
  2873. {
  2874. Image<TColor, double> sum = new Image<TColor, double>(Width + 1, Height + 1);
  2875. CvInvoke.Integral(this, sum, null, null, CvEnum.DepthType.Cv64F);
  2876. return sum;
  2877. }
  2878. /// <summary>
  2879. /// Calculates integral images for the source image
  2880. /// </summary>
  2881. /// <param name="sum">The integral image</param>
  2882. /// <param name="squareSum">The integral image for squared pixel values</param>
  2884. public void Integral(out Image<TColor, double> sum, out Image<TColor, double> squareSum)
  2885. {
  2886. sum = new Image<TColor, double>(Width + 1, Height + 1);
  2887. squareSum = new Image<TColor, double>(Width + 1, Height + 1);
  2888. CvInvoke.Integral(this, sum, squareSum, null, CvEnum.DepthType.Cv64F);
  2889. }
  2890. /// <summary>
  2891. /// Calculates one or more integral images for the source image
  2892. /// </summary>
  2893. /// <param name="sum">The integral image</param>
  2894. /// <param name="squareSum">The integral image for squared pixel values</param>
  2895. /// <param name="titledSum">The integral for the image rotated by 45 degrees</param>
  2896. public void Integral(out Image<TColor, double> sum, out Image<TColor, double> squareSum, out Image<TColor, double> titledSum)
  2897. {
  2898. sum = new Image<TColor, double>(Width + 1, Height + 1);
  2899. squareSum = new Image<TColor, double>(Width + 1, Height + 1);
  2900. titledSum = new Image<TColor, double>(Width + 1, Height + 1);
  2901. CvInvoke.Integral(this, sum, squareSum, titledSum, CvEnum.DepthType.Cv64F);
  2902. }
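// Usage sketch for the integral image: the sum of pixel values inside any rectangle
// [x0, x1) x [y0, y1) can be read back with four lookups. The variables below are hypothetical.
//
//   Image<Gray, Byte> img = new Image<Gray, Byte>(640, 480);
//   Image<Gray, double> sum = img.Integral();   //size is (Width + 1) x (Height + 1)
//   int x0 = 10, y0 = 20, x1 = 110, y1 = 70;
//   double boxSum = sum.Data[y1, x1, 0] - sum.Data[y0, x1, 0]
//                 - sum.Data[y1, x0, 0] + sum.Data[y0, x0, 0];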
  2903. #endregion
  2904. #region Threshold methods
  2905. /// <summary>
2906. /// Transforms a grayscale image to a binary image.
2907. /// The threshold is calculated individually for each pixel.
2908. /// For the method CV_ADAPTIVE_THRESH_MEAN_C it is the mean of a <paramref name="blockSize"/> x <paramref name="blockSize"/> pixel
2909. /// neighborhood, minus <paramref name="param1"/>.
2910. /// For the method CV_ADAPTIVE_THRESH_GAUSSIAN_C it is a weighted sum (Gaussian) of a <paramref name="blockSize"/> x <paramref name="blockSize"/> pixel neighborhood, minus <paramref name="param1"/>.
  2911. /// </summary>
  2912. /// <param name="maxValue">Maximum value to use with CV_THRESH_BINARY and CV_THRESH_BINARY_INV thresholding types</param>
2913. /// <param name="adaptiveType">The adaptive thresholding method to use</param>
2914. /// <param name="thresholdType">Thresholding type; must be either CV_THRESH_BINARY or CV_THRESH_BINARY_INV</param>
  2915. /// <param name="blockSize">The size of a pixel neighborhood that is used to calculate a threshold value for the pixel: 3, 5, 7, ... </param>
  2916. /// <param name="param1">Constant subtracted from mean or weighted mean. It may be negative. </param>
  2917. /// <returns>The result of the adaptive threshold</returns>
  2918. [ExposableMethod(Exposable = true, Category = "Threshold")]
  2919. public Image<TColor, TDepth> ThresholdAdaptive(
  2920. TColor maxValue,
  2921. CvEnum.AdaptiveThresholdType adaptiveType,
  2922. CvEnum.ThresholdType thresholdType,
  2923. int blockSize,
  2924. TColor param1)
  2925. {
  2926. double[] max = maxValue.MCvScalar.ToArray();
  2927. double[] p1 = param1.MCvScalar.ToArray();
  2928. Image<TColor, TDepth> result = CopyBlank();
  2929. ForEachDuplicateChannel<TDepth>(
  2930. delegate (IInputArray src, IOutputArray dst, int channel)
  2931. {
  2932. CvInvoke.AdaptiveThreshold(src, dst, max[channel], adaptiveType, thresholdType, blockSize, p1[channel]);
  2933. },
  2934. result);
  2935. return result;
  2936. }
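// An adaptive threshold sketch; "gray" is a hypothetical Image<Gray, Byte> (e.g. a scanned page):
//
//   Image<Gray, Byte> binary = gray.ThresholdAdaptive(
//       new Gray(255),                            //value given to pixels that pass the test
//       CvEnum.AdaptiveThresholdType.GaussianC,   //Gaussian-weighted neighborhood mean
//       CvEnum.ThresholdType.Binary,
//       11,                                       //blockSize: odd size of the local neighborhood
//       new Gray(5));                             //constant subtracted from the local mean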
  2937. /// <summary>
  2938. /// The base threshold method shared by public threshold functions
  2939. /// </summary>
  2940. private void ThresholdBase(Image<TColor, TDepth> dest, TColor threshold, TColor maxValue, CvEnum.ThresholdType threshType)
  2941. {
  2942. double[] t = threshold.MCvScalar.ToArray();
  2943. double[] m = maxValue.MCvScalar.ToArray();
  2944. ForEachDuplicateChannel<TDepth>(
  2945. delegate (IInputArray src, IOutputArray dst, int channel)
  2946. {
  2947. CvInvoke.Threshold(src, dst, t[channel], m[channel], threshType);
  2948. },
  2949. dest);
  2950. }
  2951. /// <summary> Threshold the image such that: dst(x,y) = src(x,y), if src(x,y)&gt;threshold; 0, otherwise </summary>
  2952. /// <param name="threshold">The threshold value</param>
  2953. /// <returns> dst(x,y) = src(x,y), if src(x,y)&gt;threshold; 0, otherwise </returns>
  2954. public Image<TColor, TDepth> ThresholdToZero(TColor threshold)
  2955. {
  2956. Image<TColor, TDepth> res = CopyBlank();
  2957. ThresholdBase(res, threshold, new TColor(), CvEnum.ThresholdType.ToZero);
  2958. return res;
  2959. }
  2960. /// <summary>
  2961. /// Threshold the image such that: dst(x,y) = 0, if src(x,y)&gt;threshold; src(x,y), otherwise
  2962. /// </summary>
  2963. /// <param name="threshold">The threshold value</param>
  2964. /// <returns>The image such that: dst(x,y) = 0, if src(x,y)&gt;threshold; src(x,y), otherwise</returns>
  2965. public Image<TColor, TDepth> ThresholdToZeroInv(TColor threshold)
  2966. {
  2967. Image<TColor, TDepth> res = CopyBlank();
  2968. ThresholdBase(res, threshold, new TColor(), CvEnum.ThresholdType.ToZeroInv);
  2969. return res;
  2970. }
  2971. /// <summary>
  2972. /// Threshold the image such that: dst(x,y) = threshold, if src(x,y)&gt;threshold; src(x,y), otherwise
  2973. /// </summary>
  2974. /// <param name="threshold">The threshold value</param>
  2975. /// <returns>The image such that: dst(x,y) = threshold, if src(x,y)&gt;threshold; src(x,y), otherwise</returns>
  2976. public Image<TColor, TDepth> ThresholdTrunc(TColor threshold)
  2977. {
  2978. Image<TColor, TDepth> res = CopyBlank();
  2979. ThresholdBase(res, threshold, new TColor(), CvEnum.ThresholdType.Trunc);
  2980. return res;
  2981. }
  2982. /// <summary>
  2983. /// Threshold the image such that: dst(x,y) = max_value, if src(x,y)&gt;threshold; 0, otherwise
  2984. /// </summary>
  2985. /// <param name="threshold">The threshold value</param>
  2986. /// <param name="maxValue">The maximum value of the pixel on the result</param>
  2987. /// <returns>The image such that: dst(x,y) = max_value, if src(x,y)&gt;threshold; 0, otherwise </returns>
  2988. public Image<TColor, TDepth> ThresholdBinary(TColor threshold, TColor maxValue)
  2989. {
  2990. Image<TColor, TDepth> res = CopyBlank();
  2991. ThresholdBase(res, threshold, maxValue, CvEnum.ThresholdType.Binary);
  2992. return res;
  2993. }
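// A minimal sketch contrasting the out-of-place and inplace binary threshold; "gray" is a
// hypothetical Image<Gray, Byte>:
//
//   Image<Gray, Byte> mask = gray.ThresholdBinary(new Gray(128), new Gray(255));   //returns a new image
//   gray._ThresholdBinary(new Gray(128), new Gray(255));                           //modifies gray itself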
  2994. /// <summary> Threshold the image such that: dst(x,y) = 0, if src(x,y)&gt;threshold; max_value, otherwise </summary>
  2995. /// <param name="threshold">The threshold value</param>
  2996. /// <param name="maxValue">The maximum value of the pixel on the result</param>
  2997. /// <returns>The image such that: dst(x,y) = 0, if src(x,y)&gt;threshold; max_value, otherwise</returns>
  2998. public Image<TColor, TDepth> ThresholdBinaryInv(TColor threshold, TColor maxValue)
  2999. {
  3000. Image<TColor, TDepth> res = CopyBlank();
  3001. ThresholdBase(res, threshold, maxValue, CvEnum.ThresholdType.BinaryInv);
  3002. return res;
  3003. }
  3004. /// <summary> Threshold the image inplace such that: dst(x,y) = src(x,y), if src(x,y)&gt;threshold; 0, otherwise </summary>
  3005. /// <param name="threshold">The threshold value</param>
  3006. [ExposableMethod(Exposable = true, Category = "Threshold")]
  3007. public void _ThresholdToZero(TColor threshold)
  3008. {
  3009. ThresholdBase(this, threshold, new TColor(), CvEnum.ThresholdType.ToZero);
  3010. }
  3011. /// <summary> Threshold the image inplace such that: dst(x,y) = 0, if src(x,y)&gt;threshold; src(x,y), otherwise </summary>
  3012. /// <param name="threshold">The threshold value</param>
  3013. [ExposableMethod(Exposable = true, Category = "Threshold")]
  3014. public void _ThresholdToZeroInv(TColor threshold)
  3015. {
  3016. ThresholdBase(this, threshold, new TColor(), CvEnum.ThresholdType.ToZeroInv);
  3017. }
  3018. /// <summary> Threshold the image inplace such that: dst(x,y) = threshold, if src(x,y)&gt;threshold; src(x,y), otherwise </summary>
  3019. /// <param name="threshold">The threshold value</param>
  3020. [ExposableMethod(Exposable = true, Category = "Threshold")]
  3021. public void _ThresholdTrunc(TColor threshold)
  3022. {
  3023. ThresholdBase(this, threshold, new TColor(), CvEnum.ThresholdType.Trunc);
  3024. }
  3025. /// <summary> Threshold the image inplace such that: dst(x,y) = max_value, if src(x,y)&gt;threshold; 0, otherwise </summary>
  3026. /// <param name="threshold">The threshold value</param>
  3027. /// <param name="maxValue">The maximum value of the pixel on the result</param>
  3028. [ExposableMethod(Exposable = true, Category = "Threshold")]
  3029. public void _ThresholdBinary(TColor threshold, TColor maxValue)
  3030. {
  3031. ThresholdBase(this, threshold, maxValue, CvEnum.ThresholdType.Binary);
  3032. }
  3033. /// <summary> Threshold the image inplace such that: dst(x,y) = 0, if src(x,y)&gt;threshold; max_value, otherwise </summary>
  3034. /// <param name="threshold">The threshold value</param>
  3035. /// <param name="maxValue">The maximum value of the pixel on the result</param>
  3036. [ExposableMethod(Exposable = true, Category = "Threshold")]
  3037. public void _ThresholdBinaryInv(TColor threshold, TColor maxValue)
  3038. {
  3039. ThresholdBase(this, threshold, maxValue, CvEnum.ThresholdType.BinaryInv);
  3040. }
  3041. #endregion
  3042. #endregion
  3043. #region Statistic
  3044. /// <summary>
  3045. /// Calculates the average value and standard deviation of array elements, independently for each channel
  3046. /// </summary>
  3047. /// <param name="average">The avg color</param>
  3048. /// <param name="sdv">The standard deviation for each channel</param>
  3049. /// <param name="mask">The operation mask</param>
  3050. public void AvgSdv(out TColor average, out MCvScalar sdv, Image<Gray, Byte> mask)
  3051. {
  3052. average = new TColor();
  3053. MCvScalar avgScalar = new MCvScalar();
  3054. sdv = new MCvScalar();
  3055. CvInvoke.MeanStdDev(this, ref avgScalar, ref sdv, mask);
  3056. average.MCvScalar = avgScalar;
  3057. }
  3058. /// <summary>
  3059. /// Calculates the average value and standard deviation of array elements, independently for each channel
  3060. /// </summary>
  3061. /// <param name="avg">The avg color</param>
  3062. /// <param name="sdv">The standard deviation for each channel</param>
  3063. public void AvgSdv(out TColor avg, out MCvScalar sdv)
  3064. {
  3065. AvgSdv(out avg, out sdv, null);
  3066. }
  3067. /// <summary>
3068. /// Count the non-zero elements for each channel
3069. /// </summary>
3070. /// <returns>The number of non-zero elements for each channel</returns>
  3071. public int[] CountNonzero()
  3072. {
  3073. using (Mat m = CvInvoke.CvArrToMat(this))
  3074. {
  3075. if (NumberOfChannels == 1)
  3076. return new int[] { CvInvoke.CountNonZero(m) };
  3077. else
  3078. {
  3079. int[] result = new int[NumberOfChannels];
  3080. using (Mat tmp = new Mat())
  3081. for (int i = 0; i < result.Length; i++)
  3082. {
  3083. CvInvoke.ExtractChannel(m, tmp, i);
  3084. result[i] = CvInvoke.CountNonZero(tmp);
  3085. }
  3086. return result;
  3087. }
  3088. }
  3089. }
  3090. /// <summary>
  3091. /// Returns the min / max location and values for the image
  3092. /// </summary>
  3093. /// <param name="maxLocations">The maximum locations for each channel </param>
  3094. /// <param name="maxValues">The maximum values for each channel</param>
  3095. /// <param name="minLocations">The minimum locations for each channel</param>
  3096. /// <param name="minValues">The minimum values for each channel</param>
  3097. public void MinMax(out double[] minValues, out double[] maxValues, out Point[] minLocations, out Point[] maxLocations)
  3098. {
  3099. Mat.MinMax(out minValues, out maxValues, out minLocations, out maxLocations);
  3100. }
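// A statistics sketch; "img" is a hypothetical Image<Bgr, Byte>:
//
//   double[] minValues, maxValues;
//   Point[] minLocations, maxLocations;
//   img.MinMax(out minValues, out maxValues, out minLocations, out maxLocations);
//   //one entry per channel, e.g. maxValues[2] / maxLocations[2] for the red channel of a Bgr image
//
//   Bgr mean; MCvScalar stdDev;
//   img.AvgSdv(out mean, out stdDev);   //per-channel mean and standard deviation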
  3101. #endregion
  3102. #region Image Flipping
  3103. /// <summary> Return a flipped copy of the current image</summary>
  3104. /// <param name="flipType">The type of the flipping</param>
  3105. /// <returns> The flipped copy of <i>this</i> image </returns>
  3106. public Image<TColor, TDepth> Flip(CvEnum.FlipType flipType)
  3107. {
  3108. Image<TColor, TDepth> res = CopyBlank();
  3109. CvInvoke.Flip(this, res, flipType);
  3110. return res;
  3111. }
  3112. /// <summary> Inplace flip the image</summary>
  3113. /// <param name="flipType">The type of the flipping</param>
  3115. [ExposableMethod(Exposable = true, Category = "Transform")]
  3116. public void _Flip(CvEnum.FlipType flipType)
  3117. {
  3118. CvInvoke.Flip(
  3119. this,
  3120. this,
  3121. flipType);
  3122. }
  3123. #endregion
  3124. #region various
  3125. /// <summary>
3126. /// Concatenate the current image with another image vertically.
3127. /// </summary>
3128. /// <param name="otherImage">The other image to concatenate</param>
3129. /// <returns>A new image that is the vertical concatenation of this image and <paramref name="otherImage"/></returns>
  3130. public Image<TColor, TDepth> ConcateVertical(Image<TColor, TDepth> otherImage)
  3131. {
  3132. Image<TColor, TDepth> res = new Image<TColor, TDepth>(Math.Max(Width, otherImage.Width), Height + otherImage.Height);
  3133. res.ROI = ROI;
  3134. CvInvoke.cvCopy(Ptr, res.Ptr, IntPtr.Zero);
  3135. Rectangle rect = otherImage.ROI;
  3136. rect.Y += Height;
  3137. res.ROI = rect;
  3138. CvInvoke.cvCopy(otherImage.Ptr, res.Ptr, IntPtr.Zero);
  3139. res.ROI = Rectangle.Empty;
  3140. return res;
  3141. }
  3142. /// <summary>
3143. /// Concatenate the current image with another image horizontally.
3144. /// </summary>
3145. /// <param name="otherImage">The other image to concatenate</param>
3146. /// <returns>A new image that is the horizontal concatenation of this image and <paramref name="otherImage"/></returns>
  3147. public Image<TColor, TDepth> ConcateHorizontal(Image<TColor, TDepth> otherImage)
  3148. {
  3149. Image<TColor, TDepth> res = new Image<TColor, TDepth>(Width + otherImage.Width, Math.Max(Height, otherImage.Height));
  3150. res.ROI = ROI;
  3151. CvInvoke.cvCopy(Ptr, res.Ptr, IntPtr.Zero);
  3152. Rectangle rect = otherImage.ROI;
  3153. rect.X += Width;
  3154. res.ROI = rect;
  3155. CvInvoke.cvCopy(otherImage.Ptr, res.Ptr, IntPtr.Zero);
  3156. res.ROI = Rectangle.Empty;
  3157. return res;
  3158. }
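// A concatenation sketch; "left" and "right" are hypothetical images of the same color and depth:
//
//   Image<Bgr, Byte> sideBySide = left.ConcateHorizontal(right);   //width  = left.Width  + right.Width
//   Image<Bgr, Byte> stacked    = left.ConcateVertical(right);     //height = left.Height + right.Height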
  3159. /// <summary>
3160. /// Calculates spatial and central moments up to the third order and writes them to moments. The moments may then be used to calculate the gravity center of the shape, its area, main axes and various shape characteristics including 7 Hu invariants.
  3161. /// </summary>
3162. /// <param name="binary">If the flag is true, all non-zero pixel values are treated as 1's</param>
  3163. /// <returns>spatial and central moments up to the third order</returns>
  3164. public Emgu.CV.Moments GetMoments(bool binary)
  3165. {
  3166. return CvInvoke.Moments(this, binary);
  3167. }
  3168. /// <summary>
  3169. /// Gamma corrects this image inplace. The image must have a depth type of Byte.
  3170. /// </summary>
  3171. /// <param name="gamma">The gamma value</param>
  3172. [ExposableMethod(Exposable = true)]
  3173. public void _GammaCorrect(double gamma)
  3174. {
  3175. Image<TColor, Byte> img = this as Image<TColor, Byte>;
  3176. if (img == null)
  3177. throw new NotImplementedException("Gamma correction only implemented for Image of Byte as Depth");
  3178. Byte[,] gammaLUT = new Byte[256, 1];
  3179. for (int i = 0; i < 256; i++)
  3180. gammaLUT[i, 0] = (Byte)(Math.Pow(i / 255.0, gamma) * 255.0);
  3181. using (Matrix<Byte> lut = new Matrix<byte>(gammaLUT))
  3182. {
  3183. Matrix<Byte> lookupTable;
  3184. if (lut.NumberOfChannels == 1)
  3185. lookupTable = lut;
  3186. else
  3187. {
  3188. lookupTable = new Matrix<byte>(lut.Rows, lut.Cols, NumberOfChannels);
  3189. using (VectorOfMat mv = new VectorOfMat())
  3190. {
  3191. for (int i = 0; i < NumberOfChannels; i++)
  3192. mv.Push(lut.Mat);
  3193. CvInvoke.Merge(mv, lookupTable);
  3194. }
  3195. /*
  3196. CvInvoke.cvMerge(
  3197. lut.Ptr,
  3198. NumberOfChannels > 1 ? lut.Ptr : IntPtr.Zero,
  3199. NumberOfChannels > 2 ? lut.Ptr : IntPtr.Zero,
  3200. NumberOfChannels > 3 ? lut.Ptr : IntPtr.Zero,
  3201. lookupTable.Ptr);
  3202. */
  3203. }
  3204. CvInvoke.LUT(this, lookupTable, this);
  3205. if (!object.ReferenceEquals(lut, lookupTable))
  3206. lookupTable.Dispose();
  3207. }
  3208. }
  3209. /// <summary>
3210. /// Split the current Image into an array of gray scale images where each element
3211. /// in the array represents a single color channel of the original image
  3212. /// </summary>
  3213. /// <returns>
3214. /// An array of gray scale images where each element
3215. /// in the array represents a single color channel of the original image
  3216. /// </returns>
  3217. public Image<Gray, TDepth>[] Split()
  3218. {
  3219. //If single channel, return a copy
  3220. if (NumberOfChannels == 1) return new Image<Gray, TDepth>[] { Copy() as Image<Gray, TDepth> };
  3221. //handle multiple channels
  3222. Image<Gray, TDepth>[] res = new Image<Gray, TDepth>[NumberOfChannels];
  3223. using (Util.VectorOfMat vm = new VectorOfMat())
  3224. {
  3225. Size size = Size;
  3226. for (int i = 0; i < NumberOfChannels; i++)
  3227. {
  3228. res[i] = new Image<Gray, TDepth>(size);
  3229. vm.Push(res[i].Mat);
  3230. }
  3231. CvInvoke.Split(this, vm);
  3232. }
  3233. return res;
  3234. }
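// A channel-splitting sketch; "color" is a hypothetical Image<Bgr, Byte>:
//
//   Image<Gray, Byte>[] channels = color.Split();   //channels[0] = B, [1] = G, [2] = R for a Bgr image
//   Image<Gray, Byte> greenOnly = channels[1].Copy();
//   foreach (Image<Gray, Byte> c in channels) c.Dispose();   //the caller owns the returned images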
  3235. /// <summary>
  3236. /// Save this image to the specific file.
  3237. /// </summary>
  3238. /// <param name="fileName">The name of the file to be saved to</param>
  3239. /// <remarks>The image format is chosen depending on the filename extension, see cvLoadImage. Only 8-bit single-channel or 3-channel (with 'BGR' channel order) images can be saved using this function. If the format, depth or channel order is different, use cvCvtScale and cvCvtColor to convert it before saving, or use universal cvSave to save the image to XML or YAML format.</remarks>
  3240. public override void Save(String fileName)
  3241. {
  3242. if (NumberOfChannels == 3 && typeof(TColor) != typeof(Bgr))
  3243. {
  3244. using (Mat tmp = new Mat())
  3245. {
  3246. CvInvoke.CvtColor(this, tmp, typeof(TColor), typeof(Bgr));
  3247. tmp.Save(fileName);
  3248. }
  3249. }
  3250. else if (NumberOfChannels == 4 && typeof(TColor) != typeof(Bgra))
  3251. {
  3252. using (Mat tmp = new Mat())
  3253. {
  3254. CvInvoke.CvtColor(this, tmp, typeof(TColor), typeof(Bgra));
  3255. tmp.Save(fileName);
  3256. }
  3257. }
  3258. else
  3259. {
  3260. Mat.Save(fileName);
  3261. }
  3262. }
  3263. /// <summary>
3264. /// The algorithm normalizes brightness and increases contrast of the image inplace.
3265. /// For color images, an HSV representation of the image is first obtained and the V (value) channel is histogram equalized
  3266. /// </summary>
  3267. [ExposableMethod(Exposable = true)]
  3268. public void _EqualizeHist()
  3269. {
  3270. if (NumberOfChannels == 1) //Gray scale image
  3271. {
  3272. CvInvoke.EqualizeHist(this, this);
  3273. }
  3274. else //Color image
  3275. {
  3276. //Get an hsv representation of this image
  3277. Image<Hsv, TDepth> hsv = this as Image<Hsv, TDepth> ?? Convert<Hsv, TDepth>();
  3278. //equalize the V (value) channel
  3279. using (Image<Gray, TDepth> v = new Image<Gray, TDepth>(Size))
  3280. {
3281. CvInvoke.MixChannels(hsv, v, new int[] { 2, 0 }); //extract the V (value) channel, index 2 in HSV
  3282. //CvInvoke.cvSetImageCOI(hsv.Ptr, 3);
  3283. //CvInvoke.cvCopy(hsv.Ptr, v.Ptr, IntPtr.Zero);
  3284. v._EqualizeHist();
3285. CvInvoke.MixChannels(v, hsv, new int[] { 0, 2 }); //write the equalized values back into the V channel
  3286. //CvInvoke.cvCopy(v.Ptr, hsv.Ptr, IntPtr.Zero);
  3287. //CvInvoke.cvSetImageCOI(hsv.Ptr, 0);
  3288. }
  3289. if (!Object.ReferenceEquals(this, hsv))
  3290. {
  3291. ConvertFrom(hsv);
  3292. hsv.Dispose();
  3293. }
  3294. }
  3295. }
  3296. #endregion
  3297. /// <summary>
3299. /// This function loads the image data from the Mat
  3299. /// </summary>
  3300. /// <param name="mat">The Mat</param>
  3301. private void LoadImageFromMat(Mat mat)
  3302. {
  3303. Size size = mat.Size;
  3304. //Allocate data in managed memory
  3305. AllocateData(size.Height, size.Width, NumberOfChannels);
  3306. switch (mat.NumberOfChannels)
  3307. {
  3308. case 1:
  3309. //Grayscale image;
  3310. switch (mat.Depth)
  3311. {
  3312. case CvEnum.DepthType.Cv8U:
  3313. using (Image<Gray, Byte> tmp = new Image<Gray, byte>(size.Width, size.Height, mat.Step, mat.DataPointer))
  3314. ConvertFrom(tmp);
  3315. break;
  3316. case CvEnum.DepthType.Cv16U:
  3317. using (Image<Gray, UInt16> tmp = new Image<Gray, ushort>(size.Width, size.Height, mat.Step, mat.DataPointer))
  3318. ConvertFrom(tmp);
  3319. break;
  3320. case CvEnum.DepthType.Cv32F:
  3321. using (Image<Gray, float> tmp = new Image<Gray, float>(size.Width, size.Height, mat.Step, mat.DataPointer))
  3322. ConvertFrom(tmp);
  3323. break;
  3324. case CvEnum.DepthType.Cv64F:
  3325. using (Image<Gray, double> tmp = new Image<Gray, double>(size.Width, size.Height, mat.Step, mat.DataPointer))
  3326. ConvertFrom(tmp);
  3327. break;
  3328. default:
  3329. throw new NotImplementedException(String.Format("Loading of {0}, {1} channel image is not implemented.", mat.Depth, mat.NumberOfChannels));
  3330. }
  3331. break;
  3332. case 3:
  3333. //BGR image
  3334. switch (mat.Depth)
  3335. {
  3336. case CvEnum.DepthType.Cv8U:
  3337. using (Image<Bgr, Byte> tmp = new Image<Bgr, byte>(size.Width, size.Height, mat.Step, mat.DataPointer))
  3338. ConvertFrom(tmp);
  3339. break;
  3340. case CvEnum.DepthType.Cv16U:
  3341. using (Image<Bgr, UInt16> tmp = new Image<Bgr, ushort>(size.Width, size.Height, mat.Step, mat.DataPointer))
  3342. ConvertFrom(tmp);
  3343. break;
  3344. case CvEnum.DepthType.Cv32F:
  3345. using (Image<Bgr, float> tmp = new Image<Bgr, float>(size.Width, size.Height, mat.Step, mat.DataPointer))
  3346. ConvertFrom(tmp);
  3347. break;
  3348. case CvEnum.DepthType.Cv64F:
  3349. using (Image<Bgr, double> tmp = new Image<Bgr, double>(size.Width, size.Height, mat.Step, mat.DataPointer))
  3350. ConvertFrom(tmp);
  3351. break;
  3352. default:
  3353. throw new NotImplementedException(String.Format("Loading of {0}, {1} channel image is not implemented.", mat.Depth, mat.NumberOfChannels));
  3354. }
  3355. break;
  3356. default:
  3357. throw new NotImplementedException(String.Format("Loading of {0}, {1} channel image is not implemented.", mat.Depth, mat.NumberOfChannels));
  3358. }
  3359. }
  3360. /// <summary>
3362. /// This function loads the image data from the iplImage pointer
  3362. /// </summary>
  3363. /// <param name="iplImage">The pointer to the iplImage</param>
  3364. private void LoadImageFromIplImagePtr(IntPtr iplImage)
  3365. {
  3366. MIplImage mptr = (MIplImage)Marshal.PtrToStructure(iplImage, typeof(MIplImage));
  3367. Size size = new Size(mptr.Width, mptr.Height);
  3368. //Allocate data in managed memory
  3369. AllocateData(size.Height, size.Width, NumberOfChannels);
  3370. if (mptr.NChannels == 1)
  3371. { //Grayscale image;
  3372. switch (mptr.Depth)
  3373. {
  3374. case CvEnum.IplDepth.IplDepth_8U:
  3375. using (Image<Gray, Byte> tmp = new Image<Gray, byte>(mptr.Width, mptr.Height, mptr.WidthStep, mptr.ImageData))
  3376. ConvertFrom(tmp);
  3377. break;
  3378. case CvEnum.IplDepth.IplDepth16U:
  3379. using (Image<Gray, UInt16> tmp = new Image<Gray, ushort>(mptr.Width, mptr.Height, mptr.WidthStep, mptr.ImageData))
  3380. ConvertFrom(tmp);
  3381. break;
  3382. case CvEnum.IplDepth.IplDepth32F:
  3383. using (Image<Gray, float> tmp = new Image<Gray, float>(mptr.Width, mptr.Height, mptr.WidthStep, mptr.ImageData))
  3384. ConvertFrom(tmp);
  3385. break;
  3386. case CvEnum.IplDepth.IplDepth64F:
  3387. using (Image<Gray, double> tmp = new Image<Gray, double>(mptr.Width, mptr.Height, mptr.WidthStep, mptr.ImageData))
  3388. ConvertFrom(tmp);
  3389. break;
  3390. default:
  3391. throw new NotImplementedException(String.Format("Loading of {0}, {1} channel image is not implemented.", mptr.Depth, mptr.NChannels));
  3392. }
  3393. }
  3394. else if (mptr.NChannels == 3)
  3395. { //BGR image
  3396. switch (mptr.Depth)
  3397. {
  3398. case CvEnum.IplDepth.IplDepth_8U:
  3399. using (Image<Bgr, Byte> tmp = new Image<Bgr, byte>(mptr.Width, mptr.Height, mptr.WidthStep, mptr.ImageData))
  3400. ConvertFrom(tmp);
  3401. break;
  3402. case CvEnum.IplDepth.IplDepth16U:
  3403. using (Image<Bgr, UInt16> tmp = new Image<Bgr, ushort>(mptr.Width, mptr.Height, mptr.WidthStep, mptr.ImageData))
  3404. ConvertFrom(tmp);
  3405. break;
  3406. case CvEnum.IplDepth.IplDepth32F:
  3407. using (Image<Bgr, float> tmp = new Image<Bgr, float>(mptr.Width, mptr.Height, mptr.WidthStep, mptr.ImageData))
  3408. ConvertFrom(tmp);
  3409. break;
  3410. case CvEnum.IplDepth.IplDepth64F:
  3411. using (Image<Bgr, double> tmp = new Image<Bgr, double>(mptr.Width, mptr.Height, mptr.WidthStep, mptr.ImageData))
  3412. ConvertFrom(tmp);
  3413. break;
  3414. default:
  3415. throw new NotImplementedException(String.Format("Loading of {0}, {1} channel image is not implemented.", mptr.Depth, mptr.NChannels));
  3416. }
  3417. }
  3418. else
  3419. {
  3420. throw new NotImplementedException(String.Format("Loading of {0}, {1} channel image is not implemented.", mptr.Depth, mptr.NChannels));
  3421. }
  3422. }
  3423. /// <summary>
  3424. /// Get the managed image from an unmanaged IplImagePointer
  3425. /// </summary>
  3426. /// <param name="iplImage">The pointer to the iplImage</param>
  3427. /// <returns>The managed image from the iplImage pointer</returns>
  3428. public static Image<TColor, TDepth> FromIplImagePtr(IntPtr iplImage)
  3429. {
  3430. Image<TColor, TDepth> result = new Image<TColor, TDepth>();
  3431. result.LoadImageFromIplImagePtr(iplImage);
  3432. return result;
  3433. }
  3434. /// <summary>
  3435. /// Get the jpeg representation of the image
  3436. /// </summary>
  3437. /// <param name="quality">The jpeg quality</param>
3438. /// <returns>A byte array that contains the image as jpeg data</returns>
  3439. public byte[] ToJpegData(int quality = 95)
  3440. {
  3441. using (VectorOfByte buf = new VectorOfByte())
  3442. {
  3443. CvInvoke.Imencode(".jpg", this, buf, new KeyValuePair<ImwriteFlags, int>(ImwriteFlags.JpegQuality, quality));
  3444. return buf.ToArray();
  3445. }
  3446. }
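// An encoding sketch; "frame" is a hypothetical Image<Bgr, Byte> and the file path is only an example:
//
//   byte[] jpeg = frame.ToJpegData(90);      //90 = jpeg quality
//   File.WriteAllBytes("frame.jpg", jpeg);   //or hand the buffer to a network stream, etc.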
  3447. /// <summary>
  3448. /// Get the size of the array
  3449. /// </summary>
  3450. public override System.Drawing.Size Size
  3451. {
  3452. get
  3453. {
  3454. MIplImage iplImage = this.MIplImage;
  3455. if (iplImage.Roi == IntPtr.Zero)
  3456. return new Size(iplImage.Width, iplImage.Height);
  3457. else
  3458. return ROI.Size;
  3459. //return CvInvoke.cvGetSize(_ptr);
  3460. }
  3461. }
  3462. }
  3463. /// <summary>
  3464. /// Constants used by the image class
  3465. /// </summary>
  3466. internal static class ImageConstants
  3467. {
  3468. /// <summary>
  3469. /// Offset of roi
  3470. /// </summary>
  3471. public static readonly int RoiOffset = (int)Marshal.OffsetOf(typeof(MIplImage), "Roi");
  3472. }
  3473. /// <summary>
  3474. /// Image data release mode
  3475. /// </summary>
  3476. internal enum ImageDataReleaseMode
  3477. {
  3478. /// <summary>
  3479. /// Release just the header
  3480. /// </summary>
  3481. ReleaseHeaderOnly,
  3482. /// <summary>
  3483. /// Release the IplImage
  3484. /// </summary>
  3485. ReleaseIplImage
  3486. }
  3487. }