/// <summary>
/// Base class for custom OpenCV filters. More convenient than plain static methods.
/// Enables OpenCV optimizations once for all derived filters.
/// </summary>
public abstract class OpenCvFilter
{
    // Runs once before the first filter is used.
    static OpenCvFilter()
    {
        Cv2.SetUseOptimized(true);
    }

    /// <summary>
    /// Supported depth types of input array.
    /// </summary>
    public abstract IEnumerable<MatType> SupportedMatTypes { get; }

    /// <summary>
    /// Applies filter to <see cref="src" /> and returns result.
    /// </summary>
    /// <param name="src">Source array.</param>
    /// <returns>Result of processing filter.</returns>
    public Mat Apply(Mat src)
    {
        var result = new Mat();
        ApplyInPlace(src, result);
        return result;
    }

    /// <summary>
    /// Applies filter to <see cref="src" /> and writes to <see cref="dst" />.
    /// </summary>
    /// <param name="src">Source array.</param>
    /// <param name="dst">Output array.</param>
    /// <exception cref="ArgumentException">Provided image does not meet the requirements.</exception>
    public void ApplyInPlace(Mat src, Mat dst)
    {
        var isSupported = SupportedMatTypes.Contains(src.Type());
        if (!isSupported)
            throw new ArgumentException("Depth type of provided Mat is not supported");

        ProcessFilter(src, dst);
    }

    /// <summary>
    /// Actual filter.
    /// </summary>
    /// <param name="src">Source array.</param>
    /// <param name="dst">Output array.</param>
    protected abstract void ProcessFilter(Mat src, Mat dst);
}
/// <summary>
/// Performs edges detection. Result will be used as base for transparency mask.
/// </summary>
/// <param name="src">BGR source image (bytes).</param>
/// <returns>Single-channel CV_32F gradient-magnitude image; caller owns the Mat.</returns>
private Mat GetGradient(Mat src)
{
    using (var preparedSrc = new Mat())
    {
        Cv2.CvtColor(src, preparedSrc, ColorConversionCodes.BGR2GRAY);
        preparedSrc.ConvertTo(preparedSrc, MatType.CV_32F, 1.0 / 255); // From 0..255 bytes to 0..1 floats

        // FIX(review): gradX/gradY labels were swapped in the original
        // (gradX was computed with xorder: 0, yorder: 1). Magnitude() is
        // symmetric, so the output is unchanged — the names now match the
        // actual differentiation axis. scale 1/4 normalizes the 3x3 Sobel kernel.
        using (var gradX = preparedSrc.Sobel(ddepth: MatType.CV_32F, xorder: 1, yorder: 0, ksize: 3, scale: 1 / 4.0))
        using (var gradY = preparedSrc.Sobel(ddepth: MatType.CV_32F, xorder: 0, yorder: 1, ksize: 3, scale: 1 / 4.0))
        {
            var result = new Mat();
            Cv2.Magnitude(gradX, gradY, result);
            return result;
        }
    }
}
/// <summary>
/// Performs edges detection with a resolution-dependent Sobel kernel.
/// Result will be used as base for transparency mask.
/// </summary>
/// <param name="src">BGR source image (bytes).</param>
/// <returns>Single-channel CV_32F gradient-magnitude image; caller owns the Mat.</returns>
private Mat GetGradient(Mat src)
{
    using (var preparedSrc = new Mat())
    {
        Cv2.CvtColor(src, preparedSrc, ColorConversionCodes.BGR2GRAY);
        // From 0..255 bytes to 0..1 floats
        preparedSrc.ConvertTo(preparedSrc, MatType.CV_32F, 1.0 / 255);

        // Calculates a Sobel derivative; kernel size depends on image resolution.
        Mat Derivative(Int32 dx, Int32 dy)
        {
            Int32 pixelCount = preparedSrc.Width * preparedSrc.Height;

            // Larger image --> larger kernel
            Int32 apertureSize;
            if (pixelCount < 1280 * 1280)
                apertureSize = 3;
            else if (pixelCount < 2000 * 2000)
                apertureSize = 5;
            else if (pixelCount < 3000 * 3000)
                apertureSize = 9;
            else
                apertureSize = 15;

            // Compensate lack of contrast on large images
            Single amplification = apertureSize == 3 ? 1 : 2;

            using (var rowKernel = new Mat())
            using (var columnKernel = new Mat())
            {
                // Get normalized Sobel kernel of desired size
                Cv2.GetDerivKernels(rowKernel, columnKernel, dx, dy, apertureSize, normalize: true);

                using (var amplifiedRowKernel = rowKernel * amplification)
                using (var amplifiedColumnKernel = columnKernel * amplification)
                {
                    return preparedSrc.SepFilter2D(
                        MatType.CV_32FC1, amplifiedRowKernel, amplifiedColumnKernel);
                }
            }
        }

        using (var gradX = Derivative(1, 0))
        using (var gradY = Derivative(0, 1))
        {
            var result = new Mat();
            Cv2.Magnitude(gradX, gradY, result);

            //Add small constant so the flood fill will perform correctly
            result += 0.15f;
            return result;
        }
    }
}
/// <summary>
/// Builds an alpha mask from the edge gradient and zeroes the background by
/// flood-filling from a relative seed point.
/// NOTE(review): the body is elided in the article ("..." below) — this
/// snippet is not compilable as-is.
/// </summary>
/// <param name="src">BGR source image.</param>
/// <param name="dst">Output array.</param>
protected override void ProcessFilter(Mat src, Mat dst)
{
    using (Mat alphaMask = GetGradient(src))
    {
        Cv2.FloodFill( // Flood fill outer space
            image: alphaMask,
            // Seed placed at the same relative coordinate on both axes;
            // presumably a point known to be background — TODO confirm against
            // the declaration of FloodFillRelativeSeedPoint.
            seedPoint: new Point(
                (Int32) (FloodFillRelativeSeedPoint * src.Width),
                (Int32) (FloodFillRelativeSeedPoint * src.Height)),
            newVal: new Scalar(0),
            rect: out Rect _,
            loDiff: new Scalar(FloodFillTolerance),
            upDiff: new Scalar(FloodFillTolerance),
            // FixedRange: tolerance is measured against the seed pixel, not
            // against each neighbour; Link4 = 4-connectivity.
            flags: FloodFillFlags.FixedRange | FloodFillFlags.Link4);
        ...
    }
}
/// <summary>
/// Builds an alpha mask from the edge gradient, dilates it to close edge gaps,
/// then flood-fills the background.
/// NOTE(review): the FloodFill arguments and the tail of the method are elided
/// in the article ("...") — this snippet is not compilable as-is.
/// </summary>
/// <param name="src">BGR source image.</param>
/// <param name="dst">Output array.</param>
protected override void ProcessFilter(Mat src, Mat dst)
{
    using (Mat alphaMask = GetGradient(src))
    {
        // Performs morphology operation on alpha mask with resolution-dependent element size
        void PerformMorphologyEx(MorphTypes operation, Int32 iterations)
        {
            // Element size scales with the linear size of the image,
            // clamped to the [3, 20] range.
            Double elementSize = Math.Sqrt(alphaMask.Width * alphaMask.Height) / 300;
            if (elementSize < 3)
                elementSize = 3;
            if (elementSize > 20)
                elementSize = 20;

            // elementSize is a Double; Size truncates it to whole pixels here.
            using (var se = Cv2.GetStructuringElement(
                MorphShapes.Ellipse, new Size(elementSize, elementSize)))
            {
                // In-place morphology; null anchor = element center.
                Cv2.MorphologyEx(alphaMask, alphaMask, operation, se, null, iterations);
            }
        }

        PerformMorphologyEx(MorphTypes.Dilate, 1); // Close small gaps in edges

        Cv2.FloodFill(...);
    }
    ...
}
// Morphology post-processing of the flood-filled mask (the order matters):
PerformMorphologyEx(MorphTypes.Erode, 1); // Compensate initial dilate
PerformMorphologyEx(MorphTypes.Open, 2); // Remove not filled small spots (noise)
PerformMorphologyEx(MorphTypes.Erode, 1); // Final erode to remove white fringes/halo around objects
// Everything non-filled becomes white
Cv2.Threshold(
    src: alphaMask,
    dst: alphaMask,
    thresh: 0,
    maxval: 255,
    type: ThresholdTypes.Binary);

// NOTE(review): after Threshold the float mask already holds 0/255, so the
// extra x255 scale overflows to 65025 and relies on saturate_cast clamping
// back to 255 — it works, but a scale of 1 would express the intent directly.
alphaMask.ConvertTo(alphaMask, MatType.CV_8UC1, 255);

// NOTE(review): GaussianBlur requires an odd kernel size — an even
// MaskBlurFactor would throw at runtime; verify the property guarantees
// odd values.
if (MaskBlurFactor > 0)
    Cv2.GaussianBlur(alphaMask, alphaMask, new Size(MaskBlurFactor, MaskBlurFactor), MaskBlurFactor);

AddAlphaChannel(src, dst, alphaMask);
/// <summary>
/// Adds transparency channel to source image and writes to output image.
/// </summary>
/// <param name="src">3-channel BGR source image.</param>
/// <param name="dst">Output array; receives the BGRA result.</param>
/// <param name="alpha">Alpha mask; owned by the caller and NOT disposed here.</param>
private static void AddAlphaChannel(Mat src, Mat dst, Mat alpha)
{
    // FIX(review): Cv2.Split allocates a new Mat per channel; the original
    // never disposed them, leaking native memory on every call.
    Mat[] bgr = Cv2.Split(src);
    try
    {
        Cv2.Merge(new[] { bgr[0], bgr[1], bgr[2], alpha }, dst);
    }
    finally
    {
        foreach (Mat channel in bgr)
            channel.Dispose();
    }
}
Source: https://habr.com/ru/post/353890/ (Habr article; code excerpts above are taken from it)