Swift Camera Filter – Realtime Capture Effect with OpenCV

A sample iOS project for a realtime Swift camera filter that applies an OpenCV capture effect (Swift & Objective-C).

[Demo: Realtime Swift Camera Filter Example]

Installation

Add the OpenCV framework through CocoaPods: add the following line to your Podfile and run pod install:

pod 'OpenCV', '2.4.9'
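For reference, a complete minimal Podfile might look like the following; the target name SwiftCameraFilter is a placeholder for your own app target:

platform :ios, '8.0'

target 'SwiftCameraFilter' do
    pod 'OpenCV', '2.4.9'
end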

Add the Camera Code to ViewController

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    
    @IBOutlet weak var imageView : UIImageView!
    
    var session : AVCaptureSession = AVCaptureSession()

    override func viewDidLoad() {
        super.viewDidLoad()
        
        // The screentone pattern shows through wherever the filters mask pixels out.
        self.imageView.backgroundColor = UIColor(patternImage: UIImage(named: "screentone")!)
        
        if self.setupCamera() {
            // Note: startRunning() blocks until the session starts; consider
            // calling it on a background queue in production code.
            self.session.startRunning()
        } else {
            assertionFailure("setupCamera error!")
        }
    }

    func setupCamera() -> Bool {
        self.session.sessionPreset = AVCaptureSessionPresetMedium
        
        var targetDevice : AVCaptureDevice?
        
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
        for device in devices {
            if device.position == .Back {
                targetDevice = device
                break
            }
        }
        
        if targetDevice == nil {
            return false
        }
        
        let input: AVCaptureDeviceInput
        do {
            input = try AVCaptureDeviceInput(device: targetDevice!)
        } catch {
            return false
        }
        
        if self.session.canAddInput(input) {
            self.session.addInput(input)
        } else {
            return false
        }
        
        do {
            try targetDevice!.lockForConfiguration()
            if targetDevice!.smoothAutoFocusSupported {
                targetDevice!.smoothAutoFocusEnabled = true
            }
            if targetDevice!.isFocusModeSupported(.ContinuousAutoFocus) {
                targetDevice!.focusMode = .ContinuousAutoFocus
            }
            // Cap capture at 15 fps to leave headroom for the OpenCV filter.
            targetDevice!.activeVideoMinFrameDuration = CMTimeMake(1, 15)
            targetDevice!.unlockForConfiguration()
        } catch let error as NSError {
            print("lock error: \(error.localizedDescription)")
            return false
        }
        
        let queue = dispatch_queue_create("realtime_filter_example_queue", DISPATCH_QUEUE_SERIAL)
        
        let output : AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(unsignedInt: kCVPixelFormatType_32BGRA)]
        output.setSampleBufferDelegate(self, queue: queue)
        output.alwaysDiscardsLateVideoFrames = true
        
        if self.session.canAddOutput(output) {
            self.session.addOutput(output)
        } else {
            return false
        }
        
        for connection in output.connections as! [AVCaptureConnection] {
            if connection.supportsVideoOrientation {
                connection.videoOrientation = AVCaptureVideoOrientation.Portrait
            }
        }
        
        return true
    }
    
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // Convert and filter on the capture queue, then hop to the main queue
        // only for the UIImageView update.
        guard let bufferImage = self.imageFromSampleBuffer(sampleBuffer) else {
            return
        }
        let filteredImage = OpenCVSampleFilter.mangaImageFromUIImage(bufferImage)
        
        dispatch_async(dispatch_get_main_queue()) {
            self.imageView.image = filteredImage
        }
    }

    // @see : http://giveitashot.hatenadiary.jp/entry/2014/10/19/190505
    
    func imageFromSampleBuffer(sampleBuffer: CMSampleBufferRef) -> UIImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }
        
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        
        // The buffer is 32BGRA (see setupCamera), so describe it as
        // little-endian with a premultiplied alpha-first layout.
        let newContext = CGBitmapContextCreate(
            CVPixelBufferGetBaseAddress(imageBuffer),
            CVPixelBufferGetWidth(imageBuffer),
            CVPixelBufferGetHeight(imageBuffer),
            8,
            CVPixelBufferGetBytesPerRow(imageBuffer),
            CGColorSpaceCreateDeviceRGB(),
            CGBitmapInfo.ByteOrder32Little.rawValue | CGImageAlphaInfo.PremultipliedFirst.rawValue
        )
        
        let imageRef = CGBitmapContextCreateImage(newContext)
        
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0)
        
        guard let cgImage = imageRef else {
            return nil
        }
        return UIImage(CGImage: cgImage)
    }
}
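One caveat that postdates the original sample: if you build against the iOS 10 SDK or later, Info.plist must contain a camera usage description, or the app terminates on first camera access. The description string below is only an example:

<key>NSCameraUsageDescription</key>
<string>The camera feed is used for the realtime filter preview.</string>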

Create a new Objective-C file named OpenCVSampleFilter, then change the extension of the .m file to .mm (OpenCVSampleFilter.mm) so it compiles as Objective-C++. When Xcode offers to create a bridging header, accept, and add an import for the OpenCVSampleFilter.h file to it.
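The bridging header itself only needs this one import (the file name follows Xcode's default ProjectName-Bridging-Header.h convention; yours may differ):

#import "OpenCVSampleFilter.h"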

Add this code to the OpenCVSampleFilter.h file:

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

@interface OpenCVSampleFilter: NSObject

+ (UIImage *)mangaImageFromUIImage:(UIImage *)image;

@end
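With the bridging header in place, the filter is callable from Swift like any other class method. A quick way to verify the effect without the camera, assuming a bundled test image named "sample" (hypothetical):

if let sample = UIImage(named: "sample") {
    let filtered = OpenCVSampleFilter.mangaImageFromUIImage(sample)
    imageView.image = filtered
}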

Add this code to the OpenCVSampleFilter.mm file:

#import <opencv2/opencv.hpp>
#import "OpenCVSampleFilter.h"

@implementation OpenCVSampleFilter

+ (UIImage *)mangaImageFromUIImage:(UIImage *)image
{
    CGImageRef monochromeCGImage = [[self class] reflectMonochromeFilter:image.CGImage];
    UIImage *monochromeImage = [UIImage imageWithCGImage:monochromeCGImage];
    CGImageRelease(monochromeCGImage);
    
    CGImageRef lineCGImage = [[self class] reflectLineFilter:image.CGImage];
    UIImage *lineImage = [UIImage imageWithCGImage:lineCGImage];
    CGImageRelease(lineCGImage);
    
    UIImage *mergedImage;
    CGRect imageRect = CGRectMake(0, 0, image.size.width, image.size.height);
    
    UIGraphicsBeginImageContext(imageRect.size);
    
    // Draw the posterized tones first, then the edge lines on top.
    [monochromeImage drawInRect:imageRect];
    [lineImage drawInRect:imageRect];
    mergedImage = UIGraphicsGetImageFromCurrentImageContext();
    
    UIGraphicsEndImageContext();
    
    if (mergedImage) {
        return mergedImage;
    }
    return [UIImage new];
}
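// How the composite works: the line filter masks out white so only its dark
// edges survive, and the monochrome filter masks out its mid-gray (100), so
// the screentone pattern set as the imageView background shows through those
// regions. Layered together, they produce the manga look.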

// @see : http://dev.classmethod.jp/smartphone/opencv-manga-2/

+ (IplImage *)iplImageFromCGImage:(CGImageRef)image
{
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    IplImage *tempIplImage = cvCreateImage(cvSize((int)CGImageGetWidth(image), (int)CGImageGetHeight(image)), IPL_DEPTH_8U, 4);
    
    // IPL_DEPTH_8U == 8, so ->depth doubles as the bits-per-component argument.
    CGContextRef context = CGBitmapContextCreate(tempIplImage->imageData,
                                                 tempIplImage->width,
                                                 tempIplImage->height,
                                                 tempIplImage->depth,
                                                 tempIplImage->widthStep,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
    
    CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    
    // Drop the alpha channel: the filter methods below expect 3-channel RGB images.
    IplImage *iplImage = cvCreateImage(cvGetSize(tempIplImage), IPL_DEPTH_8U, 3);
    cvCvtColor(tempIplImage, iplImage, CV_RGBA2RGB);
    
    cvReleaseImage(&tempIplImage);
    
    return iplImage;
}

// The caller owns (and must CGImageRelease) the returned image.
+ (CGImageRef)cgImageFromIplImage:(IplImage *)image
{
    NSData *data = [NSData dataWithBytes:image->imageData length:image->imageSize];
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGImageRef cgImage = CGImageCreate(image->width,
                                       image->height,
                                       image->depth,
                                       image->depth * image->nChannels,
                                       image->widthStep,
                                       colorSpace,
                                       kCGImageAlphaNone | kCGBitmapByteOrderDefault,
                                       provider,
                                       NULL,
                                       false,
                                       kCGRenderingIntentDefault);
    
    CGColorSpaceRelease(colorSpace);
    CGDataProviderRelease(provider);
    
    return cgImage;
}

+ (CGImageRef)reflectLineFilter:(CGImageRef)image
{
    IplImage *srcImage = [[self class] iplImageFromCGImage:image];
    
    IplImage *grayscaleImage = cvCreateImage(cvGetSize(srcImage), IPL_DEPTH_8U, 1);
    IplImage *edgeImage = cvCreateImage(cvGetSize(srcImage), IPL_DEPTH_8U, 1);
    IplImage *dstImage = cvCreateImage(cvGetSize(srcImage), IPL_DEPTH_8U, 3);
    
    cvCvtColor(srcImage, grayscaleImage, CV_RGB2GRAY); // the IplImage is RGB (see iplImageFromCGImage)
    cvSmooth(grayscaleImage, grayscaleImage, CV_GAUSSIAN, 3, 0, 0);
    cvCanny(grayscaleImage, edgeImage, 20, 120);
    cvNot(edgeImage, edgeImage); // invert so edges are black on white
    cvCvtColor(edgeImage, dstImage, CV_GRAY2RGB);
    
    CGImageRef effectedImage = [self cgImageFromIplImage:dstImage];
    
    cvReleaseImage(&srcImage);
    cvReleaseImage(&grayscaleImage);
    cvReleaseImage(&edgeImage);
    cvReleaseImage(&dstImage);
    
    // Mask out white so only the dark edge lines remain when composited,
    // releasing the intermediate image to avoid a leak.
    const CGFloat colorMasking[6] = {255, 255, 255, 255, 255, 255};
    CGImageRef maskedImage = CGImageCreateWithMaskingColors(effectedImage, colorMasking);
    CGImageRelease(effectedImage);
    
    return maskedImage;
}

+ (CGImageRef)reflectMonochromeFilter:(CGImageRef)image
{
    IplImage *srcImage = [[self class] iplImageFromCGImage:image];
    
    IplImage *grayscaleImage = cvCreateImage(cvGetSize(srcImage), IPL_DEPTH_8U, 1);
    IplImage *dstImage = cvCreateImage(cvGetSize(srcImage), IPL_DEPTH_8U, 3);
    
    cvCvtColor(srcImage, grayscaleImage, CV_RGB2GRAY);
    
    // Posterize to three tones: black, mid-gray (100), and white.
    for (int y = 0; y < grayscaleImage->height; y++) {
        for (int x = 0; x < grayscaleImage->width; x++) {
            int a = grayscaleImage->widthStep * y + x;
            uchar p = (uchar)grayscaleImage->imageData[a];
            
            if (p < 70) {
                grayscaleImage->imageData[a] = 0;           // black
            } else if (p < 120) {
                grayscaleImage->imageData[a] = 100;         // gray
            } else {
                grayscaleImage->imageData[a] = (char)255;   // white
            }
        }
    }
    
    cvCvtColor(grayscaleImage, dstImage, CV_GRAY2RGB);
    
    CGImageRef effectedImage = [self cgImageFromIplImage:dstImage];
    
    cvReleaseImage(&srcImage);
    cvReleaseImage(&grayscaleImage);
    cvReleaseImage(&dstImage);
    
    // Mask out the mid-gray so the screentone background shows through,
    // releasing the intermediate image to avoid a leak.
    const CGFloat colorMasking[6] = {100, 100, 100, 100, 100, 100};
    CGImageRef maskedImage = CGImageCreateWithMaskingColors(effectedImage, colorMasking);
    CGImageRelease(effectedImage);
    
    return maskedImage;
}

@end
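A note on the API: the IplImage / cv* functions above are the C interface that ships with the OpenCV 2.4.x pod pinned in the Podfile. OpenCV 3 and later deprecate the C API in favor of cv::Mat and the C++ interface, so this wrapper would need rework if you upgrade the pod.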

Download

Swift Camera Filter
