//
// ViewController.m
// UI-CoreImage
//
// Created by Bruce on 15/5/22.
// Copyright (c) 2015年 Bruce. All rights reserved.
//
/*
Core Image processes images with filters, for example to adjust saturation, brightness, or contrast.
It uses the GPU (or the CPU, depending on how the context is configured) to process image data and video frames very quickly, even in real time. Multiple Core Image filters can be chained together, producing several effects in a single pass.
The base classes used most often when working with filters:
CIContext: the image context. It manages the whole processing pipeline; different contexts use different hardware (CPU or GPU) to do the work.
CPU rendering can run in the background, but it is slower, so it suits non-real-time work such as still images. GPU rendering cannot run in the background, but it is faster, so it suits real-time work such as rendering each frame of a video. (Creating each kind of context is sketched in the CIContext section below.)
All image processing happens inside a CIContext, much like the managed object context in Core Data.
-----------------------------------------------
CIImage is the most basic image object in the Core Image framework. It holds not only the original image data but also the chain of filters applied on top of it. Note that a CIImage differs from other image types: until a CIContext renders it, it is only a recipe built from the filter chain; the filters never modify the image data inside the CIImage.
A CIImage cannot be created directly from a UIImage. The class methods below create one (to start from a UIImage, go through its CGImage, as sketched after the list):
// 1. From a URL, as the source for filtering:
CIImage *image = [CIImage imageWithContentsOfURL:myURL];
// 2. From NSData:
CIImage *image = [CIImage imageWithData:myData];
// 3. From a CGImage:
CIImage *image = [CIImage imageWithCGImage:myCgimage];
// 4. From a CVPixelBuffer, to filter individual video frames:
CIImage *image = [CIImage imageWithCVPixelBuffer:CVBuffer];
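A minimal sketch of the UIImage route, using this project's sample asset. (UIImage's CIImage property is only non-nil when the UIImage was itself built from a CIImage, so going through CGImage is the reliable path.)
// Wrap an existing UIImage by passing its CGImage representation:
UIImage *uiImage = [UIImage imageNamed:@"1111.jpg"];
CIImage *ciImage = [CIImage imageWithCGImage:uiImage.CGImage];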
-----------------------------------------------
CIFilter: an image-processing filter; each kind of filter takes its own set of parameters.
A filter is backed by a dictionary that defines the attributes specific to it. There are many kinds, such as vibrance, color inversion, cropping, and so on.
CIFilter represents the filters Core Image provides. Inputs are set through key-value coding; once they are set, the CIFilter can produce a new output CIImage. That output image is not actually rendered; it only holds a reference to the input image together with the filter chain on top of it.
CIFilter offers a simple way to query the available filters:
[CIFilter filterNamesInCategory:kCICategoryBuiltIn]; // returns an array with the names of all filters in the kCICategoryBuiltIn category
[CIFilter filterNamesInCategories:nil]; // returns the names of all available filters
Calling attributes on a filter instance returns its detailed description.
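For example, a small sketch that reads the documented range of CISepiaTone's inputIntensity out of its attributes dictionary (kCIAttributeSliderMin/Max are standard Core Image attribute keys; CISepiaTone is the filter used throughout this file):
// Query the filter's attributes for the inputIntensity parameter:
CIFilter *sepia = [CIFilter filterWithName:@"CISepiaTone"];
NSDictionary *intensityAttrs = [sepia attributes][kCIInputIntensityKey];
NSLog(@"inputIntensity range: %@ - %@",
      intensityAttrs[kCIAttributeSliderMin], intensityAttrs[kCIAttributeSliderMax]);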
-----------------------------------------------
CIContext renders a CIImage, applying the image and its filter chain to the original pixel data.
A CIContext can be CPU-based or GPU-based.
The differences between the two:
CPU rendering is more reliable in most cases and easier to use, and it can run in the background.
GPU rendering uses OpenGL ES 2.0 to draw the image; the CPU carries essentially no load, the app's run loop is unaffected by rendering, and it is faster than CPU rendering, but it cannot run in the background.
Which to choose depends on the situation: for complex filters the GPU is preferable, but when processing video and writing the result to a file, or saving photos to the photo library, use the CPU so the save is not cut off when the app leaves the foreground. With contextWithOptions:nil the software renderer is off, so Core Image uses the GPU where available; pass an option to force CPU rendering, as sketched below.
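A minimal sketch of creating each kind of context. kCIContextUseSoftwareRenderer and contextWithEAGLContext: are documented Core Image API; the GPU variant assumes the OpenGLES framework (<OpenGLES/EAGL.h>) is linked:
// CPU-bound context: force the software renderer.
CIContext *cpuContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer: @YES}];
// GPU-bound context: back it with an OpenGL ES 2.0 context.
EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
CIContext *gpuContext = [CIContext contextWithEAGLContext:glContext];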
-----------------------------------------------
As of this writing iOS ships with 127 built-in filters. They are all used the same way; only their parameters differ. Search the iOS documentation for "Core Image Filter Reference" for a detailed description and a sample rendering of every filter.
Creating a filter effect with Core Image generally takes these steps (sepia2 below walks through them end to end):
1. Create the image context, CIContext.
2. Create the filter, CIFilter.
3. Create the source CIImage to be filtered.
4. Call the filter's setValue:forKey: to hand it the source image.
5. Set the filter's parameters (optional).
6. Take the output image and display or save it.
Real rendering only happens once the CIContext is used; applying a CIFilter does not render anything immediately, which is what makes filters stackable (the oldImage:inputIntensity: method below chains two filters this way).
-----------------------------------------------
Things to watch out for:
Image rendering (with the CPU renderer) can take a long time, so move the rendering work onto a background thread (sepia3 below does this).
CIImage and CIContext are thread-safe; CIFilter is not and must never be shared between threads (important).
-------------------------------------------------
Detecting features in an image
CIDetector and CIFeature
CIDetector analyzes a CIImage and produces CIFeature objects. Each CIDetector is initialized with a detector type, which tells it what kind of feature to look for in the image.
When a CIDetector analyzes an image it returns an array of detected CIFeature objects. If the detector was initialized to look for faces, the array is filled with CIFaceFeature objects; each one carries the face's bounding CGRect (in image coordinates) and CGPoints for the detected positions of the left eye, right eye, and mouth.
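A small sketch of reading those landmarks (the properties are standard CIFaceFeature API; `faces` is assumed to be the array returned by featuresInImage:):
for (CIFaceFeature *face in faces) {
    NSLog(@"face bounds: %@", NSStringFromCGRect(face.bounds));
    if (face.hasLeftEyePosition)  NSLog(@"left eye: %@",  NSStringFromCGPoint(face.leftEyePosition));
    if (face.hasRightEyePosition) NSLog(@"right eye: %@", NSStringFromCGPoint(face.rightEyePosition));
    if (face.hasMouthPosition)    NSLog(@"mouth: %@",     NSStringFromCGPoint(face.mouthPosition));
}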
*/
#import "ViewController.h"
#import <CoreImage/CoreImage.h>
#import <AssetsLibrary/AssetsLibrary.h>
@interface ViewController ()<UINavigationControllerDelegate, UIImagePickerControllerDelegate>
{
UIImageView *imageView;
UIActivityIndicatorView *indicatorView;
CIFilter *myFilter;
CIImage *inPutImage;
CIContext *myContext;
float curValue;
}
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Log the names of all built-in filters
[self showAllFilters];
imageView = [[UIImageView alloc]initWithFrame:self.view.frame];
imageView.image = [UIImage imageNamed:@"1111.jpg"];
// imageView.contentMode = UIViewContentModeScaleAspectFit;
[self.view addSubview:imageView];
// indicatorView = [[UIActivityIndicatorView alloc]initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
// indicatorView.center = self.view.center;
// [self.view addSubview:indicatorView];
//
// [indicatorView startAnimating];
[self createSliderView];
[self openAlbumButton];
[self saveImageButton];
// NSURL *imageUrl = [[NSURL alloc]initFileURLWithPath:[[NSBundle mainBundle] pathForResource:@"1111" ofType:@"jpg"]];
// CIImage *image = [CIImage imageWithContentsOfURL:imageUrl];
// [self detectorFaceWithImage:image];
}
- (void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
curValue = 0.0;
UISlider *slider = (UISlider *)[self.view viewWithTag:1111];
[slider setValue:curValue];
}
#pragma mark ---------- List all built-in filters ----------
-(void)showAllFilters
{
NSArray *filterNames = [CIFilter filterNamesInCategory:kCICategoryBuiltIn];
for (NSString *filterName in filterNames) {
CIFilter *filter = [CIFilter filterWithName:filterName];
NSLog(@"filterName:%@ attributes:%@", filterName, [filter attributes]);
}
NSLog(@"filterNames.count:%lu", (unsigned long)filterNames.count);
}
// Sepia filter
- (void)sepia1
{
NSURL *imageUrl = [[NSURL alloc]initFileURLWithPath:[[NSBundle mainBundle] pathForResource:@"1111" ofType:@"jpg"]];
CIImage *image = [CIImage imageWithContentsOfURL:imageUrl];
// Create a CISepiaTone filter
CIFilter *sepia = [CIFilter filterWithName:@"CISepiaTone"];
// Set the filter's input image
[sepia setValue:image forKey:kCIInputImageKey];
// Set the filter's input intensity
[sepia setValue:@1 forKey:kCIInputIntensityKey];
// Produce the output image
CIImage *outImage = [sepia outputImage];
imageView.image = [UIImage imageWithCIImage:outImage];
[indicatorView stopAnimating];
// Earlier we said a CIContext is needed, yet this example never creates one: the UIImage method imageWithCIImage: handles that step for us, implicitly creating a CIContext and using it to render the filtered image.
}
// Use a CIContext to render the result into a CGImageRef
- (void)sepia2
{
NSURL *url = [[NSURL alloc]initFileURLWithPath:[[NSBundle mainBundle] pathForResource:@"1111" ofType:@"jpg"]];
CIImage *image = [CIImage imageWithContentsOfURL:url];
// Initialize the context
CIContext *context = [CIContext contextWithOptions:nil];
CIFilter *filter = [CIFilter filterWithName:@"CISepiaTone"];
// Set the filter's input image
[filter setValue:image forKey:kCIInputImageKey];
// Set the filter's input intensity
[filter setValue:@1 forKey:kCIInputIntensityKey];
CIImage *outputImage = [filter outputImage];
// CGImageRef is the rendered bitmap data
// CIImage's extent property is analogous to bounds: it describes the image's rectangle
CGImageRef imageRef = [context createCGImage:outputImage fromRect:[outputImage extent]];
imageView.image = [UIImage imageWithCGImage:imageRef];
// createCGImage: follows the Core Foundation create rule, so release the CGImageRef once UIImage has taken ownership
CGImageRelease(imageRef);
[indicatorView stopAnimating];
}
- (void)sepia3
{
dispatch_queue_t global = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
dispatch_async(global, ^{
NSURL *url = [[NSURL alloc]initFileURLWithPath:[[NSBundle mainBundle] pathForResource:@"1111" ofType:@"jpg"]];
CIImage *image = [CIImage imageWithContentsOfURL:url];
// Initialize the context
CIContext *context = [CIContext contextWithOptions:nil];
CIFilter *filter = [CIFilter filterWithName:@"CICircularScreen"];
// Set the filter's input image
[filter setValue:image forKey:kCIInputImageKey];
CIImage *outputImage = [filter outputImage];
// CGImageRef is the rendered bitmap data
// CIImage's extent property is analogous to bounds: it describes the image's rectangle
CGImageRef imageRef = [context createCGImage:outputImage fromRect:[outputImage extent]];
dispatch_async(dispatch_get_main_queue(), ^{
imageView.image = [UIImage imageWithCGImage:imageRef];
// Balance createCGImage: (create rule) after UIImage has taken ownership
CGImageRelease(imageRef);
[indicatorView stopAnimating];
});
});
}
// Use a slider to change the strength of the filter applied to the image
- (void)createSliderView
{
UISlider *slider = [[UISlider alloc]initWithFrame:CGRectMake(10, 20, self.view.frame.size.width, 10)];
slider.tag = 1111;
slider.minimumValue = 0.0;
slider.maximumValue = 1.0;
slider.value = curValue;
[slider addTarget:self action:@selector(changeValue:) forControlEvents:UIControlEventValueChanged];
[self.view addSubview:slider];
NSURL *url = [[NSURL alloc]initFileURLWithPath:[[NSBundle mainBundle] pathForResource:@"1111" ofType:@"jpg"]];
inPutImage = [CIImage imageWithContentsOfURL:url];
myFilter = [CIFilter filterWithName:@"CISepiaTone"];
[myFilter setValue:inPutImage forKey:kCIInputImageKey];
myContext = [CIContext contextWithOptions:nil];
}
// Adjust the filter as the slider moves
- (void)changeValue:(UISlider *)sender
{
curValue = sender.value;
// [myFilter setValue:@(curValue) forKey:kCIInputIntensityKey];
// CIImage *outPutImage = [myFilter outputImage];
// CGImageRef imageRef = [myContext createCGImage:outPutImage fromRect:[outPutImage extent]];
// imageView.image = [UIImage imageWithCGImage:imageRef];
imageView.image = [UIImage imageWithCIImage:[self oldImage:inPutImage inputIntensity:curValue]];
}
- (void)openAlbumButton
{
UIButton *button = [UIButton buttonWithType:UIButtonTypeCustom];
button.frame = CGRectMake(200, 400, 80, 40);
button.backgroundColor = [UIColor grayColor];
[button setTitle:@"Album" forState:UIControlStateNormal];
[button addTarget:self action:@selector(openAlbum) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:button];
}
- (void)openAlbum
{
UIImagePickerController *pickerController = [[UIImagePickerController alloc]init];
pickerController.delegate = self;
[self presentViewController:pickerController animated:YES completion:nil];
}
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
NSLog(@"%@", info);
UIImage *selectImage = info[UIImagePickerControllerOriginalImage];
inPutImage = [CIImage imageWithCGImage:selectImage.CGImage];
[myFilter setValue:inPutImage forKey:kCIInputImageKey];
imageView.image = [UIImage imageWithCIImage:inPutImage];
[self dismissViewControllerAnimated:YES completion:nil];
}
- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker
{
[self dismissViewControllerAnimated:YES completion:nil];
}
- (void)saveImageButton
{
UIButton *button = [UIButton buttonWithType:UIButtonTypeCustom];
button.frame = CGRectMake(200, 400+50, 80, 40);
button.backgroundColor = [UIColor grayColor];
[button setTitle:@"saveImage" forState:UIControlStateNormal];
[button addTarget:self action:@selector(saveImage) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:button];
}
- (void)saveImage
{
CIImage *image = [myFilter outputImage];
CGImageRef imageRef = [myContext createCGImage:image fromRect:image.extent];
ALAssetsLibrary *library = [[ALAssetsLibrary alloc]init];
[library writeImageToSavedPhotosAlbum:imageRef metadata:[image properties] completionBlock:^(NSURL *assetURL, NSError *error) {
NSLog(@"error :%@",error.description);
// The write has finished, so balance createCGImage: (create rule) here
CGImageRelease(imageRef);
}];
}
// Filter chain: sepia followed by color controls
- (CIImage *)oldImage:(CIImage *)image inputIntensity:(float)intensity
{
CIFilter *sepia = [CIFilter filterWithName:@"CISepiaTone"];
[sepia setValue:image forKey:kCIInputImageKey];
[sepia setValue:@(intensity) forKey:kCIInputIntensityKey];
CIFilter *lighten = [CIFilter filterWithName:@"CIColorControls"];
[lighten setValue:sepia.outputImage forKey:kCIInputImageKey];
// inputBrightness: brightness
[lighten setValue:@(intensity) forKey:@"inputBrightness"];
// inputSaturation: saturation
[lighten setValue:@0.0 forKey:@"inputSaturation"];
CGImageRef imageRef = [myContext createCGImage:lighten.outputImage fromRect:lighten.outputImage.extent];
CIImage *resultImage = [CIImage imageWithCGImage:imageRef];
// imageWithCGImage: copies what it needs, so balance createCGImage: (create rule)
CGImageRelease(imageRef);
return resultImage;
}
#pragma mark --------- Face detection ---------
- (void)detectorFaceWithImage:(CIImage *)image
{
// Set the detector's accuracy
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:CIDetectorAccuracyHigh, CIDetectorAccuracy, nil];
// Detector type: CIDetectorTypeFace
CIDetector *faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:myContext options:options];
NSArray *faces = [faceDetector featuresInImage:image];
for (CIFaceFeature *face in faces) {
// CISourceOverCompositing composites one image (the red box) over another
// CIFilter *filter = [CIFilter filterWithName:@"CISourceOverCompositing"];
// [filter setValue:[self makeBoxForFace:face] forKey:kCIInputImageKey];
// [filter setValue:image forKey:kCIInputBackgroundImageKey];
// image = filter.outputImage;
image = [CIFilter filterWithName:@"CISourceOverCompositing" keysAndValues:kCIInputImageKey, [self makeBoxForFace:face], kCIInputBackgroundImageKey, image, nil].outputImage;
}
CGImageRef resultImageRef = [myContext createCGImage:image fromRect:image.extent];
imageView.image = [UIImage imageWithCGImage:resultImageRef];
// Balance createCGImage: (create rule) after UIImage has taken ownership
CGImageRelease(resultImageRef);
}
- (CIImage *)makeBoxForFace:(CIFaceFeature*)face
{
// Core Image expects a CIColor (not a UIColor) for kCIInputColorKey
CIColor *color = [CIColor colorWithRed:1.0 green:0.0 blue:0.0];
// CIConstantColorGenerator produces an infinite image of a single solid color
CIFilter *colorFilter = [CIFilter filterWithName:@"CIConstantColorGenerator"];
[colorFilter setValue:color forKey:kCIInputColorKey];
CIImage *outColorImage = [colorFilter outputImage];
CIFilter *cropFilter = [CIFilter filterWithName:@"CICrop"];
[cropFilter setValue:outColorImage forKey:kCIInputImageKey];
[cropFilter setValue:[CIVector vectorWithCGRect:face.bounds] forKey:@"inputRectangle"];
outColorImage = cropFilter.outputImage;
return outColorImage;
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end