我多次使用CIDetector,如下所示:
// Detects faces in the given image and returns an NSArray of Face objects
// whose rect and eye/mouth points are flipped into UIKit's top-left-origin
// coordinate space. Returns an empty array when no face is found.
//
// NOTE(review): CIDetectorImageOrientation expects a TIFF/EXIF orientation
// value in 1...8, but the original code passed the raw UIImageOrientation
// enum (0...7, different ordering), so detection was wrong for any image
// that was not UIImageOrientationUp. The switch below performs the mapping.
- (NSArray *)detect:(UIImage *)inimage
{
    UIImage *inputimage = inimage;

    // Map UIImageOrientation to the EXIF orientation Core Image expects.
    int exifOrientation;
    switch (inputimage.imageOrientation) {
        case UIImageOrientationUp:            exifOrientation = 1; break;
        case UIImageOrientationDown:          exifOrientation = 3; break;
        case UIImageOrientationLeft:          exifOrientation = 8; break;
        case UIImageOrientationRight:         exifOrientation = 6; break;
        case UIImageOrientationUpMirrored:    exifOrientation = 2; break;
        case UIImageOrientationDownMirrored:  exifOrientation = 4; break;
        case UIImageOrientationLeftMirrored:  exifOrientation = 5; break;
        case UIImageOrientationRightMirrored: exifOrientation = 7; break;
    }

    // The orientation belongs in the *detector* options; passing it as a
    // CIImage creation option (as the original did) has no effect there.
    CIImage *ciimage = [CIImage imageWithCGImage:inputimage.CGImage];
    NSDictionary *detectorOptions = @{CIDetectorImageOrientation : @(exifOrientation)};
    NSArray *features = [self.detector featuresInImage:ciimage options:detectorOptions];
    if (features.count == 0) {
        PXLog(@"no face found");
    }

    NSMutableArray *returnArray = [NSMutableArray arrayWithCapacity:features.count];
    for (CIFaceFeature *feature in features) {
        // Core Image uses a bottom-left origin; flip y into UIKit space.
        CGRect rect = feature.bounds;
        CGRect r = CGRectMake(rect.origin.x,
                              inputimage.size.height - rect.origin.y - rect.size.height,
                              rect.size.width,
                              rect.size.height);
        // The original used [[FaceFeatures new] initWith...:], which runs
        // init twice on the same object; alloc + designated init is correct.
        FaceFeatures *ff = [[FaceFeatures alloc] initWithLeftEye:CGPointMake(feature.leftEyePosition.x, inputimage.size.height - feature.leftEyePosition.y)
                                                        rightEye:CGPointMake(feature.rightEyePosition.x, inputimage.size.height - feature.rightEyePosition.y)
                                                           mouth:CGPointMake(feature.mouthPosition.x, inputimage.size.height - feature.mouthPosition.y)];
        Face *ob = [[Face alloc] initFaceInRect:r withFaceFeatures:ff];
        [returnArray addObject:ob];
    }
    // No manual nil-ing needed under ARC; locals are released on scope exit.
    return returnArray;
}
// Lazily-created Core Image context, reused across calls.
- (CIContext *)context
{
    if (_context) {
        return _context;
    }
    _context = [CIContext contextWithOptions:nil];
    return _context;
}
// Lazily-created face detector. Accuracy is chosen from a setting string:
// "slow" selects CIDetectorAccuracyHigh, anything else selects low accuracy.
// The user-defaults lookup is currently stubbed out to @"fast".
- (CIDetector *)detector
{
    if (_detector) {
        return _detector;
    }

#warning not checking for fast/slow detection operation
    NSString *str = @"fast";//[SettingsFunctions retrieveFromUserDefaults:@"face_detection_accuracy"];
    NSString *accuracy = [str isEqualToString:@"slow"] ? CIDetectorAccuracyHigh
                                                       : CIDetectorAccuracyLow;
    _detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                   context:nil
                                   options:@{CIDetectorAccuracy : accuracy}];
    return _detector;
}
但是在遇到各种内存问题后,根据 Instruments(性能分析工具)的分析,下面这一行似乎没有释放内存:
NSArray* features = [self.detector featuresInImage:ciimage options:detectorOptions];
我的代码中是否存在内存泄漏?
最佳答案
我遇到了相同的问题,并且重用CIDetector似乎是一个错误(或者出于缓存目的,可能是设计错误)。
通过不重复使用CIDetector,而是根据需要实例化一个实例,然后在检测完成后将其释放(或使用ARC术语,就是不保留引用),可以解决该问题。这样做有一定的成本,但是如果您按照您所说的在后台线程上进行检测,那么与无限制的内存增长相比,该成本可能是值得的。
也许更好的解决方案是:如果您要连续检测多张图像,就创建一个检测器并全部复用它(或者,如果内存增长过大,则每处理 N 张图像就释放并重新创建一个检测器——N 应该取多少需要自行尝试确定)。
我已向Apple提交了有关此问题的Radar错误:http://openradar.appspot.com/radar?id=6645353252126720
关于objective-c - CIDetector没有释放内存,我们在Stack Overflow上找到一个类似的问题:https://stackoverflow.com/questions/19156330/