2012-08-31 149 views

回答

0

如鏈接提到請在你的Mac系統中打開位於

/Applications/Utilities/Terminal.app

終端應用程序,並執行前面提到的命令。

+0

我停在「重建OpenCV的框架,」我下載兩個他提供的鏈接,但如何獲得OpenCV源後該怎麼辦? – Nims

18

如果你想在iOS上使用OpenCV,你應該使用OpenCV官方提供的框架(如版本2.4.2)。

在這裏獲取最新版本:OpenCV for iOS。將它拖放到你的項目中,並在項目的前綴頭文件(Prefix.pch)中包含它:

ExampleApp中,Prefix.pch:

// Pull OpenCV's C++ headers into every Objective-C++ (.mm) translation unit.
// The __cplusplus guard keeps plain Objective-C (.m) files compiling, since
// opencv.hpp is C++-only.
#ifdef __cplusplus 
    #import <opencv2/opencv.hpp> 
#endif 

你也得將UIImage「轉換」爲cv::Mat,以便在OpenCV中使用它。

UIImageCVMatConverter.h:

// 
// UIImageCVMatConverter.h 
// 

#import <Foundation/Foundation.h> 

// Utility class that converts between UIImage and OpenCV's cv::Mat, and
// normalizes size/orientation of camera images before processing.
@interface UIImageCVMatConverter : NSObject { 

} 

/// Converts a cv::Mat (grayscale or 4-channel) into a UIImage.
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat; 
/// Converts a cv::Mat into a UIImage, taking scale/orientation context from
/// the supplied reference image.
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage*)image; 
/// Converts a UIImage into a 4-channel (CV_8UC4) cv::Mat.
+ (cv::Mat)cvMatFromUIImage:(UIImage *)image; 
/// Converts a UIImage into a single-channel (CV_8UC1) grayscale cv::Mat.
+ (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image; 
/// Downscales (max 640 px on the longer side) and orientation-corrects a
/// front-camera image.
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image; 
/// Downscales (max 640 px on the longer side) and orientation-corrects a
/// back-camera image.
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image; 

@end 

UIImageCVMatConverter.mm:

// 
// UIImageCVMatConverter.mm 
// 

#import "UIImageCVMatConverter.h" 

@implementation UIImageCVMatConverter 

// Converts a cv::Mat into a UIImage, preserving the scale and orientation
// metadata of the supplied reference image.
//
// NOTE(review): the previous implementation never read `cvMat` at all — it
// simply redrew `image` into a fresh bitmap context and returned that. It
// also called CGColorSpaceRelease on the color space obtained from
// CGImageGetColorSpace(), which follows the Get rule and must not be
// released by the caller (over-release). Delegating to +UIImageFromCVMat:
// uses the mat's actual pixels and eliminates the memory bug.
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage*)image 
{ 
    UIImage *converted = [UIImageCVMatConverter UIImageFromCVMat:cvMat]; 
    // Re-wrap the CGImage so the result carries the reference image's
    // scale factor and orientation.
    return [UIImage imageWithCGImage:converted.CGImage 
                               scale:image.scale 
                         orientation:image.imageOrientation]; 
} 

// Converts a cv::Mat into a UIImage. Supports single-channel (grayscale)
// and 4-byte-per-pixel (CV_8UC4, alpha ignored) mats.
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat 
{ 
    // Copy the pixel buffer so the UIImage stays valid after the mat dies.
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()]; 
    CGColorSpaceRef colorSpace; 
    CGBitmapInfo bitmapInfo; 
    if (cvMat.elemSize() == 1) { 
     colorSpace = CGColorSpaceCreateDeviceGray(); 
     bitmapInfo = kCGImageAlphaNone | kCGBitmapByteOrderDefault; 
    } 
    else { 
     colorSpace = CGColorSpaceCreateDeviceRGB(); 
     // NOTE(review): 32 bpp with kCGImageAlphaNone is not a supported
     // CGImage pixel format; the fourth byte of each CV_8UC4 pixel must
     // be declared as skipped alpha or CGImageCreate fails.
     bitmapInfo = kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault; 
    } 
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data); 
    // cvMat.step[0] = bytes per row, which may include row padding.
    CGImageRef imageRef = CGImageCreate(cvMat.cols, cvMat.rows, 8, 8 * cvMat.elemSize(), cvMat.step[0], colorSpace, bitmapInfo, provider, NULL, false, kCGRenderingIntentDefault); 
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef]; 
    CGImageRelease(imageRef); 
    CGDataProviderRelease(provider); 
    CGColorSpaceRelease(colorSpace); 
    return finalImage; 
} 

// Converts a UIImage into a 4-channel RGBA mat (CV_8UC4, alpha byte
// present but ignored — kCGImageAlphaNoneSkipLast).
+ (cv::Mat)cvMatFromUIImage:(UIImage *)image 
{ 
    // NOTE(review): the previous code released the color space returned by
    // CGImageGetColorSpace(), which follows the Get rule (caller does not
    // own it) — an over-release. Create (and therefore own) a DeviceRGB
    // color space instead; CG converts grayscale sources on draw.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGFloat cols = image.size.width; 
    CGFloat rows = image.size.height; 
    cv::Mat cvMat(rows, cols, CV_8UC4); 
    // Back the bitmap context directly with the mat's buffer so drawing
    // the image fills the mat in place.
    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data, cols, rows, 8, cvMat.step[0], colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault); 
    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage); 
    CGContextRelease(contextRef); 
    CGColorSpaceRelease(colorSpace); 
    return cvMat; 
} 

// Converts a UIImage into a single-channel grayscale mat (CV_8UC1).
+ (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image 
{ 
    cv::Mat cvMat = [UIImageCVMatConverter cvMatFromUIImage:image]; 
    cv::Mat grayMat; 
    if (cvMat.channels() == 1) { 
     // Already single-channel; share the data, no conversion needed.
     grayMat = cvMat; 
    } 
    else { 
     // NOTE(review): cvMatFromUIImage: produces 4-channel RGBA data
     // (kCGImageAlphaNoneSkipLast), so CV_RGBA2GRAY is the correct code.
     // The previous CV_BGR2GRAY expects 3-channel input and asserts on
     // CV_8UC4 mats; cvtColor allocates the output itself.
     cv::cvtColor(cvMat, grayMat, CV_RGBA2GRAY); 
    } 
    return grayMat; 
} 

// Downscales a back-camera image so its longer side is at most
// kMaxResolution, and bakes the EXIF orientation into the pixel data,
// returning an up-oriented UIImage.
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image 
{ 
    // Cap on the longer output edge, in pixels.
    static int kMaxResolution = 640; 
    CGImageRef imgRef = image.CGImage; 
    CGFloat width = CGImageGetWidth(imgRef); 
    CGFloat height = CGImageGetHeight(imgRef); 
    CGAffineTransform transform = CGAffineTransformIdentity; 
    CGRect bounds = CGRectMake(0, 0, width, height); 
    // Shrink the target bounds (keeping aspect ratio) only when the source
    // exceeds the cap in either dimension.
    if (width > kMaxResolution || height > kMaxResolution) { 
    CGFloat ratio = width/height; 
    if (ratio > 1) { 
     bounds.size.width = kMaxResolution; 
     bounds.size.height = bounds.size.width/ratio; 
    } 
     else { 
     bounds.size.height = kMaxResolution; 
     bounds.size.width = bounds.size.height * ratio; 
    } 
    } 
    CGFloat scaleRatio = bounds.size.width/width; 
    CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef)); 
    CGFloat boundHeight; 
    UIImageOrientation orient = image.imageOrientation; 
    // Build the affine transform that maps the stored pixel grid into the
    // display orientation. The four 90-degree-rotated cases also swap the
    // output bounds' width/height first.
    switch(orient) { 
    case UIImageOrientationUp: 
     transform = CGAffineTransformIdentity; 
     break; 
    case UIImageOrientationUpMirrored: 
     transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0); 
     transform = CGAffineTransformScale(transform, -1.0, 1.0); 
     break; 
    case UIImageOrientationDown: 
     transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height); 
     transform = CGAffineTransformRotate(transform, M_PI); 
     break; 
    case UIImageOrientationDownMirrored: 
     transform = CGAffineTransformMakeTranslation(0.0, imageSize.height); 
     transform = CGAffineTransformScale(transform, 1.0, -1.0); 
     break; 
    case UIImageOrientationLeftMirrored: 
     boundHeight = bounds.size.height; 
     bounds.size.height = bounds.size.width; 
     bounds.size.width = boundHeight; 
     transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width); 
     transform = CGAffineTransformScale(transform, -1.0, 1.0); 
     transform = CGAffineTransformRotate(transform, 3.0 * M_PI/2.0); 
     break; 
    case UIImageOrientationLeft: 
     boundHeight = bounds.size.height; 
     bounds.size.height = bounds.size.width; 
     bounds.size.width = boundHeight; 
     transform = CGAffineTransformMakeTranslation(0.0, imageSize.width); 
     transform = CGAffineTransformRotate(transform, 3.0 * M_PI/2.0); 
     break; 
    case UIImageOrientationRightMirrored: 
     boundHeight = bounds.size.height; 
     bounds.size.height = bounds.size.width; 
     bounds.size.width = boundHeight; 
     transform = CGAffineTransformMakeScale(-1.0, 1.0); 
     transform = CGAffineTransformRotate(transform, M_PI/2.0); 
     break; 
    case UIImageOrientationRight: 
     boundHeight = bounds.size.height; 
     bounds.size.height = bounds.size.width; 
     bounds.size.width = boundHeight; 
     transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0); 
     transform = CGAffineTransformRotate(transform, M_PI/2.0); 
     break; 
    default: 
     [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"]; 
    } 
    UIGraphicsBeginImageContext(bounds.size); 
    CGContextRef context = UIGraphicsGetCurrentContext(); 
    // Left/Right rotations use a mirrored horizontal scale plus a shift so
    // the rotated image lands inside the context; all other orientations
    // apply the usual vertical flip to bridge CG's bottom-left origin.
    if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) { 
    CGContextScaleCTM(context, -scaleRatio, scaleRatio); 
    CGContextTranslateCTM(context, -height, 0); 
    } 
    else { 
    CGContextScaleCTM(context, scaleRatio, -scaleRatio); 
    CGContextTranslateCTM(context, 0, -height); 
    } 
    CGContextConcatCTM(context, transform); 
    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef); 
    UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext(); 
    UIGraphicsEndImageContext(); 
    return returnImage; 
} 

// Front-camera variant of scaleAndRotateImageBackCamera:. Downscales to at
// most kMaxResolution on the longer side and bakes the EXIF orientation
// into the pixel data.
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image 
{ 
    // Cap on the longer output edge, in pixels.
    static int kMaxResolution = 640; 
    CGImageRef imgRef = image.CGImage; 
    CGFloat width = CGImageGetWidth(imgRef); 
    CGFloat height = CGImageGetHeight(imgRef); 
    CGAffineTransform transform = CGAffineTransformIdentity; 
    CGRect bounds = CGRectMake(0, 0, width, height); 
    // Shrink the target bounds (keeping aspect ratio) only when the source
    // exceeds the cap in either dimension.
    if (width > kMaxResolution || height > kMaxResolution) { 
    CGFloat ratio = width/height; 
    if (ratio > 1) { 
     bounds.size.width = kMaxResolution; 
     bounds.size.height = bounds.size.width/ratio; 
    } else { 
     bounds.size.height = kMaxResolution; 
     bounds.size.width = bounds.size.height * ratio; 
    } 
    } 

    CGFloat scaleRatio = bounds.size.width/width; 
    CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef)); 
    CGFloat boundHeight; 
    UIImageOrientation orient = image.imageOrientation; 
    // Maps the stored pixel grid into display orientation. Unlike the
    // back-camera version, UIImageOrientationRight deliberately falls
    // through to the RightMirrored transform — presumably to compensate
    // for the front camera's mirroring (confirm against capture pipeline).
    switch(orient) { 
    case UIImageOrientationUp: 
     transform = CGAffineTransformIdentity; 
     break; 
    case UIImageOrientationUpMirrored: 
     transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0); 
     transform = CGAffineTransformScale(transform, -1.0, 1.0); 
     break; 
    case UIImageOrientationDown: 
     transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height); 
     transform = CGAffineTransformRotate(transform, M_PI); 
     break; 
    case UIImageOrientationDownMirrored: 
     transform = CGAffineTransformMakeTranslation(0.0, imageSize.height); 
     transform = CGAffineTransformScale(transform, 1.0, -1.0); 
     break; 
    case UIImageOrientationLeftMirrored: 
     boundHeight = bounds.size.height; 
     bounds.size.height = bounds.size.width; 
     bounds.size.width = boundHeight; 
     transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width); 
     transform = CGAffineTransformScale(transform, -1.0, 1.0); 
     transform = CGAffineTransformRotate(transform, 3.0 * M_PI/2.0); 
     break; 
    case UIImageOrientationLeft: 
     boundHeight = bounds.size.height; 
     bounds.size.height = bounds.size.width; 
     bounds.size.width = boundHeight; 
     transform = CGAffineTransformMakeTranslation(0.0, imageSize.width); 
     transform = CGAffineTransformRotate(transform, 3.0 * M_PI/2.0); 
     break; 
     case UIImageOrientationRight: 
    case UIImageOrientationRightMirrored: 
     boundHeight = bounds.size.height; 
     bounds.size.height = bounds.size.width; 
     bounds.size.width = boundHeight; 
     transform = CGAffineTransformMakeScale(-1.0, 1.0); 
     transform = CGAffineTransformRotate(transform, M_PI/2.0); 
     break; 
     default: 
     [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"]; 
    } 
    UIGraphicsBeginImageContext(bounds.size); 
    CGContextRef context = UIGraphicsGetCurrentContext(); 
    // Left/Right rotations use a mirrored horizontal scale plus a shift so
    // the rotated image lands inside the context; all other orientations
    // apply the usual vertical flip to bridge CG's bottom-left origin.
    if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) { 
    CGContextScaleCTM(context, -scaleRatio, scaleRatio); 
    CGContextTranslateCTM(context, -height, 0); 
    } 
    else { 
    CGContextScaleCTM(context, scaleRatio, -scaleRatio); 
    CGContextTranslateCTM(context, 0, -height); 
    } 
    CGContextConcatCTM(context, transform); 
    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef); 
    UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext(); 
    UIGraphicsEndImageContext(); 
    return returnImage; 
} 

@end 

將您的視圖控制器實現文件重命名爲 *.mm:

MyViewController.m -> MyViewController.mm 

並在您的視圖控制器中導入UIImageCVMatConverter:

#import "UIImageCVMatConverter.h" 

現在,您可以在視圖控制器內混合使用Objective-C和OpenCV的C++代碼:

cv::Mat img = [UIImageCVMatConverter cvMatFromUIImage:[UIImage imageNamed:@"my_image.png"]]; 
... 

玩得開心!

+0

嘿拉鍊文件不提取這裏它給出的錯誤 – Nims

+0

檢查了兩次。它的工作;) – dom

+0

非常有用:)感謝分享 – Prerna

2

@Nims,如@moosgummi說,它的工作原理,但我也做了以下步驟:

  • 添加 libc++.dylib 庫
  • 在「生成設置」(Build Settings)-「Apple LLVM 編譯器 XX - 語言」-「編譯源文件爲」(Compile Sources As)中選擇 Objective-C++
+0

隨着moosgummi和你的信息,它的工作非常好。感謝分享 :) – Prerna

1

你可以寫所有的類方法,或者你可以簡單地包括ios.h文件。 它有兩種方法已經寫入圖像處理。

這是我的代碼。

對不起所有的評論,我包括他們來顯示我的研究進展。

#import "JmBViewController.h" 

// Private class extension (no private members declared yet).
@interface JmBViewController() 

@end 

@implementation JmBViewController 

// Demo pipeline: loads a bundled test image, converts it to a cv::Mat,
// blurs it, runs Hough circle detection, draws the detected circles onto
// the image, and shows the result in _imgView. The commented-out sections
// are the author's earlier experiments, kept to show the progression.
- (void)viewDidLoad { 
[super viewDidLoad]; 
_imgtest = [UIImage imageNamed:@"IMG_0424.PNG"]; 

cv::Mat cvImage; 
// UIImageToMat is provided by <opencv2/highgui/ios.h> (see the header).
UIImageToMat(_imgtest, cvImage); 
if (!cvImage.empty()) { 
    cv::Mat gray; 
    // cv::Mat filteredMat; 
    // UIImageToMat yields BGRA data, hence CV_BGRA2GRAY here.
    cv::cvtColor(cvImage, gray, CV_BGRA2GRAY); 
    // cv::GaussianBlur(gray, gray, cv::Size(5, 5), 1.2, 1.2); 
    cv::vector<cv::Vec3f> circles; 

    /* 
    for(size_t i = 0; i < circles.size(); i++) 
    { 
     cv::Point center((cvRound(circles[i][0]), cvRound(circles[i][1]))); 
     int radius = cvRound(circles[i][2]); 
     cv::circle(gray, center, 3, cv::Scalar(0,255,0)); 
     cv::circle(gray, center, radius, cv::Scalar(0,0,255)); 
    } 
    */ 

// for (int i = 1; i < 15; i = i + 2) 

     // Smooth before detection to suppress noise-induced false circles.
     cv::GaussianBlur(gray, gray, cv::Size(9, 9), 1.5, 1.5); 

     // NOTE(review): `edges` is computed but never used below —
     // HoughCircles is called on `gray`, not `edges`. Likely leftover
     // experimentation; confirm before removing.
     cv::Mat edges; 
     cv::Canny(gray, edges, 0, 50); 
     //gray.setTo(cv::Scalar::all(0)); 
     //gray.setTo(cv::Scalar::all(255), edges); 
     // Args: dp=1, minDist=30, Canny-high=50, accumulator=20,
     // minRadius=10, maxRadius=25 (tuned for this test image).
     cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, 1, 30, 50, 20, 10, 25); 
     // Draw each detection: a small filled dot at the center plus the
     // full circle outline, directly onto the color image.
     for(size_t i = 0; i < circles.size(); i++) 
     { 
      cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1])); 
      int radius = cvRound(circles[i][2]); 
      cv::circle(cvImage, center, 5, cv::Scalar::all(200), -1, 8, 0);//center 
      cv::circle(cvImage, center, radius, cv::Scalar::all(255), 3, 8, 0);//diamter 
     NSLog(@"Circles: %ld", i+1); 

     // cv::imshow(&"circles i " [ i], gray); 
    } 


    // MatToUIImage is also provided by <opencv2/highgui/ios.h>.
    _imgView.image = MatToUIImage(cvImage); 
    } 
    /* 
cv::Mat cvImage; 
cv::Mat grey; 
cv::Mat filteredMat; 
cv::vector<cv::Vec3f> circles; 
// cv::cvtColor(_imgtest, cvImage, CV_BGR2GRAY); 
cv::threshold(grey, filteredMat, 100, 255, CV_THRESH_BINARY); 
[UIImageCVMatConverter cvMatGrayFromUIImage:_imgtest]; 
// cv::cvtColor(cvImage, grey, CV_RGBA2GRAY); 
// UIImageToMat(_imgtest, cvImage); 
cv::HoughCircles(cvImage, circles, CV_HOUGH_GRADIENT, 1, 50); 
// MatToUIImage(cvImage); 
_imgView.image = [UIImageCVMatConverter UIImageFromCVMat:cvImage]; 
_imgView.image = MatToUIImage(cvImage); 
*/ 

// Do any additional setup after loading the view, typically from a nib. 
} 

// Standard memory-warning hook; nothing cached here yet, so just defer
// to the superclass implementation.
- (void)didReceiveMemoryWarning { 
    [super didReceiveMemoryWarning]; 
    // Release any resources that can be rebuilt on demand. 
} 
/* 

UIImage* MatToUIImage(const cv::Mat& image) { 
NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()]; 
CGColorSpaceRef colorSpace; 
if (image.elemSize() == 1) { 
    colorSpace = CGColorSpaceCreateDeviceGray(); 
}else { colorSpace = CGColorSpaceCreateDeviceRGB(); 
} 
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data); 

CGImageRef imageRef = CGImageCreate(image.cols, image.rows, 8, 8*image.elemSize(), image.step.p[0], colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault, provider, NULL, FALSE, kCGRenderingIntentDefault); 
UIImage *finalImage = [UIImage imageWithCGImage:imageRef]; 

return finalImage; 
} 
*/ 


@end 

我希望這有助於!

這裏是我在視圖控制器頭文件中的 #import:

#import <UIKit/UIKit.h> 
// #import "UIImageCVMatConverter.h" 
#import <opencv2/highgui/highgui_c.h> 
#import <opencv2/highgui/highgui.hpp> 
#import <opencv2/imgproc/imgproc_c.h> 
#import <opencv2/imgproc/imgproc.hpp> 
#import <opencv2/highgui/ios.h> 
#import <opencv2/core/core_c.h> 
#import <opencv2/core/core.hpp> 

@interface JmBViewController : UIViewController 

// The image view is owned by the storyboard/nib view hierarchy, so a weak
// outlet reference is correct.
@property (weak, nonatomic) IBOutlet UIImageView *imgView; 

// NOTE(review): this was declared `weak`, but nothing else keeps a strong
// reference to an image assigned in code, so it could be deallocated
// immediately after assignment ([UIImage imageNamed:]'s cache merely masks
// the bug). A plain model-object property must be strong.
@property (strong, nonatomic) UIImage *imgtest; 

@end 

無需自己編譯或製作框架,只需從OpenCV網站下載想要的版本,把它拖到項目的Frameworks下;當Xcode詢問時(如果你使用的是iOS),確保勾選「將所有文件複製到目標」。這是我發現的將框架包含到項目中最簡單的方法,無需那些終端命令和cMake的繁瑣操作。

0

不要忘記將你所有的 .m 文件轉換成 .mm 文件,否則什麼都不行。