Integrating OpenCV with Xcode

2020-09-10  copy_farmer

1. **Local integration of OpenCV**

Download link
Local integration steps
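
However the framework ends up in the project, every source file that calls OpenCV has to be compiled as Objective-C++, and the OpenCV headers should be imported before any Apple headers to avoid macro conflicts. Below is a minimal sketch of the top of such a file (the header names are the standard ones for OpenCV 3/4; the file name is just an example):

// ViewController.mm — the .mm extension makes this file Objective-C++.
// Import OpenCV before Apple headers to avoid macro conflicts (MIN/MAX, etc.).
#import <opencv2/opencv.hpp>
#import <opencv2/imgcodecs/ios.h>   // UIImageToMat / MatToUIImage
#import <UIKit/UIKit.h>

using namespace cv;
using namespace std;   // the detection code below uses unqualified Mat, dnn::, max, min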

2. **CocoaPods integration**

    pod 'OpenCV'
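
Either route gives you the same headers. The gesture-recognition method in step 3 also refers to a few members that are declared elsewhere in the view controller; the sketch below shows one plausible set of declarations, reconstructed from the identifiers used in that code (the class name is made up, and `ijkPlayer` plus the `ScreenWidth` macro are assumed to be defined elsewhere in the project):

// Assumed declarations for the members referenced by gestureTest below.
@interface PlayerViewController : UIViewController {
    cv::dnn::Net net;            // the Caffe network, accessed as self->net
    UIImageView *testImageView;  // previews the cropped 300x300 region being analysed
}

// Crops `rect` out of `image`; used to cut the region of interest out of the video frame.
- (UIImage *)ct_imageFromImage:(UIImage *)image inRect:(CGRect)rect;

- (void)gestureTest;

@end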

3. **Usage (gesture recognition)**

- (void)gestureTest {
    dispatch_async(dispatch_get_global_queue(0, 0), ^{
        // Gesture recognition
        CV_TRACE_FUNCTION();

        // Load the bundled Caffe model (MobileNetV2-SSDLite).
        NSString *modelTxt = [[NSBundle mainBundle] pathForResource:@"deploy_relu6" ofType:@"prototxt"];
        NSString *modelBin = [[NSBundle mainBundle] pathForResource:@"ssdlite-mobilenetv2_iter_100000" ofType:@"caffemodel"];
        self->net = dnn::readNetFromCaffe([modelTxt UTF8String], [modelBin UTF8String]);
        if (self->net.empty()) {
            std::cerr << "Can't load network by using the following files:" << std::endl;
            std::cerr << "prototxt:   " << [modelTxt UTF8String] << std::endl;
            std::cerr << "caffemodel: " << [modelBin UTF8String] << std::endl;
            exit(-1);
        } else {
            NSLog(@"Network loaded");
        }

        // Back on the main queue to grab the current frame and update the UI.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (!self.ijkPlayer.isPlaying) {
                return;
            }
            // Take the current video frame and crop a 300x300 region of interest.
            UIImage *image = [self.ijkPlayer thumbnailImageAtCurrentTime:0];

            int xValue = ScreenWidth / 2 - 200;
            int yValue = 40;
            image = [self ct_imageFromImage:image inRect:CGRectMake(xValue, yValue, 300, 300)];

            self->testImageView.image = image;
            Mat img;
            UIImageToMat(image, img);

            // Drop the alpha channel so the network receives a 3-channel image.
            if (img.channels() == 4) {
                cv::cvtColor(img, img, cv::COLOR_BGRA2RGB);
            }
            if (img.empty()) {
                NSLog(@"Can't read image");
                exit(-1);
            } else {
                NSLog(@"Frame captured");
            }

            // Build a 300x300 input blob with MobileNet-SSD preprocessing: scale 1/127.5, mean 127.5.
            Mat inputBlob = dnn::blobFromImage(img, 0.007843f, cv::Size(300, 300), 127.5, false);
            self->net.setInput(inputBlob, "data");
            Mat detections = self->net.forward("detection_out");

            // The SSD output is a 1x1xNx7 blob: [imageId, classId, confidence, left, top, right, bottom].
            Mat detectionMat(detections.size[2], detections.size[3], CV_32F, detections.ptr<float>());
            NSLog(@"detections = %d", detectionMat.rows);
            for (int i = 0; i < detectionMat.rows; i++) {
                float confidence = detectionMat.at<float>(i, 2);
                if (confidence > 0.2) {
                    // Box coordinates are normalised to [0, 1]; scale back to the crop and clamp to its bounds.
                    int xLeftTop     = static_cast<int>(detectionMat.at<float>(i, 3) * img.cols);
                    int yLeftTop     = static_cast<int>(detectionMat.at<float>(i, 4) * img.rows);
                    int xRightBottom = static_cast<int>(detectionMat.at<float>(i, 5) * img.cols);
                    int yRightBottom = static_cast<int>(detectionMat.at<float>(i, 6) * img.rows);
                    xLeftTop     = max(xLeftTop, 0);
                    yLeftTop     = max(yLeftTop, 0);
                    xRightBottom = min(img.cols, xRightBottom);
                    yRightBottom = min(img.rows, yRightBottom);
                    NSLog(@"xLeftTop=%d, yLeftTop=%d, xRightBottom=%d, yRightBottom=%d",
                          xLeftTop, yLeftTop, xRightBottom, yRightBottom);

                    // Shift back into the coordinate space of the full frame for any on-screen overlay.
                    CGRect frame = CGRectMake(xLeftTop + xValue, yLeftTop + yValue,
                                              xRightBottom - xLeftTop, yRightBottom - yLeftTop);
                    NSLog(@"width=%f, height=%f", frame.size.width, frame.size.height);
                }
            }
        });
    });
}
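
The loop above computes `frame` in the coordinates of the full video frame but only logs it. One way to surface the result, assuming the video is rendered 1:1 in the view (otherwise the rect needs scaling), is to move a bordered highlight view over each detection. The sketch below is not part of the original code; `overlayView` is a hypothetical `UIView` property added to the view controller:

// Hypothetical helper: position a highlight view over the detected gesture.
// Assumes @property (nonatomic, strong) UIView *overlayView; on the view controller.
- (void)showDetectionAtFrame:(CGRect)frame confidence:(float)confidence {
    if (!self.overlayView) {
        // Lazily create a transparent, green-bordered view the first time a detection appears.
        self.overlayView = [[UIView alloc] initWithFrame:CGRectZero];
        self.overlayView.layer.borderColor = [UIColor greenColor].CGColor;
        self.overlayView.layer.borderWidth = 2.0;
        self.overlayView.backgroundColor = [UIColor clearColor];
        [self.view addSubview:self.overlayView];
    }
    self.overlayView.frame = frame;
    NSLog(@"gesture at %@ (confidence %.2f)", NSStringFromCGRect(frame), confidence);
}

Call it from inside the `if (confidence > 0.2)` branch, right after `frame` is computed.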