I am comparing two images using findHomography(). Since I use the SURF and SIFT algorithms, I added the extra modules from opencv_contrib to OpenCV 3.1.0 and compiled it for the latest Android architectures. The library compiles fine with ndk-build, but I get an error in warpPerspective.

Problem: while detecting the object in the scene and computing warpPerspective, the following exception is thrown for some images:

11-10 20:47:30.990 10503-11056/ E/cv::error(): OpenCV Error: Assertion failed ((M0.type() == CV_32F || M0.type() == CV_64F) && M0.rows == 3 && M0.cols == 3) in void cv::warpPerspective(cv::InputArray, cv::OutputArray, cv::InputArray, cv::Size, int, int, const Scalar&), file /Volumes/Linux/builds/master_pack-android/opencv/modules/imgproc/src/imgwarp.cpp, line 6120 
    --------- beginning of crash 
    11-10 20:47:31.020 10503-11056/ A/libc: Fatal signal 6 (SIGABRT), code -6 in tid 11056 (Thread-31509) 
    11-10 20:47:31.122 200-200/? A/DEBUG: *** *** *** *** *** *** *** *** *** *** *** *** *** *** *** *** 
    11-10 20:47:31.122 200-200/? A/DEBUG: Build fingerprint: 'google/hammerhead/hammerhead:6.0.1/M4B30X/3237893:user/release-keys' 
    11-10 20:47:31.122 200-200/? A/DEBUG: Revision: '11' 
    11-10 20:47:31.122 200-200/? A/DEBUG: ABI: 'arm' 
    11-10 20:47:31.122 200-200/? A/DEBUG: pid: 10503, tid: 11056, name: Thread-31509 >>> <<< 
    11-10 20:47:31.122 200-200/? A/DEBUG: signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr -------- 
    11-10 20:47:31.149 200-200/? A/DEBUG:  r0 00000000 r1 00002b30 r2 00000006 r3 926e0978 
    11-10 20:47:31.149 200-200/? A/DEBUG:  r4 926e0980 r5 926e0930 r6 00000000 r7 0000010c 
    11-10 20:47:31.149 200-200/? A/DEBUG:  r8 00000047 r9 00000001 sl 00000050 fp 00000001 
    11-10 20:47:31.149 200-200/? A/DEBUG:  ip 00000006 sp 926cfc48 lr b6d4fb61 pc b6d51f50 cpsr 400f0010 
    11-10 20:47:31.174 200-200/? A/DEBUG: backtrace: 
    11-10 20:47:31.174 200-200/? A/DEBUG:  #00 pc 00041f50 /system/lib/libc.so (tgkill+12) 
    11-10 20:47:31.174 200-200/? A/DEBUG:  #01 pc 0003fb5d /system/lib/libc.so (pthread_kill+32) 
    11-10 20:47:31.174 200-200/? A/DEBUG:  #02 pc 0001c30f /system/lib/libc.so (raise+10) 
    11-10 20:47:31.174 200-200/? A/DEBUG:  #03 pc 000194c1 /system/lib/libc.so (__libc_android_abort+34) 
    11-10 20:47:31.174 200-200/? A/DEBUG:  #04 pc 000174ac /system/lib/libc.so (abort+4) 
    11-10 20:47:31.174 200-200/? A/DEBUG:  #05 pc 00666958 /data/app/-2/lib/arm/libopencv_java3.so (_ZN9__gnu_cxx27__verbose_terminate_handlerEv+344) 
    11-10 20:47:31.175 200-200/? A/DEBUG:  #06 pc 0063d7b0 /data/app/-2/lib/arm/libopencv_java3.so (_ZN10__cxxabiv111__terminateEPFvvE+4) 
    11-10 20:47:31.175 200-200/? A/DEBUG:  #07 pc 0063d7f0 /data/app/-2/lib/arm/libopencv_java3.so (_ZSt9terminatev+16) 
    11-10 20:47:31.175 200-200/? A/DEBUG:  #08 pc 0063d1cc /data/app/-2/lib/arm/libopencv_java3.so (__cxa_throw+168) 
    11-10 20:47:31.175 200-200/? A/DEBUG:  #09 pc 001e477d /data/app/-2/lib/arm/libopencv_java3.so (_ZN2cv5errorERKNS_9ExceptionE+244) 
    11-10 20:47:31.175 200-200/? A/DEBUG:  #10 pc 001e48bd /data/app/-2/lib/arm/libopencv_java3.so (_ZN2cv5errorEiRKNS_6StringEPKcS4_i+108) 
    11-10 20:47:31.175 200-200/? A/DEBUG:  #11 pc 002ca5fd /data/app/-2/lib/arm/libopencv_java3.so (_ZN2cv15warpPerspectiveERKNS_11_InputArrayERKNS_12_OutputArrayES2_NS_5Size_IiEEiiRKNS_7Scalar_IdEE+356) 
    11-10 20:47:31.175 200-200/? A/DEBUG:  #12 pc 00007375 /data/app/-2/lib/arm/libnonfree.so (_Z15detect_featuresP7_JNIEnvP8_jstringS2_i+2844) 
    11-10 20:47:31.175 200-200/? A/DEBUG:  #13 pc 022bfd23 /data/app/-2/oat/arm/base.odex (offset 0x13ce000) (boolean .NonfreeJNILib.detectFeatures(java.lang.String, java.lang.String, int)+126) 
    11-10 20:47:31.176 200-200/? A/DEBUG:  #14 pc 0258c149 /data/app/-2/oat/arm/base.odex (offset 0x13ce000) (void .tasks.AdDetectionAsyncTask$1.run()+292) 
    11-10 20:47:31.176 200-200/? A/DEBUG:  #15 pc 71c99c67 /data/dalvik-cache/arm/[email protected]@boot.oat (offset 0x1ed6000) 

Code:

#include <jni.h> 
#include <string.h> 
#include <stdio.h> 
#include <android/log.h> 

#include "opencv2/core/core.hpp" 
#include "opencv2/features2d/features2d.hpp" 
#include "opencv2/highgui/highgui.hpp" 
#include "opencv2/calib3d/calib3d.hpp" 
#include "opencv2/xfeatures2d/nonfree.hpp" 
#include "opencv2/opencv.hpp" 

using namespace std; 
using namespace cv; 

#define LOG_TAG "nonfree_jni" 
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__) 

jboolean detect_features(JNIEnv * env, jstring scenePath, jstring objectPath) { 

    const char *nativeScenePath = env->GetStringUTFChars(scenePath, NULL);
    const char *nativeObjectPath = env->GetStringUTFChars(objectPath, NULL);

    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "Object path: ----- %s \n", nativeObjectPath);
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "Scene path: ----- %s \n", nativeScenePath);

    Mat img_object = imread(nativeObjectPath, CV_LOAD_IMAGE_GRAYSCALE);
    Mat img_scene = imread(nativeScenePath, CV_LOAD_IMAGE_GRAYSCALE);

    // Release the JNI string copies only after they are no longer needed
    env->ReleaseStringUTFChars(scenePath, nativeScenePath);
    env->ReleaseStringUTFChars(objectPath, nativeObjectPath);


    if(!img_object.data || !img_scene.data){ 
     LOGI(" --(!) Error reading images "); 
     return false; 
    } 

     //-- Step 1: Detect the keypoints using SURF Detector 
     int minHessian = 400; 

    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "Image comparison rows: ----- %d \n", img_object.rows); 
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "Image comparison colums: ----- %d \n", img_object.cols); 

//  cv::xfeatures2d::SurfFeatureDetector detector(minHessian); 
     Ptr<cv::xfeatures2d::SurfFeatureDetector> detector = cv::xfeatures2d::SurfFeatureDetector::create(minHessian); 

     std::vector<KeyPoint> keypoints_object, keypoints_scene; 
     detector->detect(img_object, keypoints_object); 
     detector->detect(img_scene, keypoints_scene); 

     //-- Step 2: Calculate descriptors (feature vectors) 
//  cv::xfeatures2d::SurfDescriptorExtractor extractor; 
     Ptr<cv::xfeatures2d::SurfDescriptorExtractor> extractor = cv::xfeatures2d::SurfDescriptorExtractor::create(); 

     Mat descriptors_object, descriptors_scene; 

     extractor->compute(img_object, keypoints_object, descriptors_object); 
     extractor->compute(img_scene, keypoints_scene, descriptors_scene); 

     //-- Step 3: Matching descriptor vectors using FLANN matcher 
     FlannBasedMatcher matcher; 
     std::vector<DMatch> matches; 
     matcher.match(descriptors_object, descriptors_scene, matches); 

     double max_dist = 0; double min_dist = 100; 

     //-- Quick calculation of max and min distances between keypoints 
     for(int i = 0; i < descriptors_object.rows; i++) 
     { 
      double dist = matches[i].distance; 
      if (dist == 0) continue; 
      if(dist < min_dist) min_dist = dist; 
      if(dist > max_dist) max_dist = dist; 
     } 

     __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "-- Max dist : %f \n", max_dist); 
     __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "-- Min dist : %f \n", min_dist); 

     //-- Draw only "good" matches (i.e. whose distance is less than 3*min_dist) 
     std::vector<DMatch> good_matches; 

     for(int i = 0; i < descriptors_object.rows; i++) 
     { 
      if(matches[i].distance <= 0.1) //3*min_dist 
      { 
       good_matches.push_back(matches[i]); 
      } 
     } 

     __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "FLANN total matches -----: %zu \n", matches.size()); 
     __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "FLANN good matches -----: %zu \n", good_matches.size()); 

     Mat img_matches; 
     drawMatches(img_object, keypoints_object, img_scene, keypoints_scene, 
        good_matches, img_matches, Scalar::all(-1), Scalar::all(-1), 
        vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS); 

     //-- Localize the object 
     std::vector<Point2f> obj; 
     std::vector<Point2f> scene; 

     for(int i = 0; i < good_matches.size(); i++) 
     { 
      //-- Get the keypoints from the good matches 
      obj.push_back(keypoints_object[ good_matches[i].queryIdx ].pt); 
      scene.push_back(keypoints_scene[ good_matches[i].trainIdx ].pt); 
     } 

     if (good_matches.size() >= 5) 
     { 
      Mat H = findHomography(obj, scene, CV_RANSAC); 

      //-- Get the corners from the image_1 (the object to be "detected") 
      std::vector<Point2f> obj_corners(4); 
      obj_corners[0] = cvPoint(0,0); obj_corners[1] = cvPoint(img_object.cols, 0); 
      obj_corners[2] = cvPoint(img_object.cols, img_object.rows); obj_corners[3] = cvPoint(0, img_object.rows); 
      std::vector<Point2f> scene_corners(4); 

      Mat output, matrix; 

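      // Note: warpPerspective() asserts that the transform is a non-empty 3x3 CV_32F/CV_64F matrix;
      // this is the call that appears in the crash log above (frame #11).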
      warpPerspective(img_object, output, H, { img_scene.cols, img_scene.rows }); 

      //////////////////////////////////////////////////////////////////////////////// 

      detector->detect(output, keypoints_object); 

      //-- Step 2: Calculate descriptors (feature vectors) 
      //cv::xfeatures2d::SurfDescriptorExtractor extractor; 
      Ptr<cv::xfeatures2d::SurfDescriptorExtractor> extractor = cv::xfeatures2d::SurfDescriptorExtractor::create(); 

      extractor->compute(output, keypoints_object, descriptors_object); 
      extractor->compute(img_scene, keypoints_scene, descriptors_scene); 

      std::vector<std::vector<cv::DMatch>> matches2; 
      BFMatcher matcher; 
      matcher.knnMatch(descriptors_object, descriptors_scene, matches2, 2); 
      vector<cv::DMatch> good_matches2; 

      for (int i = 0; i < matches2.size(); ++i) 
      { 
       const float ratio = 0.8; // As in Lowe's paper; can be tuned 
       if (matches2[i][0].distance < ratio * matches2[i][1].distance) 
       { 
        good_matches2.push_back(matches2[i][0]); 
       } 
      } 

      if (matches2.size() == 0 || good_matches2.size() == 0) { 
      LOGI("End run!\n"); 
       return false; 
      } 

      double ratioOfSimilarity = static_cast<double>(good_matches2.size())/static_cast<double>(matches2.size()); 

      __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "Bruteforce total matches -----: %zu \n", matches2.size()); 
      __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "Bruteforce good matches -----: %zu \n", good_matches2.size()); 
      __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "Bruteforce similarity ratio -----: %f \n", ratioOfSimilarity); 

      if(ratioOfSimilarity >= 0.3) { 
      LOGI("End run!\n"); 
       return true; 
      } 

      LOGI("End run!\n"); 
      return false; 

     } 
     LOGI("End run!\n"); 
     return false; 
} 

What could be the problem?

Answer

This caused me grief for a long time, and I don't want anyone else to have to go through it. Here is the problem:

The cv::findHomography() function can return an empty homography matrix (0 rows x 0 cols), starting from roughly the 2.4.5 release. According to some reports, this seems to happen only when the cv::RANSAC flag is passed. When that empty matrix is then handed to cv::warpPerspective(), the assertion above fails, because the transform must be a non-empty 3x3 CV_32F or CV_64F matrix.
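A minimal sketch of the guard, assuming the same variable names as in the code above (H, obj, scene, img_object, img_scene, output): check the homography before warping and bail out if it is empty.

    Mat H = findHomography(obj, scene, CV_RANSAC);

    // findHomography() may return an empty Mat when RANSAC cannot find a model
    // (for example, too few or degenerate correspondences), so guard before warping.
    if (H.empty() || H.rows != 3 || H.cols != 3) {
        LOGI(" --(!) Homography estimation failed, skipping warpPerspective \n");
        return false;
    }

    warpPerspective(img_object, output, H, Size(img_scene.cols, img_scene.rows));

With this guard in place, the assertion in imgwarp.cpp is never reached: images for which RANSAC fails are simply reported as non-matching instead of aborting the process.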

Reference
