2017-06-27 33 views
-2

My code runs fine in Debug mode. When I tried it in Release mode, I got this error:

Unhandled exception at 0x5E3ADF2C (msvcp120d.dll) in Project4.exe (OpenCV 3.0): 0xC0000005: Access violation reading location 0x00000000.

From what I have read on the internet, it has something to do with an uninitialized pointer or some other variable. My bet is on the vector<vector<Point> > contours; in the findSquares function. I have tried initializing it every way I can think of, but no luck so far.

I am using Visual Studio 2013 with OpenCV 3.0.0, x86 version. Here is the complete code:

#include <stdio.h> 
#include <iostream> 
#include "opencv2/core/core.hpp" 
#include "opencv2/features2d/features2d.hpp" 
#include "opencv2/highgui/highgui.hpp" 
#include "opencv2/calib3d/calib3d.hpp" 
#include <sstream> 
#include "opencv2/imgproc/imgproc.hpp" 
#include <math.h> 
#include <string.h> 
#ifndef NOMINMAX 
#define NOMINMAX 
#endif 
#include <windows.h> 
#include <algorithm> 

using namespace cv; 
using namespace std; 


// helper function: 
// finds a cosine of angle between vectors 
// from pt0->pt1 and from pt0->pt2 
static double angle(Point pt1, Point pt2, Point pt0) 
{ 
double dx1 = pt1.x - pt0.x; 
double dy1 = pt1.y - pt0.y; 
double dx2 = pt2.x - pt0.x; 
double dy2 = pt2.y - pt0.y; 
return (dx1*dx2 + dy1*dy2)/sqrt((dx1*dx1 + dy1*dy1)*(dx2*dx2 + dy2*dy2) + 
1e-10); 
} 

// returns sequence of squares detected on the image. 
// the sequence is stored in the specified memory storage 
static void findSquares(const Mat& image, vector<vector<Point> >& squares) 
{ 
    squares.clear(); 

    vector<vector<Point>> contours; 

    // find white and yellow patch 

    Mat grayscal, grayscal1; 
    cvtColor(image, grayscal, CV_BGR2GRAY); 
    // try several threshold levels 
    for (int l = 0; l < 1; l++) 
    { 

     Mat imgThresholded, imgThresholded1, imgThresholded2; 
     cv::adaptiveThreshold(grayscal, grayscal1, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY, 11, 0); 
     inRange(grayscal, Scalar(100, 100, 100), Scalar(255, 255, 255), imgThresholded1); 

    //morphological closing (fill small holes in the foreground) 
    //dilate(imgThresholded1, imgThresholded1, getStructuringElement(MORPH_RECT, Size(7, 7))); 
    erode(imgThresholded1, imgThresholded1, getStructuringElement(MORPH_RECT, Size(7, 7))); 

    // find contours and store them all as a list 
    findContours(imgThresholded1, contours, RETR_LIST, CHAIN_APPROX_SIMPLE); 
    vector<Point> approx; 

    // test each contour 
    for (size_t i = 0; i < contours.size(); i++) 
    { 
     // approximate contour with accuracy proportional 
     // to the contour perimeter 
     approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), 
     true)*0.02, true); 

     // square contours should have 4 vertices after approximation 
     // relatively large area (to filter out noisy contours) 
     // and be convex. 
     // Note: absolute value of an area is used because 
     // area may be positive or negative - in accordance with the 
     // contour orientation 
     if (approx.size() == 4 && 
      fabs(contourArea(Mat(approx))) > 4000 && 
    fabs(contourArea(Mat(approx))) < 400000 && 
      isContourConvex(Mat(approx))) 
     { 
      double maxCosine = 0; 

      for (int j = 2; j < 5; j++) 
      { 
       // find the maximum cosine of the angle between joint edges 
       double cosine = fabs(angle(approx[j % 4], approx[j - 2], 
    approx[j - 1])); 
       maxCosine = MAX(maxCosine, cosine); 
      } 

      // if cosines of all angles are small 
      // (all angles are ~90 degree) then write quandrange 
      // vertices to resultant sequence 
      if (maxCosine < 0.07) 
       squares.push_back(approx); 
      } 
     } 
    cout << "size of squares:" << squares.size() << endl; 
    } 
} 
// the function draws all the squares in the image 
cv::Mat drawSquares(Mat& image, const vector<vector<Point> >& squares) 
    { 
    std::vector<cv::Mat> listOfMatrices, listOfMatrices2; 
    vector<Point> centers; 

    int m = listOfMatrices.size(); 
    int n = listOfMatrices2.size(); 
    int q = centers.size(); 


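    // for each detected square: take the central part of its bounding box, crop that region, and record its centre point 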
    for (size_t i = 0; i < squares.size(); i++) 
    { 

    const Point* p = &squares[i][0]; 

    int n = (int)squares[i].size(); 
    Rect r = boundingRect(Mat(squares[i])); 
    cv::Size inflationSize(2, 2); 
    r -= inflationSize; 
    r.x = r.x + r.width/4; 
    r.y = r.y + r.height/4; 
    r.width = r.width/2; 
    r.height = r.height/2; 

    //dont detect the border 

    //Mat roi = image(r); 
    cv::Mat Image(image); 
    cv::Mat croppedImage = Image(Rect(r.x, r.y, r.width - 4, r.height - 4)); 

    Point center(r.x + r.width/2, r.y + r.height/2); 
    centers.push_back(center); 
    q++; 
    listOfMatrices.push_back(croppedImage); 
    m++; 

    } 

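// find the crop with the highest mean value in channel 0 (blue); this patch is treated as the white reference 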
int maxbleu = 0; 
Scalar tempVal0 = mean(listOfMatrices[0]); 
double myMAtMeanB0 = tempVal0.val[0]; 
for (int j = 1; j < q; j++) 
{ 
    Scalar tempVal = mean(listOfMatrices[j]); 
    double myMAtMeanB = tempVal.val[0]; 
    if (myMAtMeanB > myMAtMeanB0) 
    { 
     myMAtMeanB0 = myMAtMeanB; 
     maxbleu = j; 
    } 
} 

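// find the patch centre farthest from the white patch (reported as "cyan" below) 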
int maxdistance = 0, indicemax = 0, resmax = 0; 
for (int i = 0; i < q; i++) 
{ 
    //listOfMatrices[i].release(); 
    double xDiff = abs(centers[maxbleu].x - centers[i].x); 
    double yDiff = abs(centers[maxbleu].y - centers[i].y); 
    resmax = sqrt((xDiff * xDiff) + (yDiff * yDiff)); 
    if (i == maxbleu) 
    { 
     continue; 
    } 
    else if (resmax>maxdistance) 
    { 
     maxdistance = resmax; 
     indicemax = i; 
    } 
} 

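// find the patch centre closest to the white patch (reported as "gray" below) 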
int mindistance = 1000, indicemin = 0, resmin = 0; 
for (int i = 0; i < q; i++) 
{ 
    //listOfMatrices[i].release(); 
    double xDiff = abs(centers[maxbleu].x - centers[i].x); 
    double yDiff = abs(centers[maxbleu].y - centers[i].y); 
    resmin = sqrt((xDiff * xDiff) + (yDiff * yDiff)); 
    if (i == maxbleu) 
    { 
     continue; 
    } 
    else if (resmin<mindistance) 
    { 
     mindistance = resmin; 
     indicemin = i; 
    } 
} 
cout << "cyan" << centers[indicemax] << endl; 
cout << "white" << centers[maxbleu] << endl; 
cout << "gray" << centers[indicemin] << endl; 

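// extrapolate a 4x6 grid of expected patch centres from the white, cyan and gray reference patches, with separate cases for the chart orientation 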
vector<Point> centersV2; 
for (int j = 0; j < 4; j++) 
{ 
    for (int i = 0; i < 6; i++) 
    { 
     if (abs(centers[maxbleu].x - centers[indicemax].x) < 
abs(centers[maxbleu].y - centers[indicemax].y)) 
     { 
      if (centers[maxbleu].y - centers[indicemax].y > 0) 
      { 
       if (5 * abs(centers[maxbleu].x - centers[indicemin].x) > 30) 
       { 
        Point tmpV2(centers[maxbleu].x - i*(centers[maxbleu].x - 
centers[indicemin].x) - j*(centers[maxbleu].x - centers[indicemax].x)/3.3, 
centers[maxbleu].y - i*(abs(centers[maxbleu].y - centers[indicemax].y))/
5); 
        centersV2.push_back(tmpV2); 
       } 
       else { 
        Point tmpV2(centers[maxbleu].x - i*(centers[maxbleu].x - 
centers[indicemin].x) - j*(centers[maxbleu].x - centers[indicemax].x)/3, 
centers[maxbleu].y - i*(abs(centers[maxbleu].y - centers[indicemax].y))/
5); 
        centersV2.push_back(tmpV2); 
       } 
      } 
      else { 
       if (5 * abs(centers[maxbleu].x - centers[indicemin].x) > 30) 
       { 
        Point tmpV2(centers[maxbleu].x - i* 
(abs(centers[maxbleu].x - centers[indicemin].x)) - j*(abs(centers[maxbleu].x 
- centers[indicemax].x))/3.3, centers[maxbleu].y + i* 
(abs(centers[maxbleu].y - centers[indicemax].y)/5)); 
        centersV2.push_back(tmpV2); 
       } 
       else { 
        Point tmpV2(centers[maxbleu].x - i* 
    (abs(centers[maxbleu].x - centers[indicemin].x)) - j* 
    (abs(centers[maxbleu].x - centers[indicemax].x))/3, centers[maxbleu].y 
    + i*(abs(centers[maxbleu].y - centers[indicemax].y)/5)); 
        centersV2.push_back(tmpV2); 
       } 
      } 

     } 
     else { 
      if (centers[maxbleu].x - centers[indicemin].x > 0) 
      { 
       if (5 * abs(centers[maxbleu].y - centers[indicemin].y) > 30) 
       { 
        Point tmpV2(centers[maxbleu].x - i* 
(abs(centers[maxbleu].x - centers[indicemax].x)/5) + i, 
    centers[indicemin].y - i*(centers[maxbleu].y - centers[indicemin].y) - j* 
(centers[maxbleu].y - centers[indicemax].y)/3.3); 
        centersV2.push_back(tmpV2); 
       } 
       else { 
        Point tmpV2(centers[maxbleu].x - i* 
(abs(centers[maxbleu].x - centers[indicemax].x)/5) + i, 
centers[indicemin].y - i*(centers[maxbleu].y - centers[indicemin].y) - j* 
(centers[maxbleu].y - centers[indicemax].y)/3); 
        centersV2.push_back(tmpV2); 
       } 
      } 
      else { 
       if (5 * abs(centers[maxbleu].y - centers[indicemin].y) > 30) 
       { 
        Point tmpV2(centers[maxbleu].x + i* 
(abs(centers[maxbleu].x - centers[indicemax].x)/5) + i, 
centers[maxbleu].y - i*((centers[maxbleu].y - centers[indicemin].y)) - j* 
(centers[maxbleu].y - centers[indicemax].y)/3.3); 
        centersV2.push_back(tmpV2); 
       } 
       else 
       { 
        Point tmpV2(centers[maxbleu].x + i* 
    (abs(centers[maxbleu].x - centers[indicemax].x)/5) + i, 
    centers[maxbleu].y - i*((centers[maxbleu].y - centers[indicemin].y)) - j* 
    (centers[maxbleu].y - centers[indicemax].y)/3); 
        centersV2.push_back(tmpV2); 
       } 
      } 

     } 

    } 
    } 
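    // for each predicted centre: crop a region sized relative to the white patch, print its mean colour, and draw a numbered rectangle 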
    for (int i = 0; i < centersV2.size(); i++) 
    { 
    cv::Mat IImage; 
    image.copyTo(IImage); 
    cv::Mat roi = IImage(Rect(centersV2[i].x - 
    0.66*listOfMatrices[maxbleu].size().width/2, centersV2[i].y - 
    0.66*listOfMatrices[maxbleu].size().height/2, 
    0.66*listOfMatrices[maxbleu].size().width, 
    0.66*listOfMatrices[maxbleu].size().height)); 
    listOfMatrices2.push_back(roi); 
    n++; 
    cout << "centre de patchs :" << i + 1 << " :est:" << centersV2[i] << " colour :" << mean(listOfMatrices2[i]) << endl; 
    rectangle(image, Point(centersV2[i].x - 
    0.66*listOfMatrices[maxbleu].size().width, centersV2[i].y - 
    0.66*listOfMatrices[maxbleu].size().height), Point(centersV2[i].x + 
    0.66*listOfMatrices[maxbleu].size().width, centersV2[i].y + 
    0.66*listOfMatrices[maxbleu].size().height), Scalar(0, 255, 0), 4, 8, 
    0); 
    //ellipse(image, centersV2[i], Size(0.66*listOfMatrices[maxbleu].size().width, 0.66*listOfMatrices[maxbleu].size().height), 0, 0, 360, Scalar(0, 255, 0), 2, LINE_AA); 

    stringstream numero; 
    numero << i + 1; 
    putText(image, numero.str(), Point(centersV2[i].x - 15, centersV2[i].y + 
    5), 5, 2, Scalar(0, 0, 255), 4, 8, false); 
} 

return image; // return the annotated image so main can copy it into blackTOwhite 
} 

int main(int /*argc*/, char** /*argv*/) 
{ 

static const char* filename[] = { "E:/Zouhair Jimmouh-Colorimetrie/Example Etudes/Exemple2/AS1606001A-008-R045-HP-01.jpg", 0}; 

    vector<vector<Point> > Squares; 

    for (int i = 0; filename[i] != 0; i++) 
    { 
    Mat Image = imread(filename[i], 1); 
    if (Image.empty()) 
    { 
     cout << "Couldn't load " << endl; 
     //continue; 
    } 

    Mat blackTOwhite; 
    findSquares(Image, Squares); 
    (drawSquares(Image, Squares)).copyTo(blackTOwhite); 
    //show image with detected patches 
    namedWindow("RECT", CV_WINDOW_NORMAL); 
    imshow("RECT", Image); 

    int c = waitKey(); 
    if ((char)c == 27) 
     break; 
    } 

    return 0; 
} 

Any help is appreciated! I have been struggling with this for days now.

+3

Please try to create a [Minimal, Complete, and Verifiable example](http://stackoverflow.com/help/mcve). Also, please read [How do I ask a good question?](http://stackoverflow.com/help/how-to-ask). –

+1

You are linking against the debug libraries in Release. – Miki

+1

The proper tool to solve such problems is your debugger. You should step through your code line by line before asking on Stack Overflow. For more help, please read [How to debug small programs (by Eric Lippert)](https://ericlippert.com/2014/03/05/how-to-debug-small-programs/). At a minimum, you should edit your question to include a [Minimal, Complete, and Verifiable](http://stackoverflow.com/help/mcve) example that reproduces your problem, together with the observations you made in the debugger. –

Answers

0

You are mixing debug and release libraries in your linker configuration. From the comments to the question:

    I linked both opencv_world300d.lib and opencv_world300.lib in Additional Dependencies

Do not link to both. Link:

  • opencv_world300.lib in Release

  • opencv_world300d.lib in Debug
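
As a minimal sketch of one way to enforce this in an MSVC project (the #pragma comment(lib, ...) directive is an MSVC feature; the opencv_world300 library names are the ones from the answer, the rest is just one possible setup), you can select the matching library per configuration at the top of a source file instead of listing both under Additional Dependencies:

    // Pull in the OpenCV library that matches the active build configuration. 
    // _DEBUG is defined by MSVC in Debug builds only. 
    #ifdef _DEBUG 
    #pragma comment(lib, "opencv_world300d.lib")   // Debug: debug OpenCV, debug CRT (msvcp120d.dll) 
    #else 
    #pragma comment(lib, "opencv_world300.lib")    // Release: release OpenCV 
    #endif 

Equivalently, list opencv_world300d.lib only under the Debug configuration's Additional Dependencies and opencv_world300.lib only under the Release configuration's.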