Emgu CV CvInvoke.cvRemap hangs when undistorting with stereo calibration data

I am trying to implement a stereo camera calibration application using Emgu CV.

My problem is that when I try to use CvInvoke.cvRemap to remap/rectify an image, the function simply hangs. No errors, no crash; it just hangs, and I left it running for 2 hours in case it was merely slow. Here is what I am doing:

  1. Capture 10 pairs of chessboard samples (left and right), making sure FindChessboardCorners succeeds on every one. I do nothing special to synchronize the cameras; I simply grab from both at the same time.
  2. Build a set of object points based on the chessboard being used.
  3. Run a separate CalibrateCamera for the left and right images of each sample, using the object points from step 2 and the image points from step 1.
  4. Run StereoCalibrate using the IntrinsicCameraParameters produced by CalibrateCamera in step 3, the object points from step 2, and the chessboard image points from step 1.
  5. Run StereoRectify using the IntrinsicCameraParameters from steps 3/4.
  6. Generate mapx and mapy for both left and right with cvInitUndistortRectifyMap, using the output from step 5.
  7. Attempt cvRemap using the mapx and mapy from step 6 and fresh images captured from the cameras (see the condensed sketch after this list). NEXT: use StereoBM.FindStereoCorrespondence and PointCollection.ReprojectImageTo3D to build a point cloud from my hopefully calibrated stereo data.
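
To make the failing part easier to see, here is a condensed sketch of what steps 6 and 7 boil down to for the left camera (same Emgu CV calls and variable names as in the full code further down; R1/P1 are outputs of StereoRectify from step 5):

// Step 6: build the left-camera maps (currently Matrix<float>); R1/P1 come from StereoRectify in step 5
Matrix<float> mapxLeft = new Matrix<float>(new Size(_imageWidth, _imageHeight));
Matrix<float> mapyLeft = new Matrix<float>(new Size(_imageWidth, _imageHeight));
CvInvoke.cvInitUndistortRectifyMap(_inPramsLeft.IntrinsicMatrix.Ptr, _inPramsLeft.DistortionCoeffs.Ptr, R1.Ptr, P1.Ptr, mapxLeft.Ptr, mapyLeft.Ptr);

// Step 7: remap a fresh frame into a rectified copy -- this is the call that never returns
Image<Gray, Byte> imgLeft = camLeft.lastFrameGray.Clone();
CvInvoke.cvRemap(camLeft.lastFrameGray.Ptr, imgLeft.Ptr, mapxLeft.Ptr, mapyLeft.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));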

So when I get to step 7, cvRemap simply hangs. I already have cvRemap working for a single-camera capture, so I know the function works with my setup at least to some extent.

I wrote a class to manage multiple cameras:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;

using Emgu.CV;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.VideoSurveillance;


namespace Capture2Cams
{
    class camData
    {
        public Capture capture;
        public Image<Bgr, Byte> lastFrame;
        public Image<Gray, Byte> lastFrameGray;
        public bool lastChessboardFound;
        public PointF[] lastChessboardCorners;
        public Image<Gray, Byte>[] samplesGray;
        public PointF[][] samplesChessboardCorners;
        public Size cbDimensions;
        public Size imageDimensions;
        public int cursampleIndex = 0;
        public ImageList sampleIcons;

        private Image<Gray, Byte> _chessBoardDisplay;
        private int _iconWidth = 160;
        private int _icnonHeight = 90;

        private int _numSamples = 0;
        public int numSamples()
        {
            return _numSamples;
        }

        public void numSamples(int val)
        {
            _numSamples = val;
            this.samplesGray = new Image<Gray, Byte>[val];
            this.samplesChessboardCorners = new PointF[val][];

            this.sampleIcons.ImageSize = new Size(_iconWidth, _icnonHeight);
            Bitmap tmp = new Bitmap(_iconWidth, _icnonHeight);
            this.sampleIcons.Images.Clear();
            for (int c = 0; c < _numSamples; c++) this.sampleIcons.Images.Add(tmp);
        }



        public camData(int camIndex, int capWidth, int capHeight, int pcbWidth, int pcbHeight, int pNumSamples)
        {
            this.sampleIcons = new ImageList();

            try
            {
                this.capture = new Capture(camIndex);
                this.capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, capWidth);
                this.capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, capHeight);
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }
            this.imageDimensions = new Size(capWidth, capHeight);
            this.cbDimensions = new Size(pcbWidth, pcbHeight);
            this.numSamples(pNumSamples);

        }

        public Image<Gray, Byte> captureFrame()
        {
            this.lastFrame = this.capture.QueryFrame();
            this.lastFrameGray = this.lastFrame.Convert<Gray, Byte>();
            return this.lastFrameGray;
        }


        public int captureSample()
        {
            this.detectChessboard(true);  // detectChessboard calls -> captureFrame

            if (lastChessboardFound)
            {
                this.samplesGray[cursampleIndex] = this.lastFrameGray;
                this.samplesChessboardCorners[cursampleIndex] = this.lastChessboardCorners;
                this.sampleIcons.Images[this.cursampleIndex] = this.lastFrameGray.ToBitmap(_iconWidth, _icnonHeight);

                this.cursampleIndex++;
                if (this.cursampleIndex >= _numSamples) this.cursampleIndex = 0;

            }
            return cursampleIndex;
        }

        public void clearSamples()
        {
            this.cursampleIndex = 0;
            this.numSamples(_numSamples);
        }

        public Image<Gray, Byte> detectChessboard(bool pDoCapture)
        {
            if (pDoCapture) this.captureFrame();

            this.lastChessboardFound = CameraCalibration.FindChessboardCorners(this.lastFrameGray, this.cbDimensions, CALIB_CB_TYPE.ADAPTIVE_THRESH | CALIB_CB_TYPE.FILTER_QUADS, out this.lastChessboardCorners);

            _chessBoardDisplay = this.lastFrameGray.Clone();
            CameraCalibration.DrawChessboardCorners(this._chessBoardDisplay, this.cbDimensions, this.lastChessboardCorners, this.lastChessboardFound);

            return this._chessBoardDisplay;
        }

        public void saveSampleImages(string pPath, string pID)
        {
            for(int ic = 0; ic < this._numSamples; ic++)
            {
                this.samplesGray[ic].Save(pPath + pID + ic.ToString() + ".bmp");
            }
        }


        public void loadSampleImages(string pPath, string pID)
        {
            clearSamples();

            for (int ic = 0; ic < this._numSamples; ic++)
            {
                this.lastFrameGray = new Image<Gray, byte>(new Bitmap(pPath + pID + ic.ToString() + ".bmp"));
                this.detectChessboard(false);
                this.samplesChessboardCorners[ic] = this.lastChessboardCorners;
                this.sampleIcons.Images[ic] = this.lastFrameGray.ToBitmap(_iconWidth, _icnonHeight);

                this.samplesGray[ic] = this.lastFrameGray;

            }
        }

    }
}

And here is my form code with the rest of the calibration logic:

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.Runtime.InteropServices;

using Emgu.CV.Util;
using Emgu.CV;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.VideoSurveillance;

namespace Capture2Cams
{
    public partial class CaptureForm : Form
    {
        private static camData camLeft;
        private static camData camRight;
        private int _numSamples = 10;  // Number of calibration samples
        private int _imageWidth = 1280;  // web cam resolution
        private int _imageHeight = 720; // web cam resolution
        private int _cbWidth = 9; // chessboard corner count
        private int _cbHeight = 5; // chessboard corner count

        // TODO: Test post calibration values, these will need to be loaded and saved
        private static Matrix<double> _foundamentalMatrix;
        private static Matrix<double> _essentialMatrix;
        private static IntrinsicCameraParameters _inPramsLeft;
        private static IntrinsicCameraParameters _inPramsRight;
        private static ExtrinsicCameraParameters _outExtParamsStereo;

        private  Matrix<float> _mapxLeft;
        private  Matrix<float> _mapyLeft;
        private  Matrix<float> _mapxRight;
        private  Matrix<float> _mapyRight;

        public CaptureForm()
        {
            InitializeComponent();
            Run();
        }

        void Run()
        {
            camLeft = new camData(0, _imageWidth, _imageHeight, _cbWidth, _cbHeight, _numSamples);
            camRight = new camData(1, _imageWidth, _imageHeight, _cbWidth, _cbHeight, _numSamples);

            this.listViewLeft.LargeImageList = camLeft.sampleIcons;
            for (int c = 0; c < _numSamples; c++)
            {
                ListViewItem curItem = new ListViewItem();
                curItem.ImageIndex = c;
                curItem.Text = "Sample" + c.ToString();
                this.listViewLeft.Items.Add(curItem);
            }


            this.listViewRight.LargeImageList = camRight.sampleIcons;
            for (int c = 0; c < _numSamples; c++)
            {
                ListViewItem curItem = new ListViewItem();
                curItem.ImageIndex = c;
                curItem.Text = "Sample" + c.ToString();
                this.listViewRight.Items.Add(curItem);
            }


            Application.Idle += ProcessFrame;
        }

        void ProcessFrame(object sender, EventArgs e)
        {
            if (!checkBoxRectify.Checked)
            {
                if (this.checkBoxCapCB.Checked)
                {
                    imageBoxLeft.Image = camLeft.detectChessboard(true);
                    imageBoxRight.Image = camRight.detectChessboard(true);
                }
                else
                {
                    imageBoxLeft.Image = camLeft.captureFrame();
                    imageBoxRight.Image = camRight.captureFrame();
                }
            }
            else
            {
                camLeft.captureFrame();
                camRight.captureFrame();
                Image<Gray, byte> imgLeft = camLeft.lastFrameGray.Clone();
                Image<Gray, byte> imgRight = camRight.lastFrameGray.Clone();

                CvInvoke.cvRemap(camLeft.lastFrameGray.Ptr, imgLeft.Ptr, _mapxLeft.Ptr, _mapyLeft.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
                CvInvoke.cvRemap(camRight.lastFrameGray.Ptr, imgRight.Ptr, _mapxRight.Ptr, _mapyRight.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));

                imageBoxLeft.Image = imgLeft;
                imageBoxRight.Image = imgRight;
            }


            //checkBoxRectify
        }

        private void buttonCaptureSample_Click(object sender, EventArgs e)
        {
            camLeft.captureSample();            
            camRight.captureSample();

            this.listViewLeft.Refresh();
            this.listViewRight.Refresh();
        }

        private void buttonStereoCalibrate_Click(object sender, EventArgs e)
        {
            // We should have most of the data needed from the sampling with the camData objects
            int numCorners = _cbWidth * _cbHeight;

            // Calc intrinsics per camera
            _inPramsLeft = new IntrinsicCameraParameters();
            _inPramsRight = new IntrinsicCameraParameters();

            ExtrinsicCameraParameters[] outExtParamsLeft;
            ExtrinsicCameraParameters[] outExtParamsRight;

            //Matrix<double> foundamentalMatrix;
            //Matrix<double> essentialMatrix;


            outExtParamsLeft = new ExtrinsicCameraParameters[_numSamples];
            outExtParamsRight = new ExtrinsicCameraParameters[_numSamples];
            _outExtParamsStereo = new ExtrinsicCameraParameters();

            // Building object points
            // These are the points on the chessboard in local 3D coordinates
            // One set is required per sample; if the same calibration object (chessboard) is used for every sample, the same set of points can be reused
            // Also running sub-pixel refinement on the samples
            MCvPoint3D32f[][] objectPoints = new MCvPoint3D32f[_numSamples][];
            for (int sc = 0; sc < _numSamples; sc++) // Samples count
            {
                // individual cam setup
                outExtParamsLeft[sc] = new ExtrinsicCameraParameters();
                outExtParamsRight[sc] = new ExtrinsicCameraParameters();

                // Sub-pixel refinement
                camLeft.samplesGray[sc].FindCornerSubPix(new PointF[][] { camLeft.samplesChessboardCorners[sc] }, new Size(10, 10), new Size(-1, -1), new MCvTermCriteria(300, 0.01));
                camRight.samplesGray[sc].FindCornerSubPix(new PointF[][] { camRight.samplesChessboardCorners[sc] }, new Size(10, 10), new Size(-1, -1), new MCvTermCriteria(300, 0.01));

                // Object points
                objectPoints[sc] = new MCvPoint3D32f[numCorners];

                for (int cc = 0; cc < numCorners; cc++)  // chessboard corners count
                {                    
                    objectPoints[sc][cc].x = cc / _cbWidth;
                    objectPoints[sc][cc].y = cc % _cbWidth;
                    objectPoints[sc][cc].z = 0.0f;
                }
            }
            Size imageSize = new Size(_imageWidth, _imageHeight);
            // Individual camera calibration

            CameraCalibration.CalibrateCamera(objectPoints, camLeft.samplesChessboardCorners, imageSize, _inPramsLeft, CALIB_TYPE.DEFAULT, out outExtParamsLeft);
            CameraCalibration.CalibrateCamera(objectPoints, camRight.samplesChessboardCorners, imageSize, _inPramsRight, CALIB_TYPE.DEFAULT, out outExtParamsRight);


            // Stereo Cam calibration
            CameraCalibration.StereoCalibrate(
                objectPoints, 
                camLeft.samplesChessboardCorners, 
                camRight.samplesChessboardCorners, 
                _inPramsLeft, 
                _inPramsRight,
                imageSize, 
                CALIB_TYPE.CV_CALIB_FIX_ASPECT_RATIO | CALIB_TYPE.CV_CALIB_ZERO_TANGENT_DIST | CALIB_TYPE.CV_CALIB_FIX_FOCAL_LENGTH, 
                new MCvTermCriteria(100, 0.001), 
                out _outExtParamsStereo, 
                out _foundamentalMatrix, 
                out _essentialMatrix
                );

            PrintIntrinsic(_inPramsLeft);
            PrintIntrinsic(_inPramsRight);
        }


        private void listViewLeft_ItemSelectionChanged(object sender, ListViewItemSelectionChangedEventArgs e)
        {

        }

        private void listViewRight_ItemSelectionChanged(object sender, ListViewItemSelectionChangedEventArgs e)
        {

        }

        private void buttonSaveSamples_Click(object sender, EventArgs e)
        {
            camLeft.saveSampleImages(textBoxSavePath.Text, "left");
            camRight.saveSampleImages(textBoxSavePath.Text, "right");
        }

        private void buttonLoadSamples_Click(object sender, EventArgs e)
        {
            camLeft.loadSampleImages(textBoxSavePath.Text, "left");
            camRight.loadSampleImages(textBoxSavePath.Text, "right");

            this.listViewLeft.Refresh();
            this.listViewRight.Refresh();
        }

        private void buttonCapture_Click(object sender, EventArgs e)
        {

        }

        private void buttonCaptureCurframe_Click(object sender, EventArgs e)
        {
            camLeft.captureFrame();
            camRight.captureFrame();
            camLeft.lastFrame.Save(textBoxSavePath.Text + "frameLeft" + ".bmp");
            camLeft.lastFrameGray.Save(textBoxSavePath.Text + "frameLeftGray" + ".bmp");
            camRight.lastFrame.Save(textBoxSavePath.Text + "frameRight" + ".bmp");
            camRight.lastFrameGray.Save(textBoxSavePath.Text + "frameRightGray" + ".bmp");
        }

        public void StereoRectify(
            IntrinsicCameraParameters intrinsicParam1,
            IntrinsicCameraParameters intrinsicParam2,
            Size imageSize,           
            ExtrinsicCameraParameters extrinsicParams,
            out Matrix<double> R1,
            out Matrix<double> R2,
            out Matrix<double> P1,
            out Matrix<double> P2,
            out Matrix<double> Q,
            STEREO_RECTIFY_TYPE flags,
            double alpha,
            Size newImageSize,
            ref Rectangle validPixROI1,
            ref Rectangle validPixROI2
            )
        {            
            R1 = new Matrix<double>(3, 3);
            R2 = new Matrix<double>(3, 3);
            P1 = new Matrix<double>(3, 4);
            P2 = new Matrix<double>(3, 4);
            Q = new Matrix<double>(4, 4);

            CvInvoke.cvStereoRectify(
                _inPramsLeft.IntrinsicMatrix.Ptr,
                _inPramsRight.IntrinsicMatrix.Ptr,
                _inPramsLeft.DistortionCoeffs.Ptr,
                _inPramsRight.DistortionCoeffs.Ptr, 
                imageSize,
                extrinsicParams.RotationVector.Ptr,
                extrinsicParams.TranslationVector.Ptr, 
                R1.Ptr, 
                R2.Ptr, 
                P1.Ptr, 
                P2.Ptr, 
                Q.Ptr, 
                STEREO_RECTIFY_TYPE.DEFAULT, 
                alpha, 
                newImageSize, 
                ref validPixROI1, 
                ref validPixROI2);
        }

        public void InitUndistortRectifyMap(
            IntrinsicCameraParameters intrinsicParam,
            Matrix<double> R,
            Matrix<double> newCameraMatrix,
            out Matrix<float> mapx,
            out Matrix<float> mapy
            )
        {
            mapx = new Matrix<float>(new Size(_imageWidth, _imageHeight));
            mapy = new Matrix<float>(new Size(_imageWidth, _imageHeight));
            CvInvoke.cvInitUndistortRectifyMap(intrinsicParam.IntrinsicMatrix.Ptr, intrinsicParam.DistortionCoeffs.Ptr, R.Ptr, newCameraMatrix.Ptr, mapx.Ptr, mapy.Ptr);
        }


        private void buttonTestCalc_Click(object sender, EventArgs e)
        {
            // Stereo Rectify images           
            Matrix<double> R1;
            Matrix<double> R2;
            Matrix<double> P1;
            Matrix<double> P2;
            Matrix<double> Q;
            Rectangle validPixROI1, validPixROI2;
            validPixROI1 = new Rectangle();
            validPixROI2 = new Rectangle();

            StereoRectify(_inPramsLeft, _inPramsRight, new Size(_imageWidth, _imageHeight), _outExtParamsStereo, out R1, out R2, out P1, out P2, out Q, 0, 0, new Size(_imageWidth, _imageHeight), ref validPixROI1, ref validPixROI2);

            //InitUndistortRectifyMap(_inPramsLeft, R1, P1, out _mapxLeft, out _mapyLeft);
            //InitUndistortRectifyMap(_inPramsRight, R2, P2, out _mapxRight, out _mapyRight);            

            _inPramsLeft.InitUndistortMap(_imageWidth, _imageHeight, out _mapxLeft, out _mapyLeft);
            _inPramsRight.InitUndistortMap(_imageWidth, _imageHeight, out _mapxRight, out _mapyRight);

            Image<Gray, byte> imgLeft = camLeft.lastFrameGray.Clone();
            Image<Gray, byte> imgRight = camRight.lastFrameGray.Clone();

            // **** THIS IS WHERE I'M UP TO, no errors, it just hangs ****
            CvInvoke.cvRemap(camLeft.lastFrameGray.Ptr, imgLeft.Ptr, _mapxLeft.Ptr, _mapyLeft.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));

            // StereoBM stereoSolver = new StereoBM(Emgu.CV.CvEnum.STEREO_BM_TYPE.BASIC, 0);
            //stereoSolver.FindStereoCorrespondence(                      
        }


        public void PrintIntrinsic(IntrinsicCameraParameters CamIntrinsic)
        {
            // Prints the Intrinsic camera parameters to the command line

            Console.WriteLine("Intrinsic Matrix:");

            string outStr = "";
            int i = 0;
            int j = 0;

            for (i = 0; i < CamIntrinsic.IntrinsicMatrix.Height; i++)
            {
                for (j = 0; j < CamIntrinsic.IntrinsicMatrix.Width; j++)
                {
                    outStr = outStr + CamIntrinsic.IntrinsicMatrix.Data[i, j].ToString();
                    outStr = outStr + "  ";
                }

                Console.WriteLine(outStr);
                outStr = "";

            }

            Console.WriteLine("Distortion Coefficients: ");
            outStr = "";

            for (j = 0; j < CamIntrinsic.DistortionCoeffs.Height; j++)
            {
                outStr = outStr + CamIntrinsic.DistortionCoeffs.Data[j, 0].ToString();
                outStr = outStr + "  ";
            }

            Console.WriteLine(outStr);
        }


        public void PrintExtrinsic(ExtrinsicCameraParameters CamExtrinsic)
        {
            // Prints the Extrinsic camera parameters to the command line
            Console.WriteLine("Extrinsic Matrix:");
            string outStr = "";
            int i = 0;
            int j = 0;
            for (i = 0; i < CamExtrinsic.ExtrinsicMatrix.Height; i++)
            {
                for (j = 0; j < CamExtrinsic.ExtrinsicMatrix.Width; j++)
                {
                    outStr = outStr + CamExtrinsic.ExtrinsicMatrix.Data[i, j].ToString();
                    outStr = outStr + "  ";
                }
                Console.WriteLine(outStr);
                outStr = "";

            }

            Console.WriteLine("Rotation Vector: ");
            outStr = "";

            for (i = 0; i < CamExtrinsic.RotationVector.Height; i++)
            {
                for (j = 0; j < CamExtrinsic.RotationVector.Width; j++)
                {
                    outStr = outStr + CamExtrinsic.RotationVector.Data[i, j].ToString();
                    outStr = outStr + "  ";
                }

                Console.WriteLine(outStr);
                outStr = "";
            }


            Console.WriteLine("Translation Vector: ");
            outStr = "";

            for (i = 0; i < CamExtrinsic.TranslationVector.Height; i++)
            {
                for (j = 0; j < CamExtrinsic.TranslationVector.Width; j++)
                {
                    outStr = outStr + CamExtrinsic.TranslationVector.Data[i, j].ToString();
                    outStr = outStr + "  ";
                }

                Console.WriteLine(outStr);
                outStr = "";
            }
        }


    }
}

Thanks!




Answers (1)


Your maps need to be images, not matrices.

Specifically, of type Gray, float (i.e. Image<Gray, float>).

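As a rough sketch of that change (based on the code in the question, using the same Emgu CV 2.x calls, not tested here): allocate the maps as single-channel float images and pass them to cvRemap exactly as before.

// Maps as Gray/float images instead of Matrix<float>; R1/P1 are the StereoRectify outputs from the question's code
Image<Gray, float> mapxLeft = new Image<Gray, float>(_imageWidth, _imageHeight);
Image<Gray, float> mapyLeft = new Image<Gray, float>(_imageWidth, _imageHeight);
CvInvoke.cvInitUndistortRectifyMap(_inPramsLeft.IntrinsicMatrix.Ptr, _inPramsLeft.DistortionCoeffs.Ptr, R1.Ptr, P1.Ptr, mapxLeft.Ptr, mapyLeft.Ptr);

// cvRemap is called the same way; only the map types change
Image<Gray, Byte> imgLeft = camLeft.lastFrameGray.Clone();
CvInvoke.cvRemap(camLeft.lastFrameGray.Ptr, imgLeft.Ptr, mapxLeft.Ptr, mapyLeft.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));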