// newspark110/device/camera/cmvcamera.cpp

#include "cmvcamera.h"
#include <QDebug>
CMvCamera* CMvCamera::uniqueInstance = nullptr;
CMvCamera* CMvCamera::instance()
{
if (!uniqueInstance) {
uniqueInstance = new CMvCamera();
}
return uniqueInstance;
}
CMvCamera::CMvCamera()
{
m_hDevHandle = MV_NULL;
InitializeCriticalSection(&mCsRead);
getCameraPara("CameraPara");//CameraPara
}
CMvCamera::~CMvCamera()
{
if (m_hDevHandle)
{
MV_CC_DestroyHandle(m_hDevHandle);
m_hDevHandle = MV_NULL;
}
}
// ch:获取SDK版本号 | en:Get SDK Version
int CMvCamera::GetSDKVersion()
{
return MV_CC_GetSDKVersion();
}
// ch:枚举设备 | en:Enumerate Device
int CMvCamera::EnumDevices(unsigned int nTLayerType, MV_CC_DEVICE_INFO_LIST* pstDevList)
{
return MV_CC_EnumDevices(nTLayerType, pstDevList);
}
// ch:判断设备是否可达 | en:Is the device accessible
bool CMvCamera::IsDeviceAccessible(MV_CC_DEVICE_INFO* pstDevInfo, unsigned int nAccessMode)
{
return MV_CC_IsDeviceAccessible(pstDevInfo, nAccessMode);
}
// ch:打开设备 | en:Open Device
int CMvCamera::Open(MV_CC_DEVICE_INFO* pstDeviceInfo)
{
if (MV_NULL == pstDeviceInfo)
{
return MV_E_PARAMETER;
}
if (m_hDevHandle)
{
return MV_E_CALLORDER;
}
int nRet = MV_CC_CreateHandle(&m_hDevHandle, pstDeviceInfo);
if (MV_OK != nRet)
{
return nRet;
}
nRet = MV_CC_OpenDevice(m_hDevHandle);
if (MV_OK != nRet)
{
MV_CC_DestroyHandle(m_hDevHandle);
m_hDevHandle = MV_NULL;
}
return nRet;
}
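// Illustrative usage sketch, not part of the original file: enumerate the attached cameras,
// open the first one and switch it to software trigger before grabbing. "TriggerMode" and
// "TriggerSource" are standard GenICam node names and are assumed to be supported here.
static int OpenFirstCameraExample()
{
    CMvCamera* cam = CMvCamera::instance();
    MV_CC_DEVICE_INFO_LIST stDevList = {0};
    if (cam->EnumDevices(MV_GIGE_DEVICE | MV_USB_DEVICE, &stDevList) != MV_OK
        || stDevList.nDeviceNum == 0)
    {
        return -1;                                        // no camera found
    }
    if (cam->Open(stDevList.pDeviceInfo[0]) != MV_OK)     // open the first enumerated device
    {
        return -1;
    }
    cam->SetEnumValue("TriggerMode", 1);                  // 1 = trigger mode on
    cam->SetEnumValueByString("TriggerSource", "Software");
    return cam->StartGrabbing();
}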
// ch:关闭设备 | en:Close Device
int CMvCamera::Close()
{
if (MV_NULL == m_hDevHandle)
{
return MV_E_HANDLE;
}
MV_CC_CloseDevice(m_hDevHandle);
int nRet = MV_CC_DestroyHandle(m_hDevHandle);
m_hDevHandle = MV_NULL;
return nRet;
}
// ch:判断相机是否处于连接状态 | en:Is The Device Connected
bool CMvCamera::IsDeviceConnected()
{
return MV_CC_IsDeviceConnected(m_hDevHandle);
}
// ch:注册图像数据回调 | en:Register Image Data CallBack
int CMvCamera::RegisterImageCallBack(void(__stdcall* cbOutput)(unsigned char * pData, MV_FRAME_OUT_INFO_EX* pFrameInfo, void* pUser), void* pUser)
{
return MV_CC_RegisterImageCallBackEx(m_hDevHandle, cbOutput, pUser);
}
// ch:开启抓图 | en:Start Grabbing
int CMvCamera::StartGrabbing()
{
return MV_CC_StartGrabbing(m_hDevHandle);
}
// ch:停止抓图 | en:Stop Grabbing
int CMvCamera::StopGrabbing()
{
return MV_CC_StopGrabbing(m_hDevHandle);
}
// ch:主动获取一帧图像数据 | en:Get one frame initiatively
int CMvCamera::GetImageBuffer(MV_FRAME_OUT* pFrame, int nMsec)
{
return MV_CC_GetImageBuffer(m_hDevHandle, pFrame, nMsec);
}
// ch:释放图像缓存 | en:Free image buffer
int CMvCamera::FreeImageBuffer(MV_FRAME_OUT* pFrame)
{
return MV_CC_FreeImageBuffer(m_hDevHandle, pFrame);
}
// ch:设置显示窗口句柄 | en:Set Display Window Handle
int CMvCamera::DisplayOneFrame(MV_DISPLAY_FRAME_INFO* pDisplayInfo)
{
return MV_CC_DisplayOneFrame(m_hDevHandle, pDisplayInfo);
}
// ch:设置SDK内部图像缓存节点个数 | en:Set the number of the internal image cache nodes in SDK
int CMvCamera::SetImageNodeNum(unsigned int nNum)
{
return MV_CC_SetImageNodeNum(m_hDevHandle, nNum);
}
// ch:获取设备信息 | en:Get device information
int CMvCamera::GetDeviceInfo(MV_CC_DEVICE_INFO* pstDevInfo)
{
return MV_CC_GetDeviceInfo(m_hDevHandle, pstDevInfo);
}
// ch:获取GEV相机的统计信息 | en:Get detect info of GEV camera
int CMvCamera::GetGevAllMatchInfo(MV_MATCH_INFO_NET_DETECT* pMatchInfoNetDetect)
{
if (MV_NULL == pMatchInfoNetDetect)
{
return MV_E_PARAMETER;
}
MV_CC_DEVICE_INFO stDevInfo = {0};
GetDeviceInfo(&stDevInfo);
if (stDevInfo.nTLayerType != MV_GIGE_DEVICE)
{
return MV_E_SUPPORT;
}
MV_ALL_MATCH_INFO struMatchInfo = {0};
struMatchInfo.nType = MV_MATCH_TYPE_NET_DETECT;
struMatchInfo.pInfo = pMatchInfoNetDetect;
struMatchInfo.nInfoSize = sizeof(MV_MATCH_INFO_NET_DETECT);
memset(struMatchInfo.pInfo, 0, sizeof(MV_MATCH_INFO_NET_DETECT));
return MV_CC_GetAllMatchInfo(m_hDevHandle, &struMatchInfo);
}
// ch:获取U3V相机的统计信息 | en:Get detect info of U3V camera
int CMvCamera::GetU3VAllMatchInfo(MV_MATCH_INFO_USB_DETECT* pMatchInfoUSBDetect)
{
if (MV_NULL == pMatchInfoUSBDetect)
{
return MV_E_PARAMETER;
}
MV_CC_DEVICE_INFO stDevInfo = {0};
GetDeviceInfo(&stDevInfo);
if (stDevInfo.nTLayerType != MV_USB_DEVICE)
{
return MV_E_SUPPORT;
}
MV_ALL_MATCH_INFO struMatchInfo = {0};
struMatchInfo.nType = MV_MATCH_TYPE_USB_DETECT;
struMatchInfo.pInfo = pMatchInfoUSBDetect;
struMatchInfo.nInfoSize = sizeof(MV_MATCH_INFO_USB_DETECT);
memset(struMatchInfo.pInfo, 0, sizeof(MV_MATCH_INFO_USB_DETECT));
return MV_CC_GetAllMatchInfo(m_hDevHandle, &struMatchInfo);
}
// ch:获取和设置Int型参数如 Width和Height详细内容参考SDK安装目录下的 MvCameraNode.xlsx 文件
// en:Get Int type parameters, such as Width and Height, for details please refer to MvCameraNode.xlsx file under SDK installation directory
int CMvCamera::GetIntValue(IN const char* strKey, OUT MVCC_INTVALUE_EX *pIntValue)
{
return MV_CC_GetIntValueEx(m_hDevHandle, strKey, pIntValue);
}
int CMvCamera::SetIntValue(IN const char* strKey, IN int64_t nValue)
{
return MV_CC_SetIntValueEx(m_hDevHandle, strKey, nValue);
}
// ch:获取和设置Enum型参数如 PixelFormat详细内容参考SDK安装目录下的 MvCameraNode.xlsx 文件
// en:Get Enum type parameters, such as PixelFormat, for details please refer to MvCameraNode.xlsx file under SDK installation directory
int CMvCamera::GetEnumValue(IN const char* strKey, OUT MVCC_ENUMVALUE *pEnumValue)
{
return MV_CC_GetEnumValue(m_hDevHandle, strKey, pEnumValue);
}
int CMvCamera::SetEnumValue(IN const char* strKey, IN unsigned int nValue)
{
return MV_CC_SetEnumValue(m_hDevHandle, strKey, nValue);
}
int CMvCamera::SetEnumValueByString(IN const char* strKey, IN const char* sValue)
{
return MV_CC_SetEnumValueByString(m_hDevHandle, strKey, sValue);
}
int CMvCamera::GetEnumEntrySymbolic(IN const char* strKey, IN MVCC_ENUMENTRY* pstEnumEntry)
{
return MV_CC_GetEnumEntrySymbolic(m_hDevHandle, strKey, pstEnumEntry);
}
// ch:获取和设置Float型参数如 ExposureTime和Gain详细内容参考SDK安装目录下的 MvCameraNode.xlsx 文件
// en:Get Float type parameters, such as ExposureTime and Gain, for details please refer to MvCameraNode.xlsx file under SDK installation directory
int CMvCamera::GetFloatValue(IN const char* strKey, OUT MVCC_FLOATVALUE *pFloatValue)
{
return MV_CC_GetFloatValue(m_hDevHandle, strKey, pFloatValue);
}
int CMvCamera::SetFloatValue(IN const char* strKey, IN float fValue)
{
return MV_CC_SetFloatValue(m_hDevHandle, strKey, fValue);
}
// ch:获取和设置Bool型参数如 ReverseX详细内容参考SDK安装目录下的 MvCameraNode.xlsx 文件
// en:Get Bool type parameters, such as ReverseX, for details please refer to MvCameraNode.xlsx file under SDK installation directory
int CMvCamera::GetBoolValue(IN const char* strKey, OUT bool *pbValue)
{
return MV_CC_GetBoolValue(m_hDevHandle, strKey, pbValue);
}
int CMvCamera::SetBoolValue(IN const char* strKey, IN bool bValue)
{
return MV_CC_SetBoolValue(m_hDevHandle, strKey, bValue);
}
// ch:获取和设置String型参数如 DeviceUserID详细内容参考SDK安装目录下的 MvCameraNode.xlsx 文件
// en:Get String type parameters, such as DeviceUserID, for details please refer to MvCameraNode.xlsx file under SDK installation directory
int CMvCamera::GetStringValue(IN const char* strKey, MVCC_STRINGVALUE *pStringValue)
{
return MV_CC_GetStringValue(m_hDevHandle, strKey, pStringValue);
}
int CMvCamera::SetStringValue(IN const char* strKey, IN const char* strValue)
{
return MV_CC_SetStringValue(m_hDevHandle, strKey, strValue);
}
// ch:执行一次Command型命令如 UserSetSave详细内容参考SDK安装目录下的 MvCameraNode.xlsx 文件
// en:Execute Command once, such as UserSetSave, for details please refer to MvCameraNode.xlsx file under SDK installation directory
int CMvCamera::CommandExecute(IN const char* strKey)
{
return MV_CC_SetCommandValue(m_hDevHandle, strKey);
}
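// Illustrative sketch, not part of the original file: persisting the current configuration
// with the "UserSetSelector"/"UserSetSave" nodes mentioned above; both are standard GenICam
// node names and are assumed to exist on the connected camera.
static int SaveUserSetExample(CMvCamera* cam)
{
    cam->SetEnumValueByString("UserSetSelector", "UserSet1"); // choose the target user set
    return cam->CommandExecute("UserSetSave");                // save the active configuration
}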
// ch:探测网络最佳包大小(只对GigE相机有效) | en:Detection network optimal package size(It only works for the GigE camera)
int CMvCamera::GetOptimalPacketSize(unsigned int* pOptimalPacketSize)
{
if (MV_NULL == pOptimalPacketSize)
{
return MV_E_PARAMETER;
}
int nRet = MV_CC_GetOptimalPacketSize(m_hDevHandle);
if (nRet < MV_OK)
{
return nRet;
}
*pOptimalPacketSize = (unsigned int)nRet;
return MV_OK;
}
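// Illustrative sketch, not part of the original file: for GigE cameras the detected optimal
// packet size is normally written back to the "GevSCPSPacketSize" node; that node name is the
// standard GigE Vision one and is assumed to be available here.
static int ApplyOptimalPacketSizeExample(CMvCamera* cam)
{
    unsigned int nPacketSize = 0;
    int nRet = cam->GetOptimalPacketSize(&nPacketSize);
    if (nRet != MV_OK)
    {
        return nRet;
    }
    return cam->SetIntValue("GevSCPSPacketSize", nPacketSize);
}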
// ch:注册消息异常回调 | en:Register Message Exception CallBack
int CMvCamera::RegisterExceptionCallBack(void(__stdcall* cbException)(unsigned int nMsgType, void* pUser),void* pUser)
{
return MV_CC_RegisterExceptionCallBack(m_hDevHandle, cbException, pUser);
}
// ch:注册单个事件回调 | en:Register Event CallBack
int CMvCamera::RegisterEventCallBack(const char* pEventName, void(__stdcall* cbEvent)(MV_EVENT_OUT_INFO * pEventInfo, void* pUser), void* pUser)
{
return MV_CC_RegisterEventCallBackEx(m_hDevHandle, pEventName, cbEvent, pUser);
}
// ch:强制IP | en:Force IP
int CMvCamera::ForceIp(unsigned int nIP, unsigned int nSubNetMask, unsigned int nDefaultGateWay)
{
return MV_GIGE_ForceIpEx(m_hDevHandle, nIP, nSubNetMask, nDefaultGateWay);
}
// ch:配置IP方式 | en:IP configuration method
int CMvCamera::SetIpConfig(unsigned int nType)
{
return MV_GIGE_SetIpConfig(m_hDevHandle, nType);
}
// ch:设置网络传输模式 | en:Set Net Transfer Mode
int CMvCamera::SetNetTransMode(unsigned int nType)
{
return MV_GIGE_SetNetTransMode(m_hDevHandle, nType);
}
// ch:像素格式转换 | en:Pixel format conversion
int CMvCamera::ConvertPixelType(MV_CC_PIXEL_CONVERT_PARAM* pstCvtParam)
{
return MV_CC_ConvertPixelType(m_hDevHandle, pstCvtParam);
}
// ch:保存图片 | en:save image
int CMvCamera::SaveImage(MV_SAVE_IMAGE_PARAM_EX* pstParam)
{
return MV_CC_SaveImageEx2(m_hDevHandle, pstParam);
}
// ch:保存图片为文件 | en:Save the image as a file
int CMvCamera::SaveImageToFile(MV_SAVE_IMG_TO_FILE_PARAM* pstSaveFileParam)
{
return MV_CC_SaveImageToFile(m_hDevHandle, pstSaveFileParam);
}
// ch:绘制圆形辅助线 | en:Draw circle auxiliary line
int CMvCamera::DrawCircle(MVCC_CIRCLE_INFO* pCircleInfo)
{
return MV_CC_DrawCircle(m_hDevHandle, pCircleInfo);
}
// ch:绘制线形辅助线 | en:Draw lines auxiliary line
int CMvCamera::DrawLines(MVCC_LINES_INFO* pLinesInfo)
{
return MV_CC_DrawLines(m_hDevHandle, pLinesInfo);
}
// Read one frame from the camera into an OpenCV Mat
int CMvCamera::ReadBuffer(Mat &image)
{
Mat getImage; // use a stack Mat so it is not leaked on the early-return paths below
unsigned int nRecvBufSize = 0;
MVCC_INTVALUE stParam;
memset(&stParam, 0,sizeof(MVCC_INTVALUE));
int tempValue = MV_CC_GetIntValue(m_hDevHandle,"PayloadSize",&stParam);
if(tempValue != 0)
{
return -1;
}
nRecvBufSize = stParam.nCurValue;
unsigned char* pDate;
pDate = (unsigned char *)malloc(nRecvBufSize);
MV_FRAME_OUT_INFO_EX stImageInfo = {0};
tempValue = MV_CC_GetOneFrameTimeout(m_hDevHandle,pDate, nRecvBufSize, &stImageInfo, 700);
if(tempValue != 0)
{
free(pDate);
return -1;
}
m_nBufSizeForSaveImage = stImageInfo.nWidth * stImageInfo.nHeight *3+2048;
unsigned char* m_pBufForSaveImage;
m_pBufForSaveImage = (unsigned char*)malloc(m_nBufSizeForSaveImage);
bool isMono;
switch(stImageInfo.enPixelType)
{
case PixelType_Gvsp_Mono8:
case PixelType_Gvsp_Mono10:
case PixelType_Gvsp_Mono10_Packed:
case PixelType_Gvsp_Mono12:
case PixelType_Gvsp_Mono12_Packed:
isMono = true;
break;
default:
isMono = false;
break;
}
if(isMono)
{
getImage = Mat(stImageInfo.nHeight,stImageInfo.nWidth,CV_8UC1,pDate);
}
else
{
// convert the frame to BGR8
MV_CC_PIXEL_CONVERT_PARAM stConvertParam = {0};
memset(&stConvertParam,0,sizeof(MV_CC_PIXEL_CONVERT_PARAM));
stConvertParam.nWidth = stImageInfo.nWidth;
stConvertParam.nHeight = stImageInfo.nHeight;
stConvertParam.pSrcData = pDate;
stConvertParam.nSrcDataLen = stImageInfo.nFrameLen;
stConvertParam.enSrcPixelType = stImageInfo.enPixelType;
stConvertParam.enDstPixelType = PixelType_Gvsp_BGR8_Packed;
stConvertParam.pDstBuffer = m_pBufForSaveImage;
stConvertParam.nDstBufferSize = m_nBufSizeForSaveImage;
MV_CC_ConvertPixelType(m_hDevHandle,&stConvertParam);
getImage = Mat(stImageInfo.nHeight,stImageInfo.nWidth,CV_8UC3,m_pBufForSaveImage);
}
getImage.copyTo(image); // deep copy before the wrapped buffers are freed below
free(pDate);
free(m_pBufForSaveImage);
return 0;
}
// Software-trigger the camera and read one frame (optionally with the edge-finding exposure time)
int CMvCamera::GetImage(Mat &image,bool FindFlag)
{
int ret;
::EnterCriticalSection(&mCsRead);
if (FindFlag)
{
ret = SetFloatValue("ExposureTime",ExposureTimeFindEdge);
if (ret != MV_OK)
{
::LeaveCriticalSection(&mCsRead);
return -1;
}
}
ClearImageBuffer();
ret = CommandExecute("TriggerSoftware");
if (ret != MV_OK)
{
::LeaveCriticalSection(&mCsRead);
return -1;
}
ret = ReadBuffer(image);
if (ret != MV_OK)
{
::LeaveCriticalSection(&mCsRead);
return -1;
}
if (FindFlag)
{
ret = SetFloatValue("ExposureTime",ExposureTime);
if (ret != MV_OK)
{
::LeaveCriticalSection(&mCsRead);
return -1;
}
}
::LeaveCriticalSection(&mCsRead);
return 0;
}
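// Illustrative sketch, not part of the original file: a typical call site for GetImage().
// It assumes the camera has already been opened, switched to software trigger and started
// grabbing (see the sketch after Open() above).
static int GrabOneFrameExample(cv::Mat& frame)
{
    // passing true would temporarily switch to the edge-finding exposure time (ExposureTimeFindEdge)
    return CMvCamera::instance()->GetImage(frame, false);
}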
int CMvCamera::setPoints(std::vector<cv::Point> points, cv::Mat &image)
{
for (int i = 0; i < points.size(); i++)
{
image.at<unsigned char>(points[i].y, points[i].x) = 255;
}
return 0;
}
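// Collects every pixel with intensity >= value. The x coordinate is the raw index over
// cols*channels, so the result is only meaningful for single-channel (CV_8UC1) images.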
std::vector<cv::Point> CMvCamera::getPoints(cv::Mat &image, int value)
{
int nl = image.rows; // number of lines
int nc = image.cols * image.channels();
std::vector<cv::Point> points;
for (int j = 0; j < nl; j++)
{
uchar* data = image.ptr<uchar>(j);
for (int i = 0; i < nc; i++)
{
if (data[i] >= value)
{
points.push_back(cv::Point(i, j));
}
}
}
return points;
}
bool SortByX(Point &v1, Point &v2)// the parameter type must match the element type stored in the vector
{
return v1.x < v2.x;// ascending order
}
bool SortByY(Point &v1, Point &v2)// the parameter type must match the element type stored in the vector
{
return v1.y < v2.y;// ascending order
}
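// GetLineSlope: binarizes the central half of the image, scans upward from mid-height for a
// stable dark edge, isolates the contour containing it (Canny + findContours), samples the
// contour, fits a straight line with fitLine, and reports the slope plus a pass/fail flag
// based on the summed point-to-line distance.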
int CMvCamera::GetLineSlope(Mat &src,bool &result,double *slope)
{
std::vector<std::vector<cv::Point>> contours;
std::vector<cv::Vec4i> hierarchy;
std::vector<cv::Point> points;
std::vector<cv::Point> points20;
std::vector<cv::Point> points_dst;
result = false;
imwrite("src.bmp", src);
Mat src_gray = src(Rect(src.cols/4, 0, src.cols / 2, src.rows));
blur(src_gray, src_gray, Size(7, 7));
threshold(src_gray, src_gray, ParaThreshold, 255, THRESH_BINARY); // parameter 1: binarization threshold
imwrite("canny0.bmp", src_gray);
Mat dst = Mat::zeros(src_gray.size(), CV_8UC1);
Point p_tmp, p_start;
unsigned char pix_next;
int x_tmp = 100;
while (1)
{
bool flag = false;
for (int j = src_gray.rows/2; j > 0; j--) // parameter 2: starting Y position of the scan
{
pix_next = src_gray.at<unsigned char>(j - 1, x_tmp);
if (pix_next == 0)
{
p_start.x = x_tmp;
p_start.y = j - 1;
flag = true;
break;
}
}
if (!flag)
{
return -1;
}
int x2, y2;
x2 = p_start.x;
y2 = p_start.y + YOffsetPos; // parameter 3: Y offset
for (int k = 0; k < ContinuePointSum; k++) // parameter 4: number of consecutive valid points used to reject noise
{
for (int j = y2; j > 0; j--)
{
pix_next = src_gray.at<unsigned char>(j - 1, x2);
if (pix_next == 0)
{
p_tmp.x = x2;
p_tmp.y = j-1;
points20.push_back(p_tmp);
break;
}
}
x2++;
}
if (points20.empty())
{
return -1;
}
int minY = p_start.y, maxY = p_start.y;
for (int k = 0; k < (int)points20.size(); k++) // parameter 4: iterate only over the points actually collected (may be fewer than ContinuePointSum)
{
if (minY > points20[k].y)
{
minY = points20[k].y;
}
if (maxY < points20[k].y)
{
maxY = points20[k].y;
}
}
if ((maxY - minY) < YPointValueDiff) // parameter 5: allowed Y spread
break;
x_tmp = x_tmp + ContinuePointSum; // parameter 4: advance by the number of consecutive valid points
std::vector<cv::Point>().swap(points20);
if (x_tmp >= src_gray.cols)
return -1;
}
if (points20.empty())
{
return -1;
}
{
Mat tmpsrtgray;
src_gray.copyTo(tmpsrtgray);
Canny(tmpsrtgray, src_gray, 70, 200);
imwrite("canny.bmp", src_gray);
Mat shape1;
findContours(src_gray, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_NONE);//RETR_EXTERNAL
//imwrite("dstnull.bmp", dst);
if ((contours.size() > 0))
{
bool flag = false;
int num = 0;
for (unsigned int i = 0; i < contours.size(); i++)
{
for (int j = 0; j < contours[i].size(); j++)
{
if (contours[i][j] == points20[0])
{
num = i;
flag = true;
break;
}
}
if (flag)
break;
}
points = contours[num];
std::sort(points.begin(), points.end(), SortByX);
setPoints(points, dst);
//imwrite("canny2.bmp", dst);
//imshow("src_gray1", dst);
}
if (points.size() > ContoursPointSum) // parameter 6: minimum contour point count
{
float step = (float)points.size() / LinePointCount; // parameter 7: number of sampled points (cast avoids integer division)
int i = 0;
while (1)
{
points_dst.push_back(points[(int)(i*step)]);
i++;
if (i >= LinePointCount) // parameter 7: number of sampled points
break;
}
}
else
{
points_dst = points;
}
}
cv::Vec4f line;
double k;
double b;
double cos_theta;
double sin_theta;
double x0, y0;
fitLine(points_dst, line, DIST_L2, 0, 0.01, 0.01);
cos_theta = line[0];
sin_theta = line[1];
x0 = line[2];
y0 = line[3];
k = sin_theta / cos_theta;
b = y0 - k * x0;
*slope = k;
cv::Point pt1((int)(-b/k), 0);
cv::Point pt2((int)((dst.rows - b)/k), dst.rows);
cv::line(dst, pt1, pt2, cv::Scalar(255, 255, 255), 2);
//imshow("src_gray2", dst);
double A, B, C, TotalDistance = 0;
A = k;
B = -1;
C = b;
for (unsigned int i = 0; i < points_dst.size(); i++)
{
double dis = abs(A * points_dst[i].x + B * points_dst[i].y + C) / sqrt(A * A + B * B);
TotalDistance = TotalDistance + dis;
}
if (TotalDistance <= SumValueThresh) // parameter 8: total point-to-line distance threshold
{
result = true;
}
else
{
result = false;
}
qDebug()<< "result:"<<result;
qDebug()<< "slope:"<<*slope;
qDebug()<< "TotalDistance:"<<TotalDistance;
return 0;
}
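// Illustrative sketch, not part of the original file, of the fit/score math used above:
// points generated exactly on y = 0.5x + 10 give a fitted slope of ~0.5 and a summed
// point-to-line distance of ~0, so the TotalDistance check above would pass.
static void LineFitMathExample()
{
    std::vector<cv::Point> pts;
    for (int x = 0; x < 100; x += 2)
    {
        pts.push_back(cv::Point(x, x / 2 + 10));     // exact samples of y = 0.5x + 10
    }
    cv::Vec4f l;
    fitLine(pts, l, DIST_L2, 0, 0.01, 0.01);
    double k = l[1] / l[0];                          // slope = vy / vx
    double b = l[3] - k * l[2];                      // intercept from the point (x0, y0) on the line
    double sum = 0;
    for (unsigned int i = 0; i < pts.size(); i++)
    {
        // distance of (x, y) to the line kx - y + b = 0
        sum += fabs(k * pts[i].x - pts[i].y + b) / sqrt(k * k + 1.0);
    }
    qDebug() << "slope:" << k << "intercept:" << b << "total distance:" << sum;
}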
int CMvCamera::GetGap(Mat &src,bool &result,int *xPos)
{
std::vector<std::vector<cv::Point>> contours;
std::vector<cv::Vec4i> hierarchy;
std::vector<cv::Point> points;
std::vector<cv::Point> points20;
std::vector<cv::Point> points_dst;
result = false;
imwrite("src.bmp", src);
Mat src_gray = src(Rect(src.cols/4, 0, src.cols / 2, src.rows));
blur(src_gray, src_gray, Size(7, 7));
threshold(src_gray, src_gray, ParaThreshold, 255, THRESH_BINARY); // parameter 1: binarization threshold (ParaThreshold)
imwrite("canny0.bmp", src_gray);
Mat dst = Mat::zeros(src_gray.size(), CV_8UC1);
Point p_tmp, p_start;
unsigned char pix_next;
int x_tmp = 100;
while (1)
{
bool flag = false;
for (int j = src_gray.rows*3/4; j > 0; j--) // parameter 2: starting Y position of the scan
{
pix_next = src_gray.at<unsigned char>(j - 1, x_tmp);
if (pix_next == 0)
{
p_start.x = x_tmp;
p_start.y = j - 1;
flag = true;
break;
}
}
if (!flag)
{
return -1;
}
int x2, y2;
x2 = p_start.x;
y2 = p_start.y + YOffsetPos; // parameter 3: Y offset
for (int k = 0; k < ContinuePointSum; k++) // parameter 4: number of consecutive valid points used to reject noise
{
for (int j = y2; j > 0; j--)
{
pix_next = src_gray.at<unsigned char>(j - 1, x2);
if (pix_next == 0)
{
p_tmp.x = x2;
p_tmp.y = j-1;
points20.push_back(p_tmp);
break;
}
}
x2++;
}
if (points20.empty())
{
return -1;
}
int minY = p_start.y, maxY = p_start.y;
for (int k = 0; k < (int)points20.size(); k++) // parameter 4: iterate only over the points actually collected (may be fewer than ContinuePointSum)
{
if (minY > points20[k].y)
{
minY = points20[k].y;
}
if (maxY < points20[k].y)
{
maxY = points20[k].y;
}
}
if ((maxY - minY) < YPointValueDiff_6) // parameter 5: allowed Y spread
break;
x_tmp = x_tmp + ContinuePointSum; // parameter 4: advance by the number of consecutive valid points
std::vector<cv::Point>().swap(points20);
if (x_tmp >= src_gray.cols)
return -1;
}
if (points20.empty())
{
return -1;
}
{
Mat tmpsrtgray;
src_gray.copyTo(tmpsrtgray);
Canny(tmpsrtgray, src_gray, 70, 255);
imwrite("canny.bmp", src_gray);
Mat shape1;
findContours(src_gray, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_NONE);//RETR_EXTERNAL
//imwrite("dstnull.bmp", dst);
if ((contours.size() > 0))
{
bool flag = false;
int num = 0;
for (unsigned int i = 0; i < contours.size(); i++)
{
for (int j = 0; j < contours[i].size(); j++)
{
if (contours[i][j] == points20[0])
{
num = i;
flag = true;
break;
}
}
if (flag)
break;
}
points = contours[num];
std::sort(points.begin(), points.end(), SortByY);
setPoints(points, dst);
//imwrite("canny2.bmp", dst);
//imshow("src_gray1", dst);
}
qDebug()<< "ContoursPointSum:"<<ContoursPointSum;
if (points.size() > ContoursPointSum) // parameter 6: minimum contour point count
{
int maxY = points[points.size()-1].y;
int maxX = points[points.size() - 1].x;
if ((maxX <= 50) || (maxX >= (int)src_gray.cols - 50))
{
return 0;
}
result = true;
std::vector<cv::Point> points_maxY;
for (int i = 0; i < points.size(); i++)
{
if (maxY == points[i].y)
{
points_maxY.push_back(points[i]);
}
}
maxX = 0;
for (int i = 0; i < points_maxY.size(); i++)
{
maxX = maxX + points_maxY[i].x;
}
maxX = maxX / points_maxY.size();
*xPos = maxX;
Point tmpPoint;
tmpPoint.x = maxX;
tmpPoint.y = maxY;
circle(src_gray, tmpPoint, 5, Scalar(255, 255, 255), 1);
imwrite("canny666.bmp", src_gray);
}
else
{
return 0;
}
}
return 0;
}
int CMvCamera::getCameraPara(QString DevName)//CameraPara
{
QSettings *settings;// QSettings object for reading the parameter file
settings = new QSettings ("SerialDevSet.ini", QSettings::IniFormat);// open SerialDevSet.ini in INI format
QString strKey;
strKey = DevName+"/ParaThreshold";
ParaThreshold = settings->value(strKey).toInt();
strKey = DevName+"/YOffsetPos";
YOffsetPos = static_cast<unsigned short>(settings->value(strKey).toInt());
strKey = DevName+"/ContinuePointSum";
ContinuePointSum = static_cast<unsigned short>(settings->value(strKey).toInt());
strKey = DevName+"/YPointValueDiff";
YPointValueDiff = static_cast<unsigned short>(settings->value(strKey).toInt());
strKey = DevName+"/YPointValueDiff_6";
YPointValueDiff_6 = static_cast<unsigned short>(settings->value(strKey).toInt());
strKey = DevName+"/ContoursPointSum";
ContoursPointSum = static_cast<unsigned short>(settings->value(strKey).toInt());
strKey = DevName+"/LinePointCount";
LinePointCount = static_cast<unsigned short>(settings->value(strKey).toInt());
strKey = DevName+"/SumValueThresh";
SumValueThresh = static_cast<unsigned short>(settings->value(strKey).toInt());
strKey = DevName+"/StepOfFalse";
StepOfFalse = static_cast<unsigned short>(settings->value(strKey).toInt());
strKey = DevName+"/StepOfTrue";
StepOfTrue = static_cast<unsigned short>(settings->value(strKey).toInt());
strKey = DevName+"/ExposureTimeFindEdge";
ExposureTimeFindEdge = static_cast<unsigned short>(settings->value(strKey).toInt());
strKey = DevName+"/ExposureTime";
ExposureTime = static_cast<unsigned short>(settings->value(strKey).toInt());
delete settings;// release the QSettings instance allocated above
return 0;
}
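// Illustrative sketch, not part of the original file, of the SerialDevSet.ini section this
// function reads; the key names come from the code above, the values are only placeholders.
//
// [CameraPara]
// ParaThreshold=100
// YOffsetPos=10
// ContinuePointSum=20
// YPointValueDiff=5
// YPointValueDiff_6=30
// ContoursPointSum=200
// LinePointCount=50
// SumValueThresh=30
// StepOfFalse=1
// StepOfTrue=1
// ExposureTimeFindEdge=5000
// ExposureTime=2000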
int CMvCamera::ClearImageBuffer()
{
return MV_CC_ClearImageBuffer(m_hDevHandle);
}
std::vector<cv::Point> CMvCamera::getLinePoints(Mat img)
{
std::vector<std::vector<cv::Point>> contours;
std::vector<cv::Vec4i> hierarchy;
std::vector<cv::Point> points;
std::vector<cv::Point> points20;
std::vector<cv::Point> points_dst;
Mat src_gray;
img.copyTo(src_gray);
blur(src_gray, src_gray, Size(7, 7));
threshold(src_gray, src_gray, ParaThreshold, 255, THRESH_BINARY); // parameter 1: binarization threshold
imwrite("canny0.bmp", src_gray);
Mat dst = Mat::zeros(src_gray.size(), CV_8UC1);
Point p_tmp, p_start;
unsigned char pix_next;
int x_tmp = 100;
while (1)
{
bool flag = false;
for (int j = src_gray.rows / 2; j > 0; j--) // parameter 2: starting Y position of the scan
{
pix_next = src_gray.at<unsigned char>(j - 1, x_tmp);
if (pix_next == 0)
{
p_start.x = x_tmp;
p_start.y = j - 1;
flag = true;
break;
}
}
if (!flag)
{
return points_dst;
}
int x2, y2;
x2 = p_start.x;
y2 = p_start.y + YOffsetPos; // parameter 3: Y offset
for (int k = 0; k < ContinuePointSum; k++) // parameter 4: number of consecutive valid points used to reject noise
{
for (int j = y2; j > 0; j--)
{
pix_next = src_gray.at<unsigned char>(j - 1, x2);
if (pix_next == 0)
{
p_tmp.x = x2;
p_tmp.y = j - 1;
points20.push_back(p_tmp);
break;
}
}
x2++;
}
if (points20.empty())
{
return points_dst;
}
int minY = p_start.y, maxY = p_start.y;
for (int k = 0; k < (int)points20.size(); k++) // parameter 4: iterate only over the points actually collected (may be fewer than ContinuePointSum)
{
if (minY > points20[k].y)
{
minY = points20[k].y;
}
if (maxY < points20[k].y)
{
maxY = points20[k].y;
}
}
if ((maxY - minY) < YPointValueDiff) // parameter 5: allowed Y spread
break;
x_tmp = x_tmp + ContinuePointSum; // parameter 4: advance by the number of consecutive valid points
std::vector<cv::Point>().swap(points20);
if (x_tmp >= src_gray.cols)
return points_dst;
}
if (points20.empty())
{
return points_dst;
}
{
Mat tmpsrtgray;
src_gray.copyTo(tmpsrtgray);
Canny(tmpsrtgray, src_gray, 70, 200);
imwrite("canny.bmp", src_gray);
Mat shape1;
findContours(src_gray, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_NONE);//RETR_EXTERNAL
//imwrite("dstnull.bmp", dst);
if ((contours.size() > 0))
{
bool flag = false;
int num = 0;
for (unsigned int i = 0; i < contours.size(); i++)
{
for (int j = 0; j < contours[i].size(); j++)
{
if (contours[i][j] == points20[0])
{
num = i;
flag = true;
break;
}
}
if (flag)
break;
}
points = contours[num];
std::sort(points.begin(), points.end(), SortByX);
//setPoints(points, dst);
//imwrite("canny2.bmp", dst);
//imshow("src_gray1", dst);
}
points_dst = points;
return points_dst;
}
}
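// GetCrossPoint: fits a reference line to the edge found in the central third of the image,
// then walks the edge extracted from the wider central region from the middle towards the
// chosen side until a point clearly leaves that line; it returns the x position of the break
// (measured from the right edge when left == false) and writes its y coordinate into *top.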
int CMvCamera::GetCrossPoint(Mat& src, bool left,int * top)
{
std::vector<cv::Point> points;
std::vector<cv::Point> points_dst;
Mat src_gray = src(Rect(src.cols / 3, 0, src.cols / 3, src.rows));
points_dst = getLinePoints(src_gray);
if (points_dst.size() == 0)
return -1;
Mat src_6 = src(Rect(src.cols / 6, 0, src.cols *4/ 6, src.rows));
points = getLinePoints(src_6);
if (points.size() == 0)
return -1;
cv::Vec4f line;
double k;
double b;
double cos_theta;
double sin_theta;
double x0, y0;
fitLine(points_dst, line, DIST_L2, 0, 0.01, 0.01);
cos_theta = line[0];
sin_theta = line[1];
x0 = line[2];
y0 = line[3];
k = sin_theta / cos_theta;
b = y0 - k * x0;
//cv::Point pt1((int)(-b / k), 0);
//cv::Point pt2((int)((dst.rows - b) / k), dst.rows);
////imshow("src_gray1", dst);
//Mat testdst;
//testdst = cv::imread("2_jiaodian.bmp", 0);
////cv::line(src_gray, pt1, pt2, cv::Scalar(255, 255, 255), 3);
//cv::Point pt3((int)(-(b- src.cols / 4 * k) / k), 0);
//cv::Point pt4((int)((dst.rows - (b - src.cols / 4 * k)) / k), dst.rows);
//cv::line(testdst, pt3, pt4, cv::Scalar(255, 255, 255), 3);
//cv::line(dst, pt1, pt2, cv::Scalar(255, 255, 255), 3);
//imshow("dst", dst);
////imshow("src_gray2", src);
//imshow("src_gray", testdst);
////imwrite("src_gray3.bmp", src_gray);
double A, B, C;
A = k;
B = -1;
C = b - src.cols / 6 * k;
cv::Point pt3((int)(-(b- src.cols / 6 * k) / k), 0);
cv::Point pt4((int)((src.rows - (b - src.cols / 6 * k)) / k), src.rows);
Mat dst = Mat::zeros(src_6.size(), CV_8UC1);
setPoints(points, dst);
/*cv::line(dst, pt3, pt4, cv::Scalar(255, 255, 255), 1);
imwrite("src_6.bmp", dst);*/
unsigned int i;
double dis;
if (left)
{
for (i = points.size() / 2; i > 0; i--)
{
dis = abs(A * points[i].x + B * points[i].y + C) / sqrt(A * A + B * B);
if (dis > 3)
{
if ((i - 10) > 0)
{
double dis2 = abs(A * points[i - 1].x + B * points[i - 1].y + C) / sqrt(A * A + B * B);
if (dis2 > 2*dis)
break;
}
else
break;
}
}
if (i > 0)
{
//circle(dst, points[i-1],3,Scalar(255), -1, 8);
cv::Point pt1(points[i + 1].x, points[i + 1].y + 20);
cv::Point pt2(points[i + 1].x, points[i + 1].y - 20);
cv::line(dst, pt1, pt2, cv::Scalar(255, 255, 255), 1);
imwrite("src_left.jpg", dst);
*top = points[i + 1].y;
return points[i + 1].x;
}
else
return -1;
}
else
{
for (i = points.size() / 2; i < points.size(); i++)
{
dis = abs(A * points[i].x + B * points[i].y + C) / sqrt(A * A + B * B);
if (dis > 3)
{
if ((i + 10) < points.size())
{
double dis2 = abs(A * points[i + 1].x + B * points[i + 1].y + C) / sqrt(A * A + B * B);
if (dis2 > 2*dis)
break;
}
else
break;
}
}
if (i < points.size())
{
//circle(dst, points[i-1],3,Scalar(255), -1, 8);
cv::Point pt1(points[i - 1].x, points[i - 1].y + 20);
cv::Point pt2(points[i - 1].x, points[i - 1].y - 20);
cv::line(dst, pt1, pt2, cv::Scalar(255, 255, 255), 1);
imwrite("src_right.bmp", dst);
*top = points[i - 1].y;
return dst.cols - points[i - 1].x;
}
else
return -1;
}
}
int CMvCamera::getLinePoints_bottom(Mat& img)
{
std::vector<std::vector<cv::Point>> contours;
std::vector<cv::Vec4i> hierarchy;
std::vector<cv::Point> points;
std::vector<cv::Point> points20;
std::vector<cv::Point> points_dst;
Mat src_gray = img(Rect(img.cols / 3, 0, img.cols / 3, img.rows));
//Mat src_gray;
//img.copyTo(src_gray);
blur(src_gray, src_gray, Size(7, 7));
threshold(src_gray, src_gray, ParaThreshold, 255, THRESH_BINARY); // parameter 1: binarization threshold
imwrite("canny0.bmp", src_gray);
Mat dst = Mat::zeros(src_gray.size(), CV_8UC1);
Point p_tmp, p_start;
unsigned char pix_next;
int x_tmp = 100;
while (1)
{
bool flag = false;
for (int j = src_gray.rows / 3; j < src_gray.rows - 1; j++) // parameter 2: starting Y position; stop at rows-1 because row j+1 is read below
{
pix_next = src_gray.at<unsigned char>(j + 1, x_tmp);
if (pix_next == 0)
{
p_start.x = x_tmp;
p_start.y = j + 1;
flag = true;
break;
}
}
if (!flag)
{
return -1;
}
int x2, y2;
x2 = p_start.x;
y2 = p_start.y - YOffsetPos; // parameter 3: Y offset
for (int k = 0; k < 20; k++) // parameter 4: number of consecutive valid points used to reject noise
{
for (int j = y2; j < src_gray.rows - 1; j++) // stop at rows-1 because row j+1 is read below
{
pix_next = src_gray.at<unsigned char>(j + 1, x2);
if (pix_next == 0)
{
p_tmp.x = x2;
p_tmp.y = j ;
points20.push_back(p_tmp);
break;
}
}
x2++;
}
if (points20.empty())
{
return -1;
}
int minY = p_start.y, maxY = p_start.y;
for (int k = 0; k < (int)points20.size(); k++) // parameter 4: iterate only over the points actually collected
{
if (minY > points20[k].y)
{
minY = points20[k].y;
}
if (maxY < points20[k].y)
{
maxY = points20[k].y;
}
}
if ((maxY - minY) < 30) // parameter 5: allowed Y spread
break;
x_tmp = x_tmp + 20; // parameter 4: advance by the number of consecutive valid points
std::vector<cv::Point>().swap(points20);
if (x_tmp >= src_gray.cols)
return -1;
}
if (points20.empty())
{
return -1;
}
{
Mat tmpsrtgray;
src_gray.copyTo(tmpsrtgray);
Canny(tmpsrtgray, src_gray, 70, 200);
imwrite("canny.bmp", src_gray);
Mat shape1;
findContours(src_gray, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_NONE);//RETR_EXTERNAL
//imwrite("dstnull.bmp", dst);
if ((contours.size() > 0))
{
bool flag = false;
int num = 0;
for (unsigned int i = 0; i < contours.size(); i++)
{
for (int j = 0; j < contours[i].size(); j++)
{
if (contours[i][j] == points20[0])
{
num = i;
flag = true;
break;
}
}
if (flag)
break;
}
points = contours[num];
std::sort(points.begin(), points.end(), SortByX);
setPoints(points, dst);
imwrite("canny2.bmp", dst);
//imshow("src_gray1", dst);
}
for (int i = 0; i < points.size(); i++)
{
if (points[i].x == (int)src_gray.cols / 2)
return points[i].y;
}
return -1;
}
}
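// calc_arc_bypoints3: fits a circle through three 3D points by intersecting the plane that
// contains them with the two perpendicular-bisector planes of the point pairs; returns 1 on
// success (writing the center and radius) and 0 when the points are (nearly) collinear.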
int CMvCamera::calc_arc_bypoints3(double* points0, double* points1, double* points2, double* center, double* radius)
{
double x1 = points0[0],
x2 = points1[0],
x3 = points2[0];
double y1 = points0[1],
y2 = points1[1],
y3 = points2[1];
double z1 = points0[2],
z2 = points1[2],
z3 = points2[2];
double a1 = (y1 * z2 - y2 * z1 - y1 * z3 + y3 * z1 + y2 * z3 - y3 * z2),
b1 = -(x1 * z2 - x2 * z1 - x1 * z3 + x3 * z1 + x2 * z3 - x3 * z2),
c1 = (x1 * y2 - x2 * y1 - x1 * y3 + x3 * y1 + x2 * y3 - x3 * y2),
d1 = -(x1 * y2 * z3 - x1 * y3 * z2 - x2 * y1 * z3 + x2 * y3 * z1 + x3 * y1 * z2 - x3 * y2 * z1);
double a2 = 2 * (x2 - x1),
b2 = 2 * (y2 - y1),
c2 = 2 * (z2 - z1),
d2 = x1 * x1 + y1 * y1 + z1 * z1 - x2 * x2 - y2 * y2 - z2 * z2;
double a3 = 2 * (x3 - x1),
b3 = 2 * (y3 - y1),
c3 = 2 * (z3 - z1),
d3 = x1 * x1 + y1 * y1 + z1 * z1 - x3 * x3 - y3 * y3 - z3 * z3;
double xyz_dev = a1 * b2 * c3 - a1 * b3 * c2 - a2 * b1 * c3 + a2 * b3 * c1 + a3 * b1 * c2 - a3 * b2 * c1;
if (fabs(xyz_dev) < 1e-6) // degenerate (collinear) points: the determinant is ~0
{
return 0;
}
center[0] = -(b1 * c2 * d3 - b1 * c3 * d2 - b2 * c1 * d3 + b2 * c3 * d1 + b3 * c1 * d2 - b3 * c2 * d1)
/ xyz_dev;
center[1] = (a1 * c2 * d3 - a1 * c3 * d2 - a2 * c1 * d3 + a2 * c3 * d1 + a3 * c1 * d2 - a3 * c2 * d1)
/ xyz_dev;
center[2] = -(a1 * b2 * d3 - a1 * b3 * d2 - a2 * b1 * d3 + a2 * b3 * d1 + a3 * b1 * d2 - a3 * b2 * d1)
/ xyz_dev;
*radius = sqrt(pow((points0[0] - center[0]), 2) + pow((points0[1] - center[1]), 2) + pow((points0[2] - center[2]), 2));
return 1;
}
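// Illustrative sketch, not part of the original file: the three-point fit above, applied to
// (1,0,0), (0,1,0) and (-1,0,0), yields center (0,0,0) and radius 1. It assumes the method is
// publicly accessible on the singleton, like the other wrappers in this file.
static void CircleFitExample()
{
    double p0[3] = { 1.0, 0.0, 0.0 };
    double p1[3] = { 0.0, 1.0, 0.0 };
    double p2[3] = { -1.0, 0.0, 0.0 };
    double center[3] = { 0.0, 0.0, 0.0 };
    double radius = 0.0;
    if (CMvCamera::instance()->calc_arc_bypoints3(p0, p1, p2, center, &radius) == 1)
    {
        qDebug() << "center:" << center[0] << center[1] << center[2] << "radius:" << radius;
    }
}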