Preface:
My camera model: MV-CE060-10UM
Environment: Qt 5.9.0 + OpenCV 2.4.11 (compiler: VS2013)
First off, getting this camera to work was genuinely hard, especially under my environment constraints. It took me close to a month. I contacted the vendor's support engineers during that time, but Hikvision's demos have no OpenCV example, let alone Qt. So I turned to searching online and read the relevant write-ups to death, sometimes opening the same article seven or eight times. I even paid someone on a programmers' freelance site to write a demo for me (when it comes to programming I'm barely even a beginner, and I couldn't have written a complete demo myself), and asked a computer-vision grad student at my school, but no luck there either; my environment was just too restrictive. In the end I gritted my teeth and worked from scattered demos plus Hikvision's SDK development manual, and finally... tonight I captured my first image. After a bit more tweaking, I can now display the image stream. OMG!
Here's the code:
mainwindow.cpp:
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QImageReader>
#include <QLabel>
#include <QDebug>
#include <myhkcamera.h>
#include <conio.h>
#include <stdio.h>
#include <Windows.h>
MainWindow::MainWindow(QWidget *parent) :
QMainWindow(parent),
ui(new Ui::MainWindow)
{
ui->setupUi(this);
timer = new QTimer(this);
timer2 =new QTimer(this);
// image = new QImage();
connect(timer,SIGNAL(timeout()),this,SLOT(readFarme()));
connect(timer2,SIGNAL(timeout()),this,SLOT(readHK()));
connect(ui->open,SIGNAL(clicked()),this,SLOT(on_open_triggered()));
connect(ui->stop,SIGNAL(clicked()),this,SLOT(on_stop_triggered()));
connect(ui->open_picture,SIGNAL(clicked()),this,SLOT(on_open_picture_triggered()));
connect(ui->open_HKcarmera,SIGNAL(clicked()),this,SLOT(on_open_HKcarmera_triggered()));
// qDebug()<< carmera.connectCamera("00E63241168");
// qDebug()<< carmera.startCamera();
// Mat image;
// qDebug()<< carmera.setTriggerMode(0);
// qDebug()<< carmera.softTrigger();
// qDebug()<< carmera.ReadBuffer(image);
// // resizeWindow("image",200,200);
// imshow("Image",image);
// connect(ui->open_HKcarmera,SIGNAL(clicked()),this,SLOT(readHK()));
//resizeWindow("image",200,200);
// imshow("Image",image);
// QImage img = QImage((const unsigned char*)(dstt.data),dstt.cols, dstt.rows, QImage::Format_RGB888);
//Scale the image to fit the label's size
// img = img.scaled(ui->label_2->size(), Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
// ui->label_2->setPixmap(QPixmap::fromImage(img));
}
MainWindow::~MainWindow()
{
delete ui;
}
void MainWindow::on_open_triggered()
{
cap.open(0);
timer->start(33);
}
void MainWindow::on_stop_triggered()
{
// Stop reading frames.
timer->stop();
cap.release();
ui->cameraView->clear();
}
void MainWindow::readFarme()
{
// cap.open(0);
// timer->start(33);
cap.read(src_image);
// qDebug()<<src_image.channels();
// qDebug()<<src_image.type();
cvtColor(src_image, src_image, CV_BGR2GRAY);
// Canny_Demo(0,0);
QImage imag = MatImageToQt(src_image);
// imag = imag.scaled(src_image.cols*0.5,src_image.rows*0.5,Qt::IgnoreAspectRatio,Qt::SmoothTransformation); //scale the image
// QImage image2 = imag.convertToFormat(QImage::Format_Indexed8); //grayscale conversion -- convert the image on the Qt side
// image2.setColorCount(256);
// for(int i = 0; i < 256; i++)
// {
// image2.setColor(i, qRgb(i, i, i));
// }
// Mat dst = QtToMatImage(image2);
// QImage imag3 = MatImageToQt(dst);
ui->cameraView->setPixmap(QPixmap::fromImage(imag));
}
//Convert a Mat to a QImage
QImage MainWindow::MatImageToQt(const Mat &src)
{
//CV_8UC1: 8-bit unsigned, single channel -- a grayscale image
QImage qImage;
if(src.type() == CV_8UC1)
{
qImage =QImage((const unsigned char*)(src.data),
src.cols,src.rows,
src.cols*src.channels(),
QImage::Format_Indexed8);
return qImage;
}
//3-channel color image
else if(src.type() == CV_8UC3)
{
//get a pointer to the image data
const uchar *pSrc = (const uchar*)src.data;
//construct the QImage from src
QImage qImage(pSrc,src.cols,src.rows,src.step,QImage::Format_RGB888);
//swap the red and blue channels without changing the underlying data
return qImage.rgbSwapped();
}
//4-channel image: RGB color with an alpha channel
else if(src.type() == CV_8UC4)
{
const uchar *pSrc = (const uchar*)src.data;
QImage qImage(pSrc, src.cols, src.rows, src.step, QImage::Format_ARGB32);
//return a deep copy so the QImage owns its data
return qImage.copy();
}
else
{
return QImage();
}
}
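A side note on the CV_8UC1 branch above: it builds a Format_Indexed8 image without setting a color table, which happens to display fine for me but can come out with an odd palette on other systems. Since Qt 5.5 there is QImage::Format_Grayscale8, which needs no palette at all. A minimal sketch of that variant (my own assumption, not what the code above uses):

#include <QImage>
#include <opencv2/core/core.hpp>
//Hedged alternative for grayscale Mats, Qt 5.5 and later only.
static QImage GrayMatToQImage(const cv::Mat &src)
{
    //Format_Grayscale8 treats each byte directly as a gray level, so no
    //color table is needed; copy() makes the QImage own its pixels so it
    //stays valid after the Mat is released.
    QImage gray(src.data, src.cols, src.rows,
                static_cast<int>(src.step), QImage::Format_Grayscale8);
    return gray.copy();
}

I'm sticking with Format_Indexed8 for now since it works on my machine; this is just what I'd try first if the grayscale display ever looked wrong.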
Mat MainWindow::QtToMatImage(QImage & qim) //Convert a QImage to a Mat
{
Mat mat;
switch(qim.format())
{
case QImage::Format_RGB888:
mat = Mat(qim.height(), qim.width(),
CV_8UC3,(void*)qim.constBits(),qim.bytesPerLine());
break;
case QImage::Format_ARGB32_Premultiplied:
mat = Mat(qim.height(), qim.width(),
CV_8UC4,(void*)qim.constBits(),qim.bytesPerLine());
break;
default:
break;
}
return mat;
}
void MainWindow::Canny_Demo(int,void*)
{
int threshold_value = 50;
blur(src_image, src, Size(3, 3), Point(-1, -1), BORDER_DEFAULT);
Canny(src, src, threshold_value, threshold_value*2,3,false);
}
void MainWindow::on_open_picture_triggered()
{
QImage imgReader; //("D:\tutu1.jpg");
imgReader.load("D:\\tutu2.jpg");
ui->label->setPixmap(QPixmap::fromImage(imgReader));
// QPixmap img;
// img.load("D:\\tutu2.jpg"); // "D:\tutu2.jpg" is wrong -- use \\ in the absolute path
// ui->label->clear();
// ui->label->setPixmap(img);
}
void MainWindow::on_open_HKcarmera_triggered()
{
//std::cout<<"slot entered"<<std::endl;
qDebug()<< carmera.connectCamera("00E63241168");
qDebug()<< carmera.startCamera();
timer2->start(100);
// qDebug()<< carmera.connectCamera("00E63241168");
// qDebug()<< carmera.startCamera();
// Mat image;
// qDebug()<< carmera.setTriggerMode(0);
// qDebug()<< carmera.softTrigger();
// qDebug()<< carmera.ReadBuffer(image);
// QImage img = QImage((const unsigned char*)(image.data),image.cols, image.rows, image.cols*image.channels(),QImage::Format_Indexed8);
// //Scale the image to fit the label
// // img = img.scaled(ui->label_2->size(), Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
// ui->label_2->setPixmap(QPixmap::fromImage(img));
// qImage =QImage((const unsigned char*)(src.data),
// src.cols,src.rows,
// src.cols*src.channels(),
// QImage::Format_Indexed8);
}
void MainWindow::readHK()
{
// qDebug()<< carmera.connectCamera("00E63241168");
// qDebug()<< carmera.startCamera();
Mat image;
qDebug()<< carmera.setTriggerMode(0);
qDebug()<< carmera.softTrigger();
qDebug()<< carmera.ReadBuffer(image);
// cvtColor(image, image, CV_BGR2GRAY);
QImage Qimag = MatImageToQt(image);
Qimag = Qimag.scaled(image.cols*0.2,image.rows*0.2,Qt::IgnoreAspectRatio,Qt::SmoothTransformation); //scale the image down for display
ui->label_2->setPixmap(QPixmap::fromImage(Qimag));
// QImage image = DiaoYongHK(image);
// qImage =QImage((const unsigned char*)(src.data),
// src.cols,src.rows,
// src.cols*src.channels(),
// QImage::Format_Indexed8);
// ui->cameraView->setPixmap(QPixmap::fromImage(imag));
}
void WaitForKeyPress(void)
{
while(!_kbhit())
{
Sleep(10);
}
_getch();
}
bool PrintDeviceInfo(MV_CC_DEVICE_INFO* pstMVDevInfo)
{
if (NULL == pstMVDevInfo)
{
printf("The Pointer of pstMVDevInfo is NULL!\n");
return false;
}
if (pstMVDevInfo->nTLayerType == MV_GIGE_DEVICE)
{
int nIp1 = ((pstMVDevInfo->SpecialInfo.stGigEInfo.nCurrentIp & 0xff000000) >> 24);
int nIp2 = ((pstMVDevInfo->SpecialInfo.stGigEInfo.nCurrentIp & 0x00ff0000) >> 16);
int nIp3 = ((pstMVDevInfo->SpecialInfo.stGigEInfo.nCurrentIp & 0x0000ff00) >> 8);
int nIp4 = (pstMVDevInfo->SpecialInfo.stGigEInfo.nCurrentIp & 0x000000ff);
printf("CurrentIp: %d.%d.%d.%d\n" , nIp1, nIp2, nIp3, nIp4);
printf("UserDefinedName: %s\n\n" , pstMVDevInfo->SpecialInfo.stGigEInfo.chUserDefinedName);
}
else if (pstMVDevInfo->nTLayerType == MV_USB_DEVICE)
{
printf("UserDefinedName: %s\n", pstMVDevInfo->SpecialInfo.stUsb3VInfo.chUserDefinedName);
printf("Serial Number: %s\n", pstMVDevInfo->SpecialInfo.stUsb3VInfo.chSerialNumber);
printf("Device Number: %d\n\n", pstMVDevInfo->SpecialInfo.stUsb3VInfo.nDeviceNumber);
}
else
{
printf("Not support.\n");
}
return true;
}
void __stdcall ImageCallBackEx(unsigned char * pData, MV_FRAME_OUT_INFO_EX* pFrameInfo, void* pUser)
{
if (pFrameInfo)
{
printf("Get One Frame: Width[%d], Height[%d], nFrameNum[%d]\n",
pFrameInfo->nWidth, pFrameInfo->nHeight, pFrameInfo->nFrameNum);
}
}
myhkcamera.cpp:
#include "myhkcamera.h"
#include "mainwindow.h"
#include <QTimer>
MV_CC_DEVICE_INFO_LIST m_stDevList; //device info list structure, used to store the enumerated devices
MV_CC_DEVICE_INFO* m_Device=NULL; //the selected device
MyHKcamera::MyHKcamera()
{
m_hDevHandle = NULL;
}
MyHKcamera::~MyHKcamera()
{
if (m_hDevHandle)
{
MV_CC_DestroyHandle(m_hDevHandle);
m_hDevHandle = NULL;
}
}
//Enumerate the device list
int MyHKcamera::EnumDevices(MV_CC_DEVICE_INFO_LIST* pstDevList)
{
int temp= MV_CC_EnumDevices(MV_GIGE_DEVICE | MV_USB_DEVICE, pstDevList);
if (MV_OK != temp)
{
printf("error:EnumDevices fail [%x]\n",temp);
return -1;
}
return 0;
}
//Connect to the camera
int MyHKcamera::connectCamera(string id)
{
int temp= EnumDevices(&m_stDevList);
if(temp!=0)
//EnumDevices returns 0 on success; any nonzero value means enumeration failed
return -1;
if(m_stDevList.nDeviceNum==0)
//no cameras were found
return 2;
for (unsigned int i = 0; i < m_stDevList.nDeviceNum; i++)
{
printf("[device %d]:\n", i); //m_stDevList.nDeviceNum=1
MV_CC_DEVICE_INFO* pDeviceInfo = m_stDevList.pDeviceInfo[i];
if (NULL == pDeviceInfo)
{
continue;
// break;
}
// if(id== (char*)pDeviceInfo->SpecialInfo.stGigEInfo.chUserDefinedName||id== (char*)pDeviceInfo->SpecialInfo.stGigEInfo.chSerialNumber)
if(id== (char*)pDeviceInfo->SpecialInfo.stUsb3VInfo.chUserDefinedName||id== (char*)pDeviceInfo->SpecialInfo.stUsb3VInfo.chSerialNumber)
{
m_Device= m_stDevList.pDeviceInfo[i];
//printf("[ID %d]:\n", 10);
break;
}else
{
//printf("[ID %d]:\n", 00);
continue;
}
}
if(m_Device==NULL)
{
//no camera matching the given name or serial number was found
return 3;
}
temp = MV_CC_CreateHandle(&m_hDevHandle, m_Device); //create the device handle
if(temp !=0)
return -1;
temp = MV_CC_OpenDevice(m_hDevHandle); //open the device
if (temp !=0)
{
MV_CC_DestroyHandle(m_hDevHandle);
m_hDevHandle = NULL;
return -1;
}else
{
setTriggerMode(1); //enable trigger mode
printf("[find camera %x]:\n", 11);
return 0;
}
}
//Start camera acquisition
int MyHKcamera::startCamera()
{
if ( !MV_CC_IsDeviceConnected(m_hDevHandle) ) //check whether the camera is still connected
{
printf("connect fail\n");
return 3;
}
}
int temp=MV_CC_StartGrabbing(m_hDevHandle); //start grabbing
if(temp!=0)
{
return -1;
}else
{
return 0;
}
}
//Send a software trigger
int MyHKcamera::softTrigger()
{
int tempValue= MV_CC_SetCommandValue(m_hDevHandle, "TriggerSoftware");
printf("softwae error code %x\n",tempValue);
if(tempValue!=0)
{
return -1;
}else
{
return 0;
}
}
//Read one frame from the camera into a Mat
int MyHKcamera::ReadBuffer(Mat &image)
{
int tempValue;
unsigned int nRecvBufSize = 0;
MVCC_INTVALUE stParam;
memset(&stParam, 0, sizeof(MVCC_INTVALUE));
int temp = MV_CC_GetIntValue(m_hDevHandle, "PayloadSize", &stParam);
if (temp != 0)
{
printf("over point%d\n",1);
return -1;
}
nRecvBufSize = stParam.nCurValue;
m_pBufForDriver = (unsigned char *)malloc(nRecvBufSize);
MV_FRAME_OUT_INFO_EX stImageInfo = {0};
tempValue= MV_CC_GetOneFrameTimeout(m_hDevHandle, m_pBufForDriver, nRecvBufSize, &stImageInfo, 700); //actively fetch one frame
if(tempValue!=0)
{
printf("over %x\n",tempValue);
free(m_pBufForDriver); //don't leak the driver buffer when the grab fails
return -1;
}
m_nBufSizeForSaveImage = stImageInfo.nWidth * stImageInfo.nHeight * 3 + 2048;
m_pBufForSaveImage = (unsigned char*)malloc(m_nBufSizeForSaveImage);
bool isMono; //whether the frame is a monochrome pixel format
switch (stImageInfo.enPixelType)
{
case PixelType_Gvsp_Mono8:
case PixelType_Gvsp_Mono10:
case PixelType_Gvsp_Mono10_Packed:
case PixelType_Gvsp_Mono12:
case PixelType_Gvsp_Mono12_Packed:
isMono=true;
break;
default:
isMono=false;
break;
}
if(isMono)
{
//deep-copy so the Mat owns its pixels; the per-frame buffers are freed below
image=Mat(stImageInfo.nHeight,stImageInfo.nWidth,CV_8UC1,m_pBufForDriver).clone();
}
else
{
//convert the pixel format to packed RGB8 for color frames
MV_CC_PIXEL_CONVERT_PARAM stConvertParam = {0};
memset(&stConvertParam, 0, sizeof(MV_CC_PIXEL_CONVERT_PARAM));
stConvertParam.nWidth = stImageInfo.nWidth; //image width
stConvertParam.nHeight = stImageInfo.nHeight; //image height
stConvertParam.pSrcData = m_pBufForDriver; //input data buffer
stConvertParam.nSrcDataLen = stImageInfo.nFrameLen; //input data size
stConvertParam.enSrcPixelType = stImageInfo.enPixelType; //input pixel format
//stConvertParam.enDstPixelType = PixelType_Gvsp_BGR8_Packed; //BGR8 matches OpenCV's usual channel order
stConvertParam.enDstPixelType = PixelType_Gvsp_RGB8_Packed; //output pixel format
stConvertParam.pDstBuffer = m_pBufForSaveImage; //output data buffer
stConvertParam.nDstBufferSize = m_nBufSizeForSaveImage; //output buffer size
MV_CC_ConvertPixelType(m_hDevHandle, &stConvertParam);
image=Mat(stImageInfo.nHeight,stImageInfo.nWidth,CV_8UC3,m_pBufForSaveImage).clone();
}
//free the per-frame buffers; without this every frame leaks memory
free(m_pBufForDriver);
m_pBufForDriver = NULL;
free(m_pBufForSaveImage);
m_pBufForSaveImage = NULL;
return 0;
}
//Set the heartbeat timeout
int MyHKcamera::setHeartBeatTime(unsigned int time)
{
//the minimum heartbeat timeout is 500 ms
if(time<500)
time=500;
int temp=MV_CC_SetIntValue(m_hDevHandle, "GevHeartbeatTimeout", time);
if(temp!=0)
{
return -1;
}
else
{
return 0;
}
}
//Set the exposure time
int MyHKcamera::setExposureTime(float ExposureTimeNum)
{
int temp= MV_CC_SetFloatValue(m_hDevHandle, "ExposureTime",ExposureTimeNum );
if(temp!=0)
return -1;
return 0;
}
//Enable or disable trigger mode
int MyHKcamera::setTriggerMode(unsigned int TriggerModeNum)
{
int tempValue= MV_CC_SetEnumValue(m_hDevHandle,"TriggerMode", TriggerModeNum);
if(tempValue!=0)
{
return -1;
}else
{
return 0;
}
}
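One thing the class above doesn't do yet is shut the camera down cleanly: the destructor only destroys the handle, without stopping acquisition or closing the device first. Below is a minimal sketch of a hypothetical closeCamera() helper (not part of my current code; its declaration would also have to be added to myhkcamera.h):

//Hypothetical helper, not in the original class: stop grabbing and close the
//device before destroying the handle. All three calls are standard MVS SDK APIs.
int MyHKcamera::closeCamera()
{
    if (m_hDevHandle == NULL)
        return -1;
    MV_CC_StopGrabbing(m_hDevHandle);            //stop the acquisition stream
    int temp = MV_CC_CloseDevice(m_hDevHandle);  //close the device
    MV_CC_DestroyHandle(m_hDevHandle);           //release the handle
    m_hDevHandle = NULL;
    return (temp == MV_OK) ? 0 : -1;
}

Calling something like this when the main window closes should leave the camera in a saner state for the next run.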
myhkcamera.h:
#ifndef MYHKCAMERA_H
#define MYHKCAMERA_H
#include"Includes/MvCameraControl.h"
#include"iostream"
#include"opencv2/opencv.hpp"
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
using namespace std;
using namespace cv;
class MyHKcamera
{
public:
MyHKcamera();
~MyHKcamera();
//declarations of the camera wrapper's member functions
static int EnumDevices(MV_CC_DEVICE_INFO_LIST* pstDevList);
//connect to the camera
int connectCamera(string id);
//enable or disable trigger mode
int setTriggerMode(unsigned int TriggerModeNum);
//start camera acquisition
int startCamera();
//send a software trigger
int softTrigger();
//read one frame into a Mat
int ReadBuffer(Mat &image);
//set the heartbeat timeout
int setHeartBeatTime(unsigned int time);
//set the exposure time
int setExposureTime(float ExposureTimeNum);
private:
void* m_hDevHandle;
public:
unsigned char* m_pBufForSaveImage; // buffer for the converted image
unsigned int m_nBufSizeForSaveImage;
unsigned char* m_pBufForDriver; // buffer for raw frames fetched from the driver
unsigned int m_nBufSizeForDriver;
};
#endif // MYHKCAMERA_H
Results
Getting started is always the hardest part! The video displays now, but the program still has a few internal problems:
1) My camera's maximum frame rate is 42 fps, but the frame rate I get through Qt tops out at about 25 fps; I haven't found the cause yet (a possible callback-based alternative is sketched below);
2) After acquiring for a while, the host program suddenly disconnects and the camera then reports a string of errors;
3) Since the whole point is to do image processing on these frames, I don't know whether adding processing functions in Qt will slow the read rate down even further!
Tough going!!
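On problem 1), one thing I suspect (not yet verified) is the polling loop itself: timer2 fires only every 100 ms, and each tick pays for a software trigger plus MV_CC_GetOneFrameTimeout before a frame comes back. The SDK also offers a push model in which it calls a callback for every grabbed frame; mainwindow.cpp above already defines ImageCallBackEx but never registers it. Below is a rough sketch of that route; startCameraContinuous is a hypothetical helper, not part of the class above, and it assumes the callback (or one with the same signature) is visible where the camera is set up:

//Hedged sketch, not tested on my setup: let the SDK push frames instead of
//polling with a QTimer + software trigger.
int MyHKcamera::startCameraContinuous(void* pUser)
{
    setTriggerMode(0); //trigger mode off: the camera free-runs at its own frame rate
    //register the per-frame callback (ImageCallBackEx from mainwindow.cpp, or any
    //function with the same signature); pUser is handed back to the callback
    int temp = MV_CC_RegisterImageCallBackEx(m_hDevHandle, ImageCallBackEx, pUser);
    if (temp != MV_OK)
        return -1;
    //frames now arrive in the callback as soon as grabbing starts
    return (MV_CC_StartGrabbing(m_hDevHandle) == MV_OK) ? 0 : -1;
}

With the callback there is no timer in the loop, so the achievable rate is bounded by the camera and the USB link rather than by the polling interval.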
Finally, so that anyone with similar needs doesn't repeat my mistakes, I'll attach my source code at the end. A beginner really shouldn't have to waste half a month just on getting the camera to work.
[Add link description]