Commit cde6cc7d 张士柳

1 个父辈 7dd24868
......@@ -405,9 +405,9 @@ namespace eyemLib_Sharp
public IntPtr lpszName; // 名称
}
[StructLayout(LayoutKind.Sequential)]
public struct EyemRigidMatrix
{
public double a00; // a00
public double a01; // a01
public double b00; // b00
......@@ -415,6 +415,17 @@ namespace eyemLib_Sharp
public double a11; // a11
public double b10; // b10
}
/// <summary>
/// HSV colour-model segmentation thresholds (H: 0-180, S: 0-255, V: 0-255),
/// marshalled by layout to the native EyemHSVModel struct (four double[3] arrays).
/// The Ext pair holds a second threshold window for hues that wrap around the
/// model boundary (e.g. red); all-zero Ext arrays mean "no extra window".
/// NOTE(review): the fields are reference-type arrays — each must be assigned a
/// 3-element array before the struct is passed to native code, otherwise
/// marshalling fails; confirm all call sites initialize all four arrays.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct EyemHSVModel
{
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 3)]
public double[] dpRangeL, dpRangeU; // extraction lower bound, upper bound [H S V]
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 3)]
public double[] dpRangeLExt, dpRangeUExt; // extra lower/upper bounds (for colours spanning the model wrap-around, e.g. red) [H S V]
}// used for HSV colour-model segmentation (H(0-180), S(0-255), V(0-255))
#endregion
#region 通用
......@@ -537,7 +548,7 @@ namespace eyemLib_Sharp
/// <param name="tpDstImg">结果</param>
/// <returns></returns>
[DllImport("eyemLib.dll", CharSet = CharSet.None, CallingConvention = CallingConvention.Cdecl)]
private static extern int eyemBinThresholdC(EyemImage tpImage, [MarshalAs(UnmanagedType.LPArray)]int[] ipRangeL, [MarshalAs(UnmanagedType.LPArray)]int[] ipRangeU, out EyemImage tpDstImg);
private static extern int eyemBinThresholdC(EyemImage tpImage, EyemHSVModel tpHSVModel, out EyemImage tpDstImg);
/// <summary>
/// 局部自适应二值化
......@@ -1031,7 +1042,7 @@ namespace eyemLib_Sharp
private static extern int setSkipProcessID(int pid);
//圆形mark点定位
[DllImport("eyemLib.dll", CharSet = CharSet.None, CallingConvention = CallingConvention.Cdecl)]
private static extern int eyemMarkerTracing(EyemImage tpImage, double dThreshold, ref EyemOcsFXYR tpCircle, out EyemImage tpDstImg, bool bHighAccuracy = false);
private static extern int eyemMarkerTracing(EyemImage tpImage, EyemHSVModel tpHSVModel, ref EyemOcsFXYR tpCircle, out EyemImage tpDstImg, bool bHighAccuracy = false);
//多功能工具
[DllImport("eyemLib.dll", CharSet = CharSet.None, CallingConvention = CallingConvention.Cdecl)]
private static extern int eyemMulFuncTool(EyemImage tpImage, EyemRect tpRoi, string funcName, double dThreshold, int iNumToIgnore, ref EyemOcsFXYR tpCircle, out EyemImage tpDstImg);
......@@ -1040,7 +1051,7 @@ namespace eyemLib_Sharp
#region 测试专用接口
//测试接口
[DllImport("eyemLib.dll", CharSet = CharSet.None, CallingConvention = CallingConvention.Cdecl)]
private static extern int eyemLibImpl(EyemImage tpImage, out EyemImage tpDstImg);
private static extern int eyemLibImpl(EyemImage tpImage, EyemHSVModel tpHSVModel, out EyemImage tpDstImg);
[DllImport("eyemLib.dll", CharSet = CharSet.None, CallingConvention = CallingConvention.Cdecl)]
private static extern int eyemEdge1dRidgeDetection(EyemImage tpImage);
......@@ -1108,7 +1119,7 @@ namespace eyemLib_Sharp
string file = fileName.Split(new string[] { "\\" }, StringSplitOptions.RemoveEmptyEntries)[2];
flag = eyemInitNNDataCodeModel(".\\darknet\\detect-tiny.cfg", ".\\darknet\\detect-tiny.weights", "", "") & eyemInitNNDetector(".\\darknet\\detect-tiny-label.cfg", ".\\darknet\\detect-tiny-label.weights");
//flag = eyemInitNNDataCodeModel(".\\darknet\\detect-tiny.cfg", ".\\darknet\\detect-tiny.weights", "", "") & eyemInitNNDetector(".\\darknet\\detect-tiny-label.cfg", ".\\darknet\\detect-tiny-label.weights");
//EyemImage image1 = new EyemImage(); EyemImage image2 = new EyemImage();
//flag = eyemMatMalloc(512, 512, 1, "uint8_t", out image1);
......@@ -1126,23 +1137,39 @@ namespace eyemLib_Sharp
//flag = eyemCvtColor(tpDstImg, ColorConversionCodes.COLOR_GRAY2BGR, ref tpDstImg);
flag = eyemLibImpl(image, out tpDstImg);
////红色分割
//EyemHSVModel tpHsvModel = new EyemHSVModel();
//tpHsvModel.dpRangeL = new double[] { 0, 43, 46 }; tpHsvModel.dpRangeU = new double[] { 10, 255, 255 };
//tpHsvModel.dpRangeLExt = new double[] { 156, 43, 46 }; tpHsvModel.dpRangeUExt = new double[] { 180, 255, 255 };
////绿色分割
//EyemHSVModel tpHsvModel = new EyemHSVModel();
//tpHsvModel.dpRangeL = new double[] { 35, 43, 46 }; tpHsvModel.dpRangeU = new double[] { 77, 255, 255 };
//tpHsvModel.dpRangeLExt = new double[] { 0, 0, 0 }; tpHsvModel.dpRangeUExt = new double[] { 0, 0, 0 };
////蓝色分割
//EyemHSVModel tpHsvModel = new EyemHSVModel();
//tpHsvModel.dpRangeL = new double[] { 100, 43, 46 }; tpHsvModel.dpRangeU = new double[] { 124, 255, 255 };
//tpHsvModel.dpRangeLExt = new double[] { 0, 0, 0 }; tpHsvModel.dpRangeUExt = new double[] { 0, 0, 0 };
//flag = eyemLibImpl(image, tpHsvModel, out tpDstImg);
//Bitmap bitmap = eyemCvtToBitmap(tpDstImg);
//if (bitmap != null)
//{
// bitmap.Save(System.Windows.Forms.Application.StartupPath + "\\ResOut\\" + file);
//}
return;
//return;
//flag = eyemNormalize(ref image);
//EyemImage image1 = new EyemImage(); EyemImage image2 = new EyemImage(); EyemImage image3 = new EyemImage();
//eyemDecompose(image, out image1, out image2, out image3);
//flag = eyemBinThresholdC(image, new int[] { 55, 0, 0 }, new int[] { 135, 225, 225 }, out tpDstImg);
//flag = eyemBinThresholdC(image, tpHsvModel, out tpDstImg);
//sw.Restart();
//flag = eyemMarkerTracing(image, 120, ref tpCircle, out tpDstImg, false);
//EyemOcsFXYR tpCircle = new EyemOcsFXYR();
//flag = eyemMarkerTracing(image, tpHsvModel, ref tpCircle, out tpDstImg, false);
//Bitmap bitmap = eyemCvtToBitmap(tpDstImg);
//if (bitmap != null)
......@@ -1152,10 +1179,9 @@ namespace eyemLib_Sharp
//eyemImageFree(ref tpDstImg);
//sw.Stop();
//Console.WriteLine("时间:" + sw.ElapsedMilliseconds.ToString());
//return;
return;
//flag = eyemEdge1dRidgeDetection(image);
//return;
#region Test Blob
//sw.Restart();
......@@ -1312,9 +1338,9 @@ namespace eyemLib_Sharp
#endregion
EyemRect tpRoi = new EyemRect();
tpRoi.iXs = 200; tpRoi.iYs = 200;
tpRoi.iWidth = image.iWidth - 400;
tpRoi.iHeight = image.iHeight - 400;
tpRoi.iXs = 50; tpRoi.iYs = 50;
tpRoi.iWidth = image.iWidth - 100;
tpRoi.iHeight = image.iHeight - 100;
//flag = eyemMulFuncTool(image, tpRoi, "__func1", 65, 75, ref tpCircle, out tpDstImg);
......@@ -1390,7 +1416,7 @@ namespace eyemLib_Sharp
//"IP_SMALL_PARTS","IP_LARGE_PARTS","IP_LONG_PARTS","IP_LOWCONTRAST_PARTS"
//eyemCountObject(image, tpRoi, file.Replace(".png", ""), ipReelNum, out tpDstImg);
//eyemCountObjectIrregularParts(image, tpRoi, file.Replace(".png", ""), "IP_LONG_PARTS", ipReelNum, out tpDstImg);
//eyemCountObjectIrregularParts(image, tpRoi, file.Replace(".png", ""), "IP_SQUARE_PARTS", ipReelNum, out tpDstImg);
//eyemCountObjectE(image, tpRoi, file.Replace(".png", ""), ipReelNum, out tpDstImg);
//eyemCountObjectIrregularPartsE(image, tpRoi, file.Replace(".png", ""), "20210825095751-1", hModelID, ipReelNum, out tpDstImg);
//eyemCountObjectIrregularPartsE(image, tpRoi, file.Replace(".png", ""), "D:\\模板文件\\" + "20210825095751-1.tpl", hModelID, ipReelNum, out tpDstImg);
......
......@@ -1243,13 +1243,15 @@ int eyemBinAutoThreshold(EyemImage tpImage, double dSigma, int iLightDark, int b
return FUNC_OK;
}
int eyemBinThresholdC(EyemImage tpImage, int ipRangeL[3], int ipRangeU[3], EyemImage *tpDstImg)
int eyemBinThresholdC(EyemImage tpImage, EyemHSVModel tpHSVModel, EyemImage *tpDstImg)
{
cv::Mat image = cv::Mat(tpImage.iHeight, tpImage.iWidth, MAKETYPE(tpImage.iDepth, tpImage.iChannels), tpImage.vpImage).clone();
if (image.empty())
return FUNC_IMAGE_NOT_EXIST;
//图像尺寸
const int X = image.cols, Y = image.rows;
//非彩色图像处理
int incn = image.channels();
if (incn > 3) {
cv::cvtColor(image, image, cv::COLOR_BGRA2BGR);
......@@ -1261,9 +1263,19 @@ int eyemBinThresholdC(EyemImage tpImage, int ipRangeL[3], int ipRangeU[3], EyemI
cv::Mat imghsv;
cv::cvtColor(image, imghsv, cv::COLOR_BGR2HSV);
//分割
cv::Mat mask;
cv::inRange(imghsv, cv::Scalar(ipRangeL[0], ipRangeL[1], ipRangeL[2]), cv::Scalar(ipRangeU[0], ipRangeU[1], ipRangeU[2]), mask);
//红色比较特殊,分两个区间
cv::Mat mask1, mask2(cv::Size(X, Y), CV_8UC1, cv::Scalar(0));
cv::inRange(imghsv, cv::Scalar(tpHSVModel.dpRangeL[0], tpHSVModel.dpRangeL[1], tpHSVModel.dpRangeL[2]),
cv::Scalar(tpHSVModel.dpRangeU[0], tpHSVModel.dpRangeU[1], tpHSVModel.dpRangeU[2]), mask1);
//多个分割阈值
if ((tpHSVModel.dpRangeLExt[0] + tpHSVModel.dpRangeLExt[1] + tpHSVModel.dpRangeLExt[2]) != 0 ||
(tpHSVModel.dpRangeUExt[0] + tpHSVModel.dpRangeUExt[1] + tpHSVModel.dpRangeUExt[2]) != 0) {
cv::inRange(imghsv, cv::Scalar(tpHSVModel.dpRangeLExt[0], tpHSVModel.dpRangeLExt[1], tpHSVModel.dpRangeLExt[2]),
cv::Scalar(tpHSVModel.dpRangeUExt[0], tpHSVModel.dpRangeUExt[1], tpHSVModel.dpRangeUExt[2]), mask2);
}
//合并
cv::Mat maskj;
cv::bitwise_or(mask1, mask2, maskj);
//输出结果图像
if (NULL != tpDstImg->vpImage) {
......@@ -1273,7 +1285,7 @@ int eyemBinThresholdC(EyemImage tpImage, int ipRangeL[3], int ipRangeU[3], EyemI
tpDstImg->vpImage = NULL;
}
tpDstImg->iWidth = mask.cols; tpDstImg->iHeight = mask.rows; tpDstImg->iDepth = mask.depth(); tpDstImg->iChannels = mask.channels();
tpDstImg->iWidth = maskj.cols; tpDstImg->iHeight = maskj.rows; tpDstImg->iDepth = maskj.depth(); tpDstImg->iChannels = maskj.channels();
//内存尺寸
int _Size = tpDstImg->iWidth*tpDstImg->iHeight*tpDstImg->iChannels * sizeof(uint8_t);
......@@ -1285,7 +1297,7 @@ int eyemBinThresholdC(EyemImage tpImage, int ipRangeL[3], int ipRangeU[3], EyemI
memset(tpDstImg->vpImage, 0, _Size);
//拷贝数据
memcpy(tpDstImg->vpImage, mask.data, _Size);
memcpy(tpDstImg->vpImage, maskj.data, _Size);
return FUNC_OK;
}
......
......@@ -95,7 +95,7 @@ std::vector<std::string> CodeDetector::Impl::decode(const cv::Mat& img, std::vec
continue;
}
float padding_w = 0.1f, padding_h = 0.1f;
auto min_padding = 35;
auto min_padding = 15;
int padx = (int)cv::max(padding_w * bbox.width, static_cast<float>(min_padding));
int pady = (int)cv::max(padding_h * bbox.height, static_cast<float>(min_padding));
......@@ -115,7 +115,7 @@ std::vector<std::string> CodeDetector::Impl::decode(const cv::Mat& img, std::vec
auto scale_list = getScaleList(cropped_img.cols, cropped_img.rows);
for (auto cur_scale : scale_list) {
//测试用
cv::medianBlur(cropped_img, cropped_img, 3);
//cv::medianBlur(cropped_img, cropped_img, 3);
//缩放图像
cv::Mat scaled_img =
processImageScale(cropped_img, cur_scale);
......
......@@ -504,7 +504,7 @@ int eyemEdge1dGenArc(EyemImage tpImage, EyemOcsDXY tpLineSt, EyemOcsDXY tpLineEd
return FUNC_OK;
}
int eyemEdge1dRidgeDetection(EyemImage tpImage)
int eyemPolarTrans(EyemImage tpImage, EyemOcsDXY tpCenter, int iRadius, int iSapWidth)
{
cv::Mat image = cv::Mat(tpImage.iHeight, tpImage.iWidth, MAKETYPE(tpImage.iDepth, tpImage.iChannels), tpImage.vpImage).clone();
if (image.empty()) {
......@@ -519,71 +519,65 @@ int eyemEdge1dRidgeDetection(EyemImage tpImage)
cv::cvtColor(image, image, cv::COLOR_BGR2GRAY);
}
const int X = image.cols, Y = image.rows;
auto polarity = line_polarity::dark;
float sigma = 1.832, low_thr = 0.0, high_thr = 0.05;
cv::Mat imageF;
image.convertTo(imageF, CV_64F);
std::vector<double> dimg(size_t(Y * X));
std::memcpy(dimg.data(), imageF.ptr(0), image.total() * sizeof(double));
std::vector<std::vector<float>> out;
get_all_derivatives(dimg, sigma, X, Y, out);
std::vector<int32_t> ismax;
std::vector<std::vector<double>> line_out;
cv::Mat esp;
compute_line_points(out, X, Y, polarity, ismax, line_out, low_thr, high_thr, esp);
std::map<int32_t, int32_t> lm;
auto results = compute_contours(ismax, line_out[0], line_out[5], line_out[6], line_out[1], line_out[2], line_out[3], line_out[5], X, Y, sigma, polarity);
float length_total = 0;
for (auto cc = 0; cc < results.size(); cc++) {
const contour& ct = results[cc];
auto length = ct.compute_length();
int ilength = (int)length;
auto miter = lm.find(ilength);
if (miter == lm.end())
lm[ilength] = 0;
auto current = lm[ilength];
lm[ilength] = current + 1;
length_total += length;
//判断越界
if (tpCenter.dX < 0 || tpCenter.dY < 0 || tpCenter.dX>X || tpCenter.dY>Y) {
return 0;
}
float avg_node_count = results.empty() ? 0 : length_total / float(results.size());
std::string output = "AvgLength: " + std::to_string(avg_node_count);
std::string output2 = "Count: " + std::to_string(results.size());
cv::Mat display;
cv::cvtColor(image, display, cv::COLOR_GRAY2BGR);
if (true) {
int factor = 8;
for (auto cc = 0; cc < results.size(); cc++) {
const contour& ct = results[cc];
std::vector<cv::Point> pts;
std::vector<float>::const_iterator rowItr = ct.row.begin();
std::vector<float>::const_iterator colItr = ct.col.begin();
for (auto pp = 0; pp < ct.row.size(); pp++, rowItr++, colItr++) {
pts.emplace_back(static_cast<int>(*colItr * factor), static_cast<int>(*rowItr * factor));
//计算目标尺寸
float C = 2.0f*(float)CV_PI*(float)(iRadius + iSapWidth);
//步长(角度)
float plusStep = 2.0f*asinf((1.0f + (C - (float)cvRound(C)) / (float)cvRound(C)) / (2.0f * (float)(iRadius + +iSapWidth)));
//长度
int iSapLength = cvRound(2.0f*CV_PI / plusStep);
//结果图像
cv::Mat polarMat(cv::Size(iSapLength, 2 * iSapWidth), CV_8UC1, cv::Scalar(0));
for (int n = iSapWidth; n > -iSapWidth; n--)
{
//周长
C = 2.0f*(float)CV_PI*(float)(iRadius + n);
//步长(角度)
plusStep = 2.0f*asinf((1.0f + (C - (float)cvRound(C)) / (float)cvRound(C)) / (2.0f * (float)(iRadius + n)));
//长度
iSapLength = cvRound(2.0f*CV_PI / plusStep);
//线采样
cv::Size szMap(iSapLength, 1);
//采样图像
int m = 0; float *pPolarBuf = new float[szMap.width*szMap.height * sizeof(float_t)];
for (float t = -CV_PI; t < CV_PI; t += plusStep, m++)
{
//路径上的点
float _plusX = float(tpCenter.dX + (float)(iRadius + n)*cos(t));
float _plusY = float(tpCenter.dY + (float)(iRadius + n)*sin(t));
//防止越界
if (_plusX < 1 || _plusX >= X - 2 || _plusY < 1 || _plusY >= Y - 2) {
continue;
}
polylines(display, pts, false, cv::Scalar(0, 0, 255), 1, cv::LINE_AA, int(std::log2(factor)));
}
int start = 100;
auto it = lm.begin();
while (it != lm.end()) {
std::string hout = "histogram [" + std::to_string(it->first) + "] = " + std::to_string(it->second);
cv::putText(display, hout.c_str(), cv::Point(500, start), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(255, 0, 0), 2);
start += 30;
it++;
//整数部分
int x = cvRound(_plusX), y = cvRound(_plusY);
//小数部分
float u = abs(_plusX - ((float)x + 0.5f));
float v = abs(_plusY - ((float)y - 1.0f + 0.5f));
//插值计算灰度值
float gv = (1.0f - v)*(image.ptr<uint8_t>(y - 1)[x] * (1.0f - u) + image.ptr<uint8_t>(y - 1)[x - 1] * u)
+ v*(image.ptr<uint8_t>(y)[x] * (1.0f - u) + image.ptr<uint8_t>(y)[x - 1] * u);
//填入灰度值
pPolarBuf[m] = gv;
}
//仅支持8U类型
cv::Mat polarMat1(szMap, CV_32FC1, pPolarBuf);
polarMat1.convertTo(polarMat1, CV_8UC1);
//插值,默认双线性插值
cv::Mat polarMat2;
cv::resize(polarMat1, polarMat2, cv::Size(polarMat.cols, 1));
//保存到大图
polarMat2.copyTo(polarMat(cv::Rect(0, iSapWidth - n, polarMat.cols, 1)));
//释放资源
delete[] pPolarBuf;
pPolarBuf = NULL;
}
//绘制profileLine
//drawCircle("", tpCenter, iRadius - iSapWidth, cv::Scalar(255, 153, 0), 2);
//drawCircle("", tpCenter, iRadius + iSapWidth, cv::Scalar(255, 153, 0), 2);
return FUNC_OK;
}
......
......@@ -365,6 +365,12 @@ typedef struct {
} EyemOcsDCRUVW; // 用于表示椭圆体
// HSV colour-model segmentation thresholds (H: 0-180, S: 0-255, V: 0-255).
// The Ext pair is a second threshold window for hues that wrap around the
// model boundary (e.g. red); all-zero Ext arrays mean "no extra window".
typedef struct {
double dpRangeL[3], dpRangeU[3]; // extraction lower bound, upper bound [H S V]
double dpRangeLExt[3], dpRangeUExt[3]; // extra extraction lower/upper bounds (for colours spanning the model wrap-around, e.g. red)
} EyemHSVModel; // used for HSV colour-model segmentation (H(0-180), S(0-255), V(0-255))
/********************************************************************************************/
/* 每个特定于源的标头 */
/********************************************************************************************/
......@@ -639,7 +645,7 @@ extern "C" {
// 函数接口
EXPORTS int eyemBinThreshold(EyemImage tpSrcImg, int iLightDark, double dThresh, double dMaxVal, EyemImage *tpDstImg);
EXPORTS int eyemBinThresholdC(EyemImage tpImage, int ipRangeL[3], int ipRangeU[3], EyemImage *tpDstImg);
EXPORTS int eyemBinThresholdC(EyemImage tpImage, EyemHSVModel tpHSVModel, EyemImage *tpDstImg);
EXPORTS int eyemBinAutoThreshold(EyemImage tpSrcImg, double dSigma, int iLightDark, int binMethod, EyemImage *tpDstImg);
EXPORTS int eyemBinNiBlack(EyemImage tpSrcImg, int iType, int iWinSize, double dK, int binarizationMethod, double dR, EyemImage *tpDstImg);
EXPORTS int eyemBinDynThreshold(EyemImage tpSrcImg, EyemImage tpPreImg, double dOffset, int iType, EyemImage *tpDstImg);
......@@ -669,7 +675,7 @@ extern "C" {
EXPORTS int eyemEdge1dGenPosRect(EyemImage tpImage, EyemOcsDXY tpLineSt, EyemOcsDXY tpLineEd, int iWhRoi, int iTransition, double dSigma, double dAmpThresh, IntPtr *hObject);
EXPORTS int eyemEdge1dFindCircle(EyemImage tpImage, EyemOcsDXY tpPoint, int iRadius, int iCapLength, int iCapWidth, int nCalipers, int nFilterSize, int iSearchDirec, double dAmpThreshold, const char *ccTransition, IntPtr *hObject);
EXPORTS int eyemEdge1dFindLine(EyemImage tpImage, EyemOcsDXY tpLineSt, EyemOcsDXY tpLineEd, int iCapLength, int iCapWidth, int nCalipers, int iFilterSize, int iSearchDirec, double dAmpThreshold, const char *ccTransition, IntPtr *hObject);
EXPORTS int eyemEdge1dRidgeDetection(EyemImage tpImage);
EXPORTS int eyemPolarTrans(EyemImage tpImage, EyemOcsDXY tpCenter, int iRadius, int iSapWidth);
EXPORTS bool eyemEdge1dGenMeasureFree(IntPtr hObject);
#ifdef __cplusplus
......@@ -884,9 +890,9 @@ extern "C" {
EXPORTS int eyemReleaseModel(IntPtr &hModelID);
EXPORTS int eyemTrackFeature(EyemImage tpPrevImg, EyemImage tpNextImg, EyemRect3 *tpRois, int iRoiNum, int *ipResults, EyemImage *tpDstImg);
EXPORTS int eyemAOIForTSAV(EyemImage tpRefImg, EyemImage tpNextImg, EyemRect3 *tpRois, int iRoiNum);
EXPORTS int eyemMarkerTracing(EyemImage tpImage, double dThreshold, EyemOcsFXYR *tpCircle, EyemImage *tpDstImg, bool bHighAccuracy = false);
EXPORTS int eyemMarkerTracing(EyemImage tpImage, EyemHSVModel tpHSVModel, EyemOcsFXYR *tpCircle, EyemImage *tpDstImg, bool bHighAccuracy = false);
EXPORTS int eyemMulFuncTool(EyemImage tpImage, EyemRect tpRoi, const char *funcName, double dThreshold, int iNumToIgnore, EyemOcsFXYR *tpCircle, EyemImage *tpDstImg);
EXPORTS int eyemLibImpl(EyemImage tpImage, EyemImage *tpDstImg);
EXPORTS int eyemLibImpl(EyemImage tpImage, EyemHSVModel tpHSVModel, EyemImage *tpDstImg);
EXPORTS int eyemDrawLine(EyemImage tpImage, EyemOcsDABC tpLine);
EXPORTS int eyemDrawCircle(EyemImage tpImage, EyemOcsDXYR tpCircle);
EXPORTS int eyemDrawRectangle(EyemImage tpImag, EyemRect tpRect);
......@@ -895,7 +901,7 @@ extern "C" {
}
#endif
//跳过某接口执行
// 跳过某接口执行
extern "C" __declspec(dllexport) void setSkipProcessID(int pid);
// 日志回调定义
......
......@@ -183,7 +183,6 @@
<ClInclude Include="eyemSmooth.h" />
<ClInclude Include="eyemCodeDetector.h" />
<ClInclude Include="resource.h" />
<ClInclude Include="stegers.h" />
<ClInclude Include="yoloWrapper.h" />
</ItemGroup>
<ItemGroup>
......@@ -208,7 +207,6 @@
<ClCompile Include="eyemSmooth.cpp" />
<ClCompile Include="eyemCodeDetector.cpp" />
<ClCompile Include="libopencv.cpp" />
<ClCompile Include="stegers.cpp" />
<ClCompile Include="yoloWrapper.cpp" />
</ItemGroup>
<ItemGroup>
......
......@@ -69,9 +69,6 @@
<ClInclude Include="eyemNNDetector.h">
<Filter>源文件</Filter>
</ClInclude>
<ClInclude Include="stegers.h">
<Filter>源文件</Filter>
</ClInclude>
<ClInclude Include="eyemMatchShapes.h">
<Filter>源文件</Filter>
</ClInclude>
......@@ -140,9 +137,6 @@
<ClCompile Include="eyemNNDetector.cpp">
<Filter>源文件</Filter>
</ClCompile>
<ClCompile Include="stegers.cpp">
<Filter>源文件</Filter>
</ClCompile>
<ClCompile Include="eyemMatchShapes.cpp">
<Filter>源文件</Filter>
</ClCompile>
......
......@@ -6,6 +6,7 @@
#define __EYEM_MATCHSHAPES_H
#include "eyemLib.h"
#include <algorithm>
class shape_based_matching
{
......@@ -38,5 +39,4 @@ private:
void release_double_matrix(double** &matrix, int size);
};
#endif/* __EYEM_MATCHSHAPES_H */
\ No newline at end of file
......@@ -2494,6 +2494,7 @@ int eyemCountObjectIrregularParts(EyemImage tpImage, EyemRect tpRoi, const char
if (coeff > 1.0f) {
cv::resize(_tplMat, _tplMat, cv::Size(), coeff, coeff);
}
//防止报错
if (_tplMat.cols > _traceMat.cols || _tplMat.rows > _traceMat.rows) {
return FUNC_CANNOT_CALC;
}
......@@ -2711,7 +2712,8 @@ int eyemCountObjectIrregularParts(EyemImage tpImage, EyemRect tpRoi, const char
//更新元件间角度
partDist = (2 * asin(dChordL / (2 * trackRadius))) * 180.0 / PI;
//追踪到了重复的元件
if (trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
if ((trackCenter.x<0 || trackCenter.x>X - 1 ||
trackCenter.y<0 || trackCenter.y>Y - 1) || trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
found = false;
}
else {
......@@ -2836,6 +2838,7 @@ int eyemCountObjectIrregularParts(EyemImage tpImage, EyemRect tpRoi, const char
if (coeff > 1.0f) {
cv::resize(_tplMat, _tplMat, cv::Size(), coeff, coeff);
}
//防止报错
if (_tplMat.cols > _traceMat.cols || _tplMat.rows > _traceMat.rows) {
return FUNC_CANNOT_CALC;
}
......@@ -3046,7 +3049,8 @@ int eyemCountObjectIrregularParts(EyemImage tpImage, EyemRect tpRoi, const char
//更新元件间角度
partDist = (2.0 * asin(dChordL / (2.0 * trackRadius))) * 180.0 / PI;
//追踪到了重复的元件
if (trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
if ((trackCenter.x<0 || trackCenter.x>X - 1 ||
trackCenter.y<0 || trackCenter.y>Y - 1) || trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
found = false;
}
else {
......@@ -3537,6 +3541,10 @@ int eyemCountObjectIrregularParts(EyemImage tpImage, EyemRect tpRoi, const char
if (coeff > 1.0f) {
cv::resize(_tplMat, _tplMat, cv::Size(), coeff, coeff);
}
//防止报错
if (_tplMat.cols > _traceMat.cols || _tplMat.rows > _traceMat.rows) {
return FUNC_CANNOT_CALC;
}
//考虑并行计算两个模板结果
cv::Mat tplResult0;
cv::matchTemplate(_traceMat, _tplMat, tplResult0, cv::TM_SQDIFF_NORMED);
......@@ -3751,7 +3759,8 @@ int eyemCountObjectIrregularParts(EyemImage tpImage, EyemRect tpRoi, const char
//更新元件间角度
partDist = (2 * asin(dChordL / (2 * trackRadius))) * 180.0 / PI;
//追踪到了重复的元件
if (trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
if ((trackCenter.x<0 || trackCenter.x>X - 1 ||
trackCenter.y<0 || trackCenter.y>Y - 1) || trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
found = false;
}
else {
......@@ -3876,6 +3885,10 @@ int eyemCountObjectIrregularParts(EyemImage tpImage, EyemRect tpRoi, const char
if (coeff > 1.0f) {
cv::resize(_tplMat, _tplMat, cv::Size(), coeff, coeff);
}
//防止报错
if (_tplMat.cols > _traceMat.cols || _tplMat.rows > _traceMat.rows) {
return FUNC_CANNOT_CALC;
}
//考虑并行计算两个模板结果
cv::Mat tplResult0;
cv::matchTemplate(_traceMat, _tplMat, tplResult0, cv::TM_SQDIFF_NORMED);
......@@ -4083,7 +4096,8 @@ int eyemCountObjectIrregularParts(EyemImage tpImage, EyemRect tpRoi, const char
//更新元件间角度
partDist = (2.0 * asin(dChordL / (2.0 * trackRadius))) * 180.0 / PI;
//追踪到了重复的元件
if (trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
if ((trackCenter.x<0 || trackCenter.x>X - 1 ||
trackCenter.y<0 || trackCenter.y>Y - 1) || trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
found = false;
}
else {
......@@ -4147,7 +4161,7 @@ int eyemCountObjectIrregularParts(EyemImage tpImage, EyemRect tpRoi, const char
std::vector<uchar> colors0(nccomps + 1, 0);
for (int i = 1; i < nccomps; i++) {
colors0[i] = 255;
if ((((int *)m2.data)[(cv::CC_STAT_AREA) + (i)*m2.cols] <= 10) || m2.ptr<int>(i)[cv::CC_STAT_WIDTH] * m2.ptr<int>(i)[cv::CC_STAT_HEIGHT] > 400000)//经验值
if ((((int *)m2.data)[(cv::CC_STAT_AREA) + (i)*m2.cols] <= 21) || m2.ptr<int>(i)[cv::CC_STAT_WIDTH] * m2.ptr<int>(i)[cv::CC_STAT_HEIGHT] > 400000)//经验值
{
colors0[i] = 0;
}
......@@ -5278,6 +5292,10 @@ int eyemCountObjectE(EyemImage tpImage, EyemRect tpRoi, const char *fileName, in
if (coeff > 1.0f) {
cv::resize(_tplMat, _tplMat, cv::Size(), coeff, coeff);
}
//防止报错
if (_tplMat.cols > _traceMat.cols || _tplMat.rows > _traceMat.rows) {
return FUNC_CANNOT_CALC;
}
//考虑并行计算两个模板结果
cv::Mat tplResult0;
cv::matchTemplate(_traceMat, _tplMat, tplResult0, cv::TM_SQDIFF_NORMED);
......@@ -5491,7 +5509,8 @@ int eyemCountObjectE(EyemImage tpImage, EyemRect tpRoi, const char *fileName, in
//更新元件间角度
partDist = (2 * asin(dChordL / (2 * trackRadius))) * 180.0 / PI;
//追踪到了重复的元件
if (trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
if ((trackCenter.x<0 || trackCenter.x>X - 1 ||
trackCenter.y<0 || trackCenter.y>Y - 1) || trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
found = false;
}
else {
......@@ -5616,6 +5635,10 @@ int eyemCountObjectE(EyemImage tpImage, EyemRect tpRoi, const char *fileName, in
if (coeff > 1.0f) {
cv::resize(_tplMat, _tplMat, cv::Size(), coeff, coeff);
}
//防止报错
if (_tplMat.cols > _traceMat.cols || _tplMat.rows > _traceMat.rows) {
return FUNC_CANNOT_CALC;
}
//考虑并行计算两个模板结果
cv::Mat tplResult0;
cv::matchTemplate(_traceMat, _tplMat, tplResult0, cv::TM_SQDIFF_NORMED);
......@@ -5824,7 +5847,8 @@ int eyemCountObjectE(EyemImage tpImage, EyemRect tpRoi, const char *fileName, in
//更新元件间角度
partDist = (2.0 * asin(dChordL / (2.0 * trackRadius))) * 180.0 / PI;
//追踪到了重复的元件
if (trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
if ((trackCenter.x<0 || trackCenter.x>X - 1 ||
trackCenter.y<0 || trackCenter.y>Y - 1) || trackMat.ptr<uint8_t>(cvRound(trackCenter.y))[cvRound(trackCenter.x)] == 255) {
found = false;
}
else {
......@@ -7568,7 +7592,7 @@ int eyemAOIForTSAV(EyemImage tpRefImg, EyemImage tpNextImg, EyemRect3 *tpRois, i
return FUNC_OK;
}
int eyemMarkerTracing(EyemImage tpImage, double dThreshold, EyemOcsFXYR *tpCircle, EyemImage *tpDstImg, bool bHighAccuracy)
int eyemMarkerTracing(EyemImage tpImage, EyemHSVModel tpHSVModel, EyemOcsFXYR *tpCircle, EyemImage *tpDstImg, bool bHighAccuracy)
{
cv::Mat image = cv::Mat(tpImage.iHeight, tpImage.iWidth, MAKETYPE(tpImage.iDepth, tpImage.iChannels), tpImage.vpImage).clone();
......@@ -7576,7 +7600,6 @@ int eyemMarkerTracing(EyemImage tpImage, double dThreshold, EyemOcsFXYR *tpCircl
return FUNC_IMAGE_NOT_EXIST;
const int X = image.cols; const int Y = image.rows;
int incn = image.channels();
if (incn > 3) {
cv::cvtColor(image, image, cv::COLOR_BGRA2BGR);
......@@ -7584,23 +7607,26 @@ int eyemMarkerTracing(EyemImage tpImage, double dThreshold, EyemOcsFXYR *tpCircl
else if (incn == 1) {
cv::cvtColor(image, image, cv::COLOR_GRAY2BGR);
}
//滤波
cv::blur(image, image, cv::Size(5, 5));
//用于显示
cv::Mat cc = image.clone();
//转hsv空间
cv::Mat imgGray;
cv::cvtColor(image, imgGray, cv::COLOR_BGR2HSV);
//红色比较特殊,分两个区间
cv::Mat mask1, mask2;
cv::inRange(imgGray, cv::Scalar(0, 43, 46), cv::Scalar(10, 255, 255), mask1);
cv::inRange(imgGray, cv::Scalar(156, 43, 46), cv::Scalar(180, 255, 255), mask2);
cv::Mat maskj, imageR;
cv::Mat mask1, mask2(cv::Size(X, Y), CV_8UC1, cv::Scalar(0));
cv::inRange(imgGray, cv::Scalar(tpHSVModel.dpRangeL[0], tpHSVModel.dpRangeL[1], tpHSVModel.dpRangeL[2]),
cv::Scalar(tpHSVModel.dpRangeU[0], tpHSVModel.dpRangeU[1], tpHSVModel.dpRangeU[2]), mask1);
//多个分割阈值
if ((tpHSVModel.dpRangeLExt[0] + tpHSVModel.dpRangeLExt[1] + tpHSVModel.dpRangeLExt[2]) != 0 ||
(tpHSVModel.dpRangeUExt[0] + tpHSVModel.dpRangeUExt[1] + tpHSVModel.dpRangeUExt[2]) != 0) {
cv::inRange(imgGray, cv::Scalar(tpHSVModel.dpRangeLExt[0], tpHSVModel.dpRangeLExt[1], tpHSVModel.dpRangeLExt[2]),
cv::Scalar(tpHSVModel.dpRangeUExt[0], tpHSVModel.dpRangeUExt[1], tpHSVModel.dpRangeUExt[2]), mask2);
}
//合并
cv::Mat maskj;
cv::bitwise_or(mask1, mask2, maskj);
//去掉干扰
......@@ -7642,20 +7668,6 @@ int eyemMarkerTracing(EyemImage tpImage, double dThreshold, EyemOcsFXYR *tpCircl
cv::Point2i(bbox.br().x + minSize, bbox.br().y + minSize))&cv::Rect(0, 0, X, Y);
cv::Mat limit = image(limRec).clone();
//转hsv空间
//cv::cvtColor(limit, limit, cv::COLOR_BGR2HSV);
/*cv::Mat mask1, mask2;
cv::inRange(limit, cv::Scalar(0, 43, 46), cv::Scalar(10, 255, 255), mask1);
cv::inRange(limit, cv::Scalar(156, 43, 46), cv::Scalar(180, 255, 255), mask2);
cv::Mat maskj, imageR;
cv::bitwise_or(mask1, mask2, maskj);
std::vector<std::vector<cv::Point>> _contours;
cv::findContours(maskj, _contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_NONE, limRec.tl());*/
//过滤
std::vector<cv::Point> approx;
float arcL = (float)cv::arcLength(cv::Mat(contour), true);
......@@ -7714,7 +7726,6 @@ int eyemMarkerTracing(EyemImage tpImage, double dThreshold, EyemOcsFXYR *tpCircl
if (AFAs.empty()) {
return FUNC_FAILED_DETECT;
}
//排序
std::sort(AFAs.begin(), AFAs.end(), std::less<AFA>());
//高精度定位
......@@ -7744,7 +7755,6 @@ int eyemMarkerTracing(EyemImage tpImage, double dThreshold, EyemOcsFXYR *tpCircl
tpCircle->fY = (float)AFAs[0].tpCircle.dY;
tpCircle->fR = (float)AFAs[0].tpCircle.dR;
}
//画图
cv::rectangle(cc, cv::Rect(cv::Point2f(tpCircle->fX - 2.0f*tpCircle->fR, tpCircle->fY - 2.0f*tpCircle->fR),
cv::Point2f(tpCircle->fX + 2.0f*tpCircle->fR, tpCircle->fY + 2.0f*tpCircle->fR)), cv::Scalar(0, 255, 255), 4);
......@@ -7928,10 +7938,8 @@ int eyemMulFuncTool(EyemImage tpImage, EyemRect tpRoi, const char *funcName, dou
return FUNC_OK;
}
#include "eyemMatchShapes.h"
int eyemLibImpl(EyemImage tpImage, EyemImage *tpDstImg)
int eyemLibImpl(EyemImage tpImage, EyemHSVModel tpHSVModel, EyemImage *tpDstImg)
{
CV_Assert(NULL != tpImage.vpImage);
......@@ -7940,10 +7948,17 @@ int eyemLibImpl(EyemImage tpImage, EyemImage *tpDstImg)
if (image.empty())
return FUNC_IMAGE_NOT_EXIST;
//多个分割阈值
if ((tpHSVModel.dpRangeLExt[0] + tpHSVModel.dpRangeLExt[1] + tpHSVModel.dpRangeLExt[2]) != 0 ||
(tpHSVModel.dpRangeUExt[0] + tpHSVModel.dpRangeUExt[1] + tpHSVModel.dpRangeUExt[2]) != 0) {
std::cout << "红色" << std::endl;
}
return FUNC_OK;
//shape_based_matching GM; // object to implent geometric matching
//shape_based_matching GM; // object to implent geometric matching
//int lowThreshold = 10; //deafult value
//int highThreashold = 100; //deafult value
//int highThreashold = 100; //deafult value
//double minScore = 0.25; //deafult value
//double greediness = 0.8; //deafult value
......
#include "stegers.h"
contour::contour() { clear(); }
// Value constructor: adopt the supplied per-point data and classification.
// The width/asymmetry/contrast arrays stay empty until computed elsewhere.
contour::contour(int32_t nnum, std::vector<float>& nrow, std::vector<float>& ncol, std::vector<float>& nangle, std::vector<float>& nresponse, contour_class ncont_class)
	: num(nnum),
	  row(nrow),
	  col(ncol),
	  angle(nangle),
	  response(nresponse),
	  cont_class(ncont_class) {
}
// Reset every field so the object represents an empty contour again.
void contour::clear() {
	num = 0;
	row.clear();
	col.clear();
	angle.clear();
	response.clear();
	width_l.clear();
	width_r.clear();
	asymmetry.clear();
	contrast.clear();
	cont_class = contour_class::cont_no_junc;
}
// Default chord: degenerate run at row 0 spanning columns [0, 0].
chord::chord() : r(0), cb(0), ce(0) {}
// Construct a run on row nr spanning columns [ncb, nce] inclusive.
chord::chord(short nr, short ncb, short nce) : r(nr), cb(ncb), ce(nce) {}
// Run-length encode all pixels whose value is >= min_val: every maximal
// horizontal run of bright pixels becomes one chord. Runs never cross a row
// boundary (a run touching the right border is closed at image_width - 1).
// Invariant preserved from the original: rl always carries one extra
// default-constructed chord at the end (the in-progress slot), so
// rl.size() == num + 1.
region::region(const std::vector<int32_t>& image, uint32_t min_val,
	int32_t image_width, int32_t image_m_height) {
	rl.clear();
	long runs = 0;          // number of completed chords so far
	bool in_run = false;    // currently inside a bright run?
	rl.emplace_back();      // slot for the chord being built
	for (long row = 0; row < image_m_height; row++) {
		for (long col = 0; col < image_width; col++) {
			const long grey = image[LCOR(row, col, image_width)];
			if (grey >= min_val) {
				if (!in_run) {
					// a run starts at this pixel
					in_run = true;
					rl[runs].r = (int16_t)row;
					rl[runs].cb = (int16_t)col;
				}
			}
			else if (in_run) {
				// the run ended on the previous column
				in_run = false;
				rl[runs].ce = (int16_t)(col - 1);
				runs++;
				rl.emplace_back();
			}
		}
		if (in_run) {
			// close a run that reaches the right image border
			in_run = false;
			rl[runs].ce = (int16_t)(image_width - 1);
			runs++;
			rl.emplace_back();
		}
	}
	this->num = runs;
}
//
//bool region::test() {
// const char* frame[] =
// {
// "00100100",
// "00110100",
// "00011000",
// "01000100",
// "01000000",
// 0 };
// const char* gold[] =
// {
// "00100100",
// "00110100",
// "00011000",
// "02000100",
// "02000000",
// 0 };
//
// cv::Mat pels(5, 8, CV_8U);
// DrawShape(pels, frame);
//
// std::vector<int32_t> lpels(5 * 8);
// for (auto row = 0; row < pels.rows; row++)
// for (auto col = 0; col < pels.cols; col++) {
// auto l = LCOR(row, col, pels.cols);
// lpels[l] = int32_t(pels.at<uint8_t>(row, col));
// }
//
// region rg(lpels, 1, pels.cols, pels.rows);
// bool check = rg.rl.size() == 9;
// if (!check) return check;
//
// std::vector<chord> golds;
// golds.emplace_back(0, 2, 2);
// golds.emplace_back(0, 5, 5);
// golds.emplace_back(1, 2, 3);
// golds.emplace_back(1, 5, 5);
// golds.emplace_back(2, 3, 4);
// golds.emplace_back(3, 1, 1);
// golds.emplace_back(3, 5, 5);
// golds.emplace_back(4, 1, 1);
// golds.emplace_back(0, 0, 0);
//
// for (int i = 0; i < 9; i++)
// assert(rg.rl[i] == golds[i]);
//
// return true;
//}
\ No newline at end of file
#pragma once
#include <vector>
#include <ostream>
// Flatten a (row, col) coordinate into a linear index for a row-major buffer
// of the given width. The expansion is fully parenthesized so the macro is
// safe inside larger expressions: the previous body `(row)*(width) + (col)`
// mis-expanded in contexts such as `2 * LCOR(r, c, w)`.
#ifndef LCOR
#define LCOR(row,col,width) (((row)*(width)) + (col))
#endif
// Classifies a contour by how its end points relate to junctions with
// other lines (used when linking line points and splitting at junctions).
enum class contour_class
{
	/** no end point is a junction */
	cont_no_junc,
	/** only the start point of the line is a junction */
	cont_start_junc,
	/** only the end point of the line is a junction */
	cont_end_junc,
	/** both end points of the line are junctions */
	cont_both_junc,
	/** the contour is closed */
	cont_closed
};
/** One extracted line (contour): parallel per-point arrays of coordinates,
	normal angles, filter responses, and optional width/asymmetry/contrast
	attributes filled in by later processing stages. */
class contour
{
public:
	//default constructor
	contour();
	//constructor
	contour(int32_t nnum, std::vector<float>& nrow, std::vector<float>& ncol, std::vector<float>& nangle, std::vector<float>& nresponse, contour_class ncont_class);
	void clear();
	/** Polyline length: sum of Euclidean distances between consecutive
		points. Returns 0 for empty or single-point contours. (The original
		formed row.begin() + 1 even when the vector was empty, which is
		undefined iterator arithmetic; indexing avoids that.) */
	float compute_length() const {
		float length = 0.0f;
		for (std::vector<float>::size_type i = 1; i < row.size() && i < col.size(); ++i) {
			const float dr = row[i] - row[i - 1];
			const float dc = col[i] - col[i - 1];
			length += std::sqrt(dr * dr + dc * dc);
		}
		return length;
	}
	/** number of points */
	int32_t num = 0;
	/** row coordinates of the line points (Y coordinate in ImageJ) */
	std::vector<float> row;
	/** column coordinates of the line points (X coordinate in ImageJ) */
	std::vector<float> col;
	/** angle of normal (measured from the row (Y) axis) */
	std::vector<float> angle;
	/** response of line point (second derivative) */
	std::vector<float> response;
	/** width to the left of the line */
	std::vector<float> width_l;
	/** width to the right of the line */
	std::vector<float> width_r;
	/** asymmetry of the line point */
	std::vector<float> asymmetry;
	/** contrast of the line point */
	std::vector<float> contrast;
	/** contour class (e.g., closed, no_junc); initialized so a
		default-constructed contour never carries an indeterminate value
		(it was the only member without an in-class initializer) */
	contour_class cont_class = contour_class::cont_no_junc;
};
/** Fast lookup helper for the next possible starting point of a line
	(port of the Correction.java structure). An array of crossRefs is
	accumulated and sorted by value; x and y are the coordinates of a
	point in the image, and the point is flagged done once processed. */
class crossRef {
public:
	crossRef(int32_t x = 0, int32_t y = 0, double value = 0.0, bool done = false) : m_x(x), m_y(y), m_val(value), m_done(done) {}
	// Accessors
	const int32_t& x() const { return m_x; }
	const int32_t& y() const { return m_y; }
	const double& value() const { return m_val; }
	bool done() const { return m_done; }
	// m_done is mutable so the processed flag can be toggled even on
	// const elements (e.g. keys held in a sorted container).
	void setDone() const { m_done = true; }
	void setUnDone() const { m_done = false; }
	/** Java-style three-way comparison on value, descending:
		-1 if this sorts before other (larger value), 1 if after, 0 if equal.
		Const-correct fix: the original took a mutable reference and was a
		non-const member, which prevented comparing const objects. */
	int32_t compareTo(const crossRef& other) const {
		return (m_val > other.value()) ? -1 : (m_val < other.value()) ? 1 : 0;
	}
	// Ordering/equality consider only the value, not the coordinates.
	bool operator==(const crossRef& other) const {
		return this->m_val == other.value();
	}
	bool operator<(const crossRef& other) const {
		return this->m_val < other.value();
	}
	bool operator>(const crossRef& other) const {
		return this->m_val > other.value();
	}
	friend std::ostream& operator<< (std::ostream& ous, const crossRef& dis)
	{
		ous << "x: " << dis.x() << "\ty: " << dis.y() << "\tvalue: " << dis.value() << "\tdone: " << std::boolalpha << dis.done();
		return ous;
	}
private:
	int32_t m_x, m_y;
	double m_val;
	mutable bool m_done;
};
/** Data structure to store three doubles: x,y and t (distance along line) */
class doublepoint
{
public:
	double cx = 0;  // x coordinate
	double cy = 0;  // y coordinate
	double t = 0;   // distance along the line
	doublepoint() = default;
	// Single constructor replaces the original duplicated two- and
	// three-argument overloads: omitting ct keeps the old (x, y) behavior
	// of t == 0, so all existing call sites compile unchanged.
	doublepoint(double ccx, double ccy, double ct = 0) : cx(ccx), cy(ccy), t(ct) {}
};
/** Offsets to a specific location in the image. An array of this type is
returned by the modified Bresenham algorithm in width.c. It is also used
in link.c to hold an array of pixel locations to check for appropriate
neighbors. */
class offset
{
public:
	// Pixel displacement relative to some reference location.
	int32_t x = 0;
	int32_t y = 0;
	offset() = default;
	offset(int32_t nx, int32_t ny) : x(nx), y(ny) {}
};
/** This data structure is used to accumulate junction information. It is
needed to split lines at junction points. */
class junction
{
public:
	/** Index of line that is already processed */
	int32_t cont1 = 0;
	/** Index of line that runs into cont1 */
	int32_t cont2 = 0;
	/** Index of the junction point in cont1 */
	int32_t pos = 0;
	/** y-(row-)coordinate of the junction point (corrected for ImageJ)*/
	float x = 0;
	/** x-(col-)coordinate of the junction point (corrected for ImageJ)*/
	float y = 0;
	junction() {}
	junction(int32_t ncont1, int32_t ncont2, int32_t npos, float nx, float ny) {
		cont1 = ncont1;
		cont2 = ncont2;
		pos = npos;
		x = nx;
		y = ny;
	}
	// Equality/ordering consider only the position within the line.
	bool operator==(const junction& other) const {
		return this->pos == other.pos;
	}
	bool operator<(const junction& other) const {
		return this->pos < other.pos;
	}
	bool operator>(const junction& other) const {
		return this->pos > other.pos;
	}
	/** Orders two junctions by first line index and, on ties, by position
		within the line (both descending); used as a sort comparator.
		FIX: the original used `>=` for the position tie-break, which
		returns true for equivalent elements — not a strict weak ordering,
		and undefined behavior when passed to std::sort. `>` keeps the same
		ordering of all distinct elements while satisfying the Compare
		requirements. */
	static bool compare(const junction& thisJunction, const junction& otherjunction) {
		if (thisJunction.cont1 == otherjunction.cont1)
		{
			return thisJunction.pos > otherjunction.pos;
		}
		else
		{
			return thisJunction.cont1 > otherjunction.cont1;
		}
	}
	friend std::ostream& operator<< (std::ostream& ous, const junction& dis)
	{
		ous << "cont1: " << dis.cont1 << "\tcont2: " << dis.cont2 << "\tpos: " << dis.pos << "\txy: " << dis.x << "," << dis.y;
		return ous;
	}
};
/** A chord in a run-length encoded region */
class chord
{
public:
	// default dtor, copy ctor ok
	/** row coordinate of the chord */
	short r = 0;
	/** column coordinate of the start of the chord */
	short cb = 0;
	/** column coordinate of the end of the chord (inclusive) */
	short ce = 0;
	// Constructors are defined out of line in the accompanying .cpp file.
	chord();
	chord(short nr, short ncb, short nce);
	/** Field-wise equality. Const-correct fix: the original took a
		non-const reference, which prevented comparison against const
		chords (e.g. elements of a const std::vector<chord>). */
	bool operator==(const chord& other) const {
		return this->r == other.r && this->cb == other.cb && this->ce == other.ce;
	}
};
/** Run-length encoded region of an image. This type is returned by the
threshold() function. It provides the means to efficiently link line points
into lines. */
class region
{
public:
	// Threshold `image` (row-major, image_width x image_m_height) at
	// min_val and record every maximal horizontal run of pixels >= min_val
	// as one chord. Defined in the accompanying .cpp file.
	region(const std::vector<int32_t>& image, uint32_t min_val,
		int32_t image_width, int32_t image_m_height);
	int32_t num = 0; // number of chords
	// NOTE(review): the constructor always leaves one spare
	// default-constructed chord at the end, so rl.size() == num + 1
	// (the commented-out test relies on this).
	std::vector<chord> rl; // array of chords
	//static bool test();
};
\ No newline at end of file
支持 Markdown 格式
你添加了 0 到此讨论。请谨慎行事。
Finish editing this message first!