#include "preprocessimage.hpp"
#include <opencv2/photo.hpp>
#include "opencv2/opencv.hpp"

const int bgr_max = 255;
const int bgr_half = 128;

std::vector<cv::Point> PreprocessImage::poi = {};

PreprocessImage::PreprocessImage()
{
    CV_DNN_REGISTER_LAYER_CLASS(Crop, MyCropLayer);
}
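
// deskewRotate: warps the image to a top-down view with fourPointTransform,
// using the corner points stored in poi, and converts the result back to BGRA.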
cv::Mat PreprocessImage::deskewRotate(cv::Mat &image)
    cv::Mat orig = image.clone();
    fourPointTransform(orig, warped, poi);
    cvtColor(warped, warped, cv::COLOR_RGB2BGRA);
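
// adaptThreshold: converts to grayscale, applies Gaussian adaptive
// thresholding, cleans the result up with morphKernel-sized rectangular
// structuring elements, blurs, and converts back to BGRA in place.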
void PreprocessImage::adaptThreshold(cv::Mat &image,
    cvtColor(image, image, cv::COLOR_BGRA2GRAY);

    adaptiveThreshold(image, image, 255, cv::ADAPTIVE_THRESH_GAUSSIAN_C, cv::THRESH_BINARY, 9, 14);

                cv::getStructuringElement(cv::MORPH_RECT, cv::Size(morphKernel, morphKernel)));
                cv::getStructuringElement(cv::MORPH_RECT, cv::Size(morphKernel, morphKernel)));

    GaussianBlur(image, image, cv::Size(blurValue, blurValue), 0);
    cvtColor(image, image, cv::COLOR_GRAY2BGRA);
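
// toGray: in-place grayscale conversion with a Gaussian blur, stored back as BGRA.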
void PreprocessImage::toGray(cv::Mat &image, uint8_t blurValue)
{
    cvtColor(image, image, cv::COLOR_BGRA2GRAY);
    GaussianBlur(image, image, cv::Size(blurValue, blurValue), 0);
    cvtColor(image, image, cv::COLOR_GRAY2BGRA);
}
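
// Comparator for sorting candidate contours by absolute area.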
bool PreprocessImage::compareContourAreas(std::vector<cv::Point> &contour1,
                                          std::vector<cv::Point> &contour2)
    double i = fabs(contourArea(cv::Mat(contour1)));
    double j = fabs(contourArea(cv::Mat(contour2)));
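
// Coordinate comparators and a point-distance helper used when ordering the
// corners of a detected quadrilateral.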
bool PreprocessImage::compareXCords(cv::Point &p1, cv::Point &p2)

bool PreprocessImage::compareYCords(cv::Point &p1, cv::Point &p2)
{
    return (p1.y < p2.y);
}
bool PreprocessImage::compareDistance(std::pair<cv::Point, cv::Point> &p1,
                                      std::pair<cv::Point, cv::Point> &p2)
{
    return (cv::norm(p1.first - p1.second) < cv::norm(p2.first - p2.second));
}
double PreprocessImage::_distance(cv::Point &p1, cv::Point &p2)
{
    return sqrt(((p1.x - p2.x) * (p1.x - p2.x)) + ((p1.y - p2.y) * (p1.y - p2.y)));
}
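
// resizeToHeight: scales src to the requested height, preserving aspect ratio.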
void PreprocessImage::resizeToHeight(cv::Mat &src, cv::Mat &dst, int height)
{
    cv::Size2d s = cv::Size2d(src.cols * (height / double(src.rows)), height);
    cv::resize(src, dst, s, 0, 0, cv::INTER_AREA);
}
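
// orderPoints: arranges four corner points as top-left, top-right,
// bottom-right, bottom-left by splitting them into left-most and right-most
// pairs and ranking the right-most pair by distance from the top-left corner.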
void PreprocessImage::orderPoints(std::vector<cv::Point> &inpts, std::vector<cv::Point> &ordered)
    sort(inpts.begin(), inpts.end(), &compareXCords);
    std::vector<cv::Point> lm(inpts.begin(), inpts.begin() + 2);
    std::vector<cv::Point> rm(inpts.end() - 2, inpts.end());

    sort(lm.begin(), lm.end(), &compareYCords);

    std::vector<std::pair<cv::Point, cv::Point>> tmp;
    for (size_t i = 0; i < rm.size(); i++) {
        tmp.push_back(std::make_pair(tl, rm[i]));
    }

    sort(tmp.begin(), tmp.end(), &compareDistance);
    cv::Point tr(tmp[0].second);
    cv::Point br(tmp[1].second);

    ordered.push_back(tl);
    ordered.push_back(tr);
    ordered.push_back(br);
    ordered.push_back(bl);
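
// fourPointTransform: maps the ordered quadrilateral onto an upright
// rectangle whose size comes from the longest opposite edges, then warps
// src into dst with that perspective transform.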
void PreprocessImage::fourPointTransform(cv::Mat &src, cv::Mat &dst, std::vector<cv::Point> &pts)
    std::vector<cv::Point> ordered_pts;
    orderPoints(pts, ordered_pts);

    std::vector<cv::Point2f> src_;
    std::vector<cv::Point2f> dst_;

    double wa = _distance(ordered_pts[2], ordered_pts[3]);
    double wb = _distance(ordered_pts[1], ordered_pts[0]);
    double mw = fmax(wa, wb);

    double ha = _distance(ordered_pts[1], ordered_pts[2]);
    double hb = _distance(ordered_pts[0], ordered_pts[3]);
    double mh = fmax(ha, hb);

    src_ = {cv::Point(ordered_pts[0].x, ordered_pts[0].y),
            cv::Point(ordered_pts[1].x, ordered_pts[1].y),
            cv::Point(ordered_pts[2].x, ordered_pts[2].y),
            cv::Point(ordered_pts[3].x, ordered_pts[3].y)};
    dst_ = {cv::Point(0, 0),
            cv::Point(static_cast<int>(mw) - 1, 0),
            cv::Point(static_cast<int>(mw) - 1, static_cast<int>(mh) - 1),
            cv::Point(0, static_cast<int>(mh) - 1)};
    cv::Mat m = getPerspectiveTransform(src_, dst_);
    cv::warpPerspective(src, dst, m, cv::Size(static_cast<int>(mw), static_cast<int>(mh)));
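
// getPoints: runs the edge-detection pipeline, extracts contours, simplifies
// them with approxPolyDP, and searches for a four-point polygon that is
// assumed to be the document outline; its corners are rescaled to the
// original resolution and ordered.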
std::vector<cv::Point2f> PreprocessImage::getPoints(cv::Mat &src)
    std::vector<cv::Point2f> src_;
    std::vector<cv::Point2f> dst_;
    std::vector<std::vector<cv::Point>> contours;
    std::vector<cv::Vec4i> hierarchy = {};
    std::vector<std::vector<cv::Point>> approx;

    double ratio = src.rows / 500.0;
    preProcess(src, edged);
    cv::findContours(edged, contours, hierarchy, cv::RETR_LIST, cv::CHAIN_APPROX_SIMPLE);
    approx.resize(contours.size());

    for (i = 0; i < contours.size(); i++) {
        double peri = cv::arcLength(contours[i], true);
        cv::approxPolyDP(contours[i], approx[i], 0.03 * peri, true);
    }

    sort(approx.begin(), approx.end(), compareContourAreas);

    for (i = 0; i < approx.size(); i++) {
        if (approx[i].size() == 4) {

    if (i < approx.size()) {
        for (j = 0; j < approx[i].size(); j++) {
            approx[i][j] *= ratio;
        }

        std::vector<cv::Point> ordered_pts;
        orderPoints(approx[i], ordered_pts);

        double wa = _distance(ordered_pts[2], ordered_pts[3]);
        double wb = _distance(ordered_pts[1], ordered_pts[0]);
        double mw = fmax(wa, wb);

        double ha = _distance(ordered_pts[1], ordered_pts[2]);
        double hb = _distance(ordered_pts[0], ordered_pts[3]);
        double mh = fmax(ha, hb);

        src_ = {cv::Point(ordered_pts[0].x, ordered_pts[0].y),
                cv::Point(ordered_pts[1].x, ordered_pts[1].y),
                cv::Point(ordered_pts[2].x, ordered_pts[2].y),
                cv::Point(ordered_pts[3].x, ordered_pts[3].y)};
        dst_ = {cv::Point(0, 0),
                cv::Point(static_cast<int>(mw) - 1, 0),
                cv::Point(static_cast<int>(mw) - 1, static_cast<int>(mh) - 1),
                cv::Point(0, static_cast<int>(mh) - 1)};
void PreprocessImage::setPoints(std::vector<cv::Point2f> pt)
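
// preProcess: edge-detection pipeline used by getPoints (grayscale, adaptive
// threshold, open/close morphology, median blur, Canny).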
void PreprocessImage::preProcess(cv::Mat &src, cv::Mat &dst)
    cv::Mat imageOpen, imageClosed, imageBlurred;

    cv::cvtColor(src, imageGrayed, cv::COLOR_BGRA2GRAY);

    cv::adaptiveThreshold(imageGrayed,
                          cv::ADAPTIVE_THRESH_GAUSSIAN_C,

    cv::Mat structuringElmt = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(4, 4));
    cv::morphologyEx(imageGrayed, imageOpen, cv::MORPH_OPEN, structuringElmt);
    cv::morphologyEx(imageOpen, imageClosed, cv::MORPH_CLOSE, structuringElmt);

    cv::medianBlur(imageClosed, imageBlurred, 9);

    cv::Canny(imageBlurred, dst, 200, 250);
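
// computeSkew: estimates skew by Otsu-thresholding the image, detecting line
// segments with a probabilistic Hough transform, and accumulating their
// atan2 angles; the result is returned in degrees.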
double PreprocessImage::computeSkew(cv::Mat src)
    cv::cvtColor(src, src, cv::COLOR_BGRA2GRAY);
    cv::Size size = src.size();
    cv::threshold(src, src, 180, 255, cv::THRESH_OTSU);
    cv::bitwise_not(src, src);
    std::vector<cv::Vec4i> lines;
    cv::HoughLinesP(src, lines, 1, CV_PI / 180, 100, size.width / 2., 10);
    cv::Mat disp_lines(size, CV_8UC1, cv::Scalar(0, 0, 0));

    unsigned nb_lines = static_cast<unsigned>(lines.size());
    for (unsigned i = 0; i < nb_lines; ++i) {
                 cv::Point(lines[i][0], lines[i][1]),
                 cv::Point(lines[i][2], lines[i][3]),
                 cv::Scalar(255, 0, 0));
        angle += atan2(static_cast<double>(lines[i][3]) - lines[i][1],
                       static_cast<double>(lines[i][2]) - lines[i][0]);

    return angle * 180 / CV_PI;
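
// adjustBrightness: adds value to the pixel intensities, either inside the
// given ROI (when it lies within the image) or over the whole image.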
cv::Mat PreprocessImage::adjustBrightness(cv::Mat &in, int value, cv::Rect rect)
    const bool is_inside = (rect & cv::Rect(0, 0, in.cols, in.rows)) == rect;

    if (is_inside && rect.area() > 0)
        cv::Mat roi(in(rect));
        roi.convertTo(out, -1, 1, value);
        out.copyTo(out2(rect));

        in.convertTo(out, -1, 1, value);
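
// adjustContrast: derives a gain factor from the requested beta and applies
// it with convertTo; the qDebug calls trace the computed factor.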
cv::Mat PreprocessImage::adjustContrast(cv::Mat &in, int beta)
        factor = 1 - double(abs(beta) / 255.0);
        qDebug() << "Contrast factor" << factor << abs(beta);

        factor = 6.0 * (beta / 100.0) + 10;

        qDebug() << "Contrast factor" << factor << abs(beta) << abs(beta) / 255.0;
        in.convertTo(out, -1, factor, 1);
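
// hue: shifts the hue channel of an HSV copy by h_shift (modulo 180, the
// OpenCV hue range for 8-bit images) and converts back to BGR.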
cv::Mat PreprocessImage::hue(cv::Mat matrix, int h_shift)
    qDebug() << "Adjust HUE" << h_shift;
    cv::Mat processed_mat;
    cv::cvtColor(matrix, processed_mat, cv::COLOR_BGR2HSV);

    for (int y = 0; y < processed_mat.rows; y++)
        for (int x = 0; x < processed_mat.cols; x++)
            short h = processed_mat.at<cv::Vec3b>(y, x)[idx];
            processed_mat.at<cv::Vec3b>(y, x)[idx] = (h + h_shift) % 180;

    cv::cvtColor(processed_mat, processed_mat, cv::COLOR_HSV2BGR);
    return processed_mat;
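
// gamma: applies gamma correction through a 256-entry lookup table built
// with pow(i / 255.0, gamma).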
cv::Mat PreprocessImage::gamma(cv::Mat matrix, double gamma)
        gamma = -1 / (gamma - 1);

    cv::Mat processed_mat = matrix.clone();

    short max_n = bgr_max + 1;
    cv::Mat lookUpTable(1, max_n, CV_8U);
    uchar* p = lookUpTable.ptr();
    for (int i = 0; i < max_n; ++i)
        p[i] = cv::saturate_cast<uchar>(std::pow(i / (double)bgr_max, gamma) * (double)bgr_max);

    cv::LUT(processed_mat, lookUpTable, processed_mat);

    return processed_mat;
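
// sharpness: blends the original image with a Gaussian-blurred copy via
// addWeighted; beta weights the blurred image and alpha = 1 - beta weights
// the original, so negative beta sharpens (unsharp masking).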
cv::Mat PreprocessImage::sharpness(cv::Mat matrix, double beta)
    cv::Mat processed_mat = matrix.clone();

    double alpha = 1 + -1 * beta;

    cv::GaussianBlur(processed_mat, processed_mat, cv::Size(size, size), sigma, sigma);
    cv::addWeighted(matrix, alpha, processed_mat, beta, gamma, processed_mat);

    return processed_mat;
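
// manualThreshold: binarizes a grayscale copy of the image at the given
// threshold value, blurs it, and converts the result back to BGRA.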
cv::Mat PreprocessImage::manualThreshold(cv::Mat &image,
    if (!image.empty()) {
        cv::Mat img = image.clone();
        cvtColor(img, img, cv::COLOR_BGRA2GRAY, 1);
        cv::threshold(img, img, threshValue, 255, cv::THRESH_BINARY);
        GaussianBlur(img, img, cv::Size(blurValue, blurValue), 0);
        cvtColor(img, img, cv::COLOR_GRAY2BGRA);
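
// adjustSaturation: adds value to one HSV channel (the saturation channel,
// going by the function name) and converts back to BGR.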
cv::Mat PreprocessImage::adjustSaturation(cv::Mat &in, int value)
    cv::cvtColor(in, out, cv::COLOR_BGR2HSV);

    std::vector<cv::Mat> channels;
    cv::split(out, channels);

    channels[idx].convertTo(channels[idx], rtype, alpha, value);

    cv::merge(channels, out);
    cv::cvtColor(out, out, cv::COLOR_HSV2BGR);
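
// hedEdgeDetectDNN: holistically-nested edge detection; loads the HED Caffe
// model, feeds a resized, mean-subtracted BGR blob through the network, and
// rescales the single-channel response back to the input size.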
void PreprocessImage::hedEdgeDetectDNN(cv::Mat &image,
                                       std::string &prototxt,
                                       std::string &caffemodel,
    cv::dnn::Net net = cv::dnn::readNet(prototxt, caffemodel);

    cv::cvtColor(image, img, cv::COLOR_BGRA2BGR);
    cv::Size reso(size, size);
    resize(img, theInput, reso);
    cv::Mat blob = cv::dnn::blobFromImage(theInput,
                                          cv::Scalar(104.00698793, 116.66876762, 122.67891434),

    std::vector<cv::Mat> vectorOfImagesFromBlob;
    cv::dnn::imagesFromBlob(out, vectorOfImagesFromBlob);
    cv::Mat tmpMat = vectorOfImagesFromBlob[0] * 255;
    tmpMat.convertTo(tmpMatUchar, CV_8U);

    cv::resize(tmpMatUchar, image, img.size());
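
// CalcBlockMeanVariance: builds a downscaled map of per-block mean intensity
// (blocks failing the elided contrast test are zeroed), inpaints part of the
// map using a thresholded mask, and upscales the result; apparently intended
// as a background/illumination estimate for later binarization.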
void PreprocessImage::CalcBlockMeanVariance(cv::Mat& Img, cv::Mat& Res, float blockSide)
    Img.convertTo(I, CV_32FC1);
    Res = cv::Mat::zeros(Img.rows / blockSide, Img.cols / blockSide, CV_32FC1);

    for (int i = 0; i < Img.rows - blockSide; i += blockSide)
        for (int j = 0; j < Img.cols - blockSide; j += blockSide)
            patch = I(cv::Range(i, i + blockSide + 1), cv::Range(j, j + blockSide + 1));
            cv::meanStdDev(patch, m, s);
                Res.at<float>(i / blockSide, j / blockSide) = m[0];
                Res.at<float>(i / blockSide, j / blockSide) = 0;

    cv::resize(I, smallImg, Res.size());

    cv::threshold(Res, inpaintmask, 0.02, 1.0, cv::THRESH_BINARY);

    smallImg.convertTo(smallImg, CV_8UC1, 255);

    inpaintmask.convertTo(inpaintmask, CV_8UC1);
    cv::inpaint(smallImg, inpaintmask, inpainted, 5, cv::INPAINT_TELEA);

    cv::resize(inpainted, Res, Img.size());
    Res.convertTo(Res, CV_32FC1, 1.0 / 255.0);