I've used the cv::merge() function at the end of the following code, but it throws an unhandled exception at runtime when execution reaches the cv::merge() call. I've tried both a cv::Mat[] array and a std::vector<cv::Mat> as the input, but it still throws the exception.
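For reference, cv::merge() requires every input plane to have the same size and the same depth; if they differ, OpenCV raises a cv::Exception at runtime. Below is a minimal standalone sketch, separate from the code in question, that reproduces that condition by merging a CV_64FC1 plane with a CV_8UC1 plane (the 4x4 size and the variable names are just for illustration):

#include <iostream>
#include <vector>
#include <opencv2/core/core.hpp>
int main()
{
    // two planes with equal size but different depths
    cv::Mat a(4, 4, CV_64FC1, cv::Scalar(0.5)); // double plane
    cv::Mat b(4, 4, CV_8UC1, cv::Scalar(128));  // uchar plane
    std::vector<cv::Mat> mixed = { a, b };
    cv::Mat merged;
    try
    {
        cv::merge(mixed, merged); // fails: input depths differ
    }
    catch (const cv::Exception& e)
    {
        std::cout << "cv::merge threw: " << e.what() << std::endl;
    }
    return 0;
}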
The purpose of the code is to extract the red channel of an underwater image and apply new values to it in order to enhance the color distribution, according to equation 8 of this reference (Color Correction Based on CFA and Enhancement Based on Retinex With Dense Pixels for Underwater Images).
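With a = 0.05 and b = 0.95, and writing B, G, R for the blue, green and red planes, the compensation the code below is meant to compute per pixel works out to

compensatedRed = R + (a*avg3x3(G) + b*avg3x3(B) - R) * (a*G + b*B) / (R + G + B)

where avg3x3 denotes a 3x3 box average.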
It only works with cv::merge(planes, 1, image2), which puts just one of the planes into image2. It needs to merge all three planes in planes into image2 to produce a color image rather than a grayscale one.
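For reference, both cv::merge() overloads produce a multi-channel image from single-channel planes, as long as every plane has the same size and depth; in the array overload the second argument is the number of input planes, so merging all three requires passing 3 rather than 1. A small standalone sketch of both calls, using hypothetical 4x4 planes just to show the usage:

#include <vector>
#include <opencv2/core/core.hpp>
int main()
{
    // three single-channel planes with identical size and depth
    cv::Mat bgr[3] = { cv::Mat(4, 4, CV_8UC1, cv::Scalar(10)),
                       cv::Mat(4, 4, CV_8UC1, cv::Scalar(20)),
                       cv::Mat(4, 4, CV_8UC1, cv::Scalar(30)) };
    cv::Mat color;
    cv::merge(bgr, 3, color); // array overload: count = 3 gives a CV_8UC3 image
    std::vector<cv::Mat> v = { bgr[0], bgr[1], bgr[2] };
    cv::merge(v, color);      // vector overload, same three-channel result
    return 0;
}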
#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
using namespace cv;
using namespace std;
int main()
{
    //read an image
    Mat image = imread("9554.png", 1);
    //check for existence of data
    if (!image.data)
    {
        printf("no image data.\n");
        return -1;
    }
    //planes is a vector for holding rgb channels separately
    //std::vector<Mat> planes;
    Mat planes[3];
    //split the image into channels
    //planes[2] is the red channel
    split(image, planes);
    // converting planes from uchar to double
    planes[0].convertTo(planes[0], CV_64FC1);
    planes[1].convertTo(planes[1], CV_64FC1);
    planes[2].convertTo(planes[2], CV_64FC1);
    // defining coefficients of green and blue channel for blending
    double a = 0.05, b = 0.95;
    //sum_im stores pixelwise sum of Red, Green and Blue planes
    Mat imBlendNormal_B_G, sum_im;
    //converting to double
    imBlendNormal_B_G.convertTo(imBlendNormal_B_G, CV_64FC1);
    sum_im.convertTo(sum_im, CV_64FC1);
    //blending green and blue planes with a and b coefficients
    // and 0.0 offset (or gamma)
    addWeighted(planes[1], a, planes[0], b, 0.0, imBlendNormal_B_G);
    // sum of red, green and blue pixels in two addWeighted calls
    addWeighted(planes[2], 1.0, planes[1], 1.0, 0.0, sum_im);
    addWeighted(planes[0], 1.0, sum_im, 1.0, 0.0, sum_im);
    //dividing blended green and blue image by the total RGB sum
    divide(imBlendNormal_B_G, sum_im, imBlendNormal_B_G);
    //defining average kernel 3x3
    Mat avg3x3_kernel = (Mat_<double>(3, 3) << 1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0);
    //defining matrices for storing 3x3 average of blue and green planes
    Mat blueAverage, greenAverage;
    // converting to double type
    blueAverage.convertTo(blueAverage, CV_64FC1);
    greenAverage.convertTo(greenAverage, CV_64FC1);
    // taking 3x3 average
    filter2D(planes[0], blueAverage, planes[0].depth(), avg3x3_kernel);
    filter2D(planes[1], greenAverage, planes[1].depth(), avg3x3_kernel);
    //imBlendAverage_B_G_R: for blending of averaged green and blue channels
    Mat imBlendAverage_B_G_R;
    //convert to double
    imBlendAverage_B_G_R.convertTo(imBlendAverage_B_G_R, CV_64FC1);
    //blend averaged green and blue with a and b coeffs
    addWeighted(greenAverage, a, blueAverage, b, 0.0, imBlendAverage_B_G_R);
    //differentiate red values
    addWeighted(imBlendAverage_B_G_R, 1.0, planes[2], -1.0, 0.0, imBlendAverage_B_G_R);
    //CompensationTermRed: storing finally compensated red channel intensities
    Mat CompensationTermRed;
    //converting to double
    CompensationTermRed.convertTo(CompensationTermRed, CV_64FC1);
    //multiplication term
    CompensationTermRed = imBlendAverage_B_G_R.mul(imBlendNormal_B_G);
    //final add term
    addWeighted(CompensationTermRed, 1.0, planes[2], 1.0, 0.0, CompensationTermRed);
    //convert to uchar
    Mat CompensationTermRed_uint8;
    CompensationTermRed.convertTo(CompensationTermRed_uint8, CV_8UC1);
    //imshow("CompensationTermRed_uint8", CompensationTermRed_uint8);
    // assign new red channel values to planes[2]
    planes[2] = CompensationTermRed_uint8;
    Mat image2 = image;
    cv::merge(planes, 1, image2);
    image2.convertTo(image2, CV_8UC3);
    imshow("merge", image2);
    waitKey(0);
    return 0;
}