以下是展示如何使用org.opencv.imgproc.Imgproc的最佳示例。我们使用代码质量辨别算法，从开源项目中提取出了优秀的示例。
实例 1
/**
 * Segments a captured image into per-character sub-images.
 *
 * Pipeline: convert to HSV, threshold the V (brightness) channel adaptively,
 * then repeatedly strengthen a dilate/erode pass until at least one contour
 * large enough to be a character survives. Each surviving contour is cropped,
 * alpha-masked by its contour polygon, and wrapped in a DMImage.
 *
 * @param image source image; must be INT_BGR, 3BYTE_BGR or INT_RGB
 * @return list of extracted character images (empty if none were found)
 * @throws RuntimeException for ARGB/unsupported image types, or if the HSV
 *         split does not yield exactly 3 channels
 */
public static LinkedList<DMImage> decodeBufferedImage(BufferedImage image)
{
    // Pick the colour-conversion code matching this BufferedImage's layout.
    final int bufferedImageType = image.getType();
    final int cvType;
    if (bufferedImageType == BufferedImage.TYPE_INT_BGR || bufferedImageType == BufferedImage.TYPE_3BYTE_BGR)
        cvType = Imgproc.COLOR_BGR2HSV;
    else if (bufferedImageType == BufferedImage.TYPE_INT_RGB)
        cvType = Imgproc.COLOR_RGB2HSV;
    else if (bufferedImageType == BufferedImage.TYPE_INT_ARGB)
        throw new RuntimeException("Don't like ARGB");
    else
        throw new RuntimeException("Unexpected image type: " + bufferedImageType);

    // Convert the image to a mat and move it into HSV space.
    Mat mat = CVUtils.getMatFromBufferedImage(image);
    Imgproc.cvtColor(mat, mat, cvType);

    // Work on the V (brightness) channel only.
    ArrayList<Mat> channels = new ArrayList<Mat>();
    org.opencv.core.Core.split(mat, channels);
    if (channels.size() != 3)
    {
        throw new RuntimeException("Image does not split to 3 channels!");
    }
    Mat vMat = channels.get(2);

    // Adaptive thresholding, then invert so characters become white blobs.
    Imgproc.adaptiveThreshold(vMat, vMat, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 9, 10); // 5, 25
    vMat = CVUtils.invertMat(vMat);

    // Erosion & dilation: grow the morphological strength each pass until at
    // least one sufficiently large contour survives. The pass count is capped
    // because the previous unbounded loop spun forever on frames where no
    // contour ever exceeded the area threshold; on cap exhaustion we fall
    // through to the "no characters found" handling below.
    final int MAX_MORPH_PASSES = 10;
    Mat kernel = new Mat(); // empty kernel -> OpenCV's default 3x3 element
    org.opencv.core.Point anchor = new org.opencv.core.Point(-1, -1);
    LinkedList<MatOfPoint> contours = new LinkedList<MatOfPoint>();
    LinkedList<MatOfPoint> finalListOfContours = new LinkedList<MatOfPoint>();
    for (int i = 0; finalListOfContours.size() == 0 && i < MAX_MORPH_PASSES; i++) {
        Mat dst = vMat.clone();
        contours.clear();
        // Fixed-strength close (dilate then erode), then a pass whose
        // strength grows with i to progressively remove weak noise.
        Imgproc.dilate(vMat, dst, kernel, anchor, 1);
        Imgproc.erode(dst, vMat, kernel, anchor, 1);
        Imgproc.dilate(vMat, dst, kernel, anchor, 3 * (i + 1));
        Imgproc.erode(dst, vMat, kernel, anchor, 3 * (i + 1));
        Utils.out.println("i:" + i);
        // Get contours of the cleaned-up binary image.
        Mat hierarchy = new Mat();
        Imgproc.findContours(vMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
        // Keep only contours covering at least 0.5% of the image area.
        finalListOfContours.clear();
        for (int j = 0; j < contours.size(); j++)
        {
            if (Imgproc.contourArea(contours.get(j)) > (vMat.rows() * vMat.cols() * 0.005))
            { // 0.01 is default
                finalListOfContours.add(contours.get(j));
            }
        }
    }
    contours = null;

    LinkedList<DMImage> dmImageList = new LinkedList<DMImage>();
    DMImageFactory factory = new DMImageFactory();
    JSUtils.resetImageNameCounter();
    // Copy each surviving contour's bounding region out as its own image.
    for (int i = 0; i < finalListOfContours.size(); i++)
    {
        MatOfPoint thisContour = finalListOfContours.get(i);
        Rect bb = Imgproc.boundingRect(thisContour);
        Point topLeft = new Point(bb.x, bb.y);
        // Shift contour coordinates so they are relative to the crop.
        MatOfPoint translatedContour = CVUtils.translateMatOfPoints(thisContour, topLeft);
        Mat matChunk = mat.submat(bb).clone();
        // Fill mask: zero background, contour interior filled with -1
        // (all bits set). Presumably consumed as the alpha mask by
        // getAlphaBufferedImageFromMat - TODO confirm its contract.
        Mat contourMask = new Mat(matChunk.size(), CvType.CV_32SC1);
        contourMask.setTo(new org.opencv.core.Scalar(0));
        LinkedList<MatOfPoint> listOfPoints = new LinkedList<MatOfPoint>();
        listOfPoints.add(translatedContour);
        Core.fillPoly(contourMask, listOfPoints, new org.opencv.core.Scalar(-1));
        // Back to BGR before building the alpha-masked BufferedImage.
        Imgproc.cvtColor(matChunk, matChunk, Imgproc.COLOR_HSV2BGR);
        BufferedImage bufImage = CVUtils.getAlphaBufferedImageFromMat(matChunk, contourMask);
        // Attach a generated name and positional info.
        String name = JSUtils.generateImageName();
        DMImage dmImage = factory.new DMImage(name, topLeft, bufImage);
        dmImageList.add(dmImage);
    }
    if (finalListOfContours.size() == 0) {
        JOptionPane.showMessageDialog(PaperPanel.getInstance(), "We could not find any characters in this image, please try again!");
        PaperPanel.getInstance().webcamCancel();
    }
    return dmImageList;
}
实例 2
复制
/**
 * Rebuilds the window/level colour-map bitmap from brightness/contrast
 * percentages and applies it via setImageBitmap.
 *
 * Builds a 1 x n identity ramp, applies a linear transform (alpha, beta)
 * during the 32S -> 8U conversion, optionally inverts the ramp and applies an
 * OpenCV colormap, then converts the result to an Android Bitmap.
 *
 * @param brightness brightness in percent (100 = no shift)
 * @param contrast   contrast in percent (0 = full width window)
 * @param colormap   OpenCV colormap id, or negative to skip colour-mapping
 * @param inv        if true, invert the ramp (white-to-black)
 */
public void setImageContrastCV(double brightness, double contrast, int colormap, boolean inv) {
// Full value range of the ramp; also used as the bitmap width below.
double diff = getWidth();
// Window width shrinks as contrast grows; 100% contrast -> zero width.
// NOTE(review): contrast == 100 makes ImWidth 0 and alpha infinite - looks
// unguarded, confirm callers never pass 100.
double ImWidth = (1 - (contrast / 100.0d)) * diff;
// Linear mapping: out = alpha * in + beta, scaling the window to 0..255.
double alpha = 255.0d / ImWidth;
// NOTE(review): beta is computed from mMin BEFORE mMin is reassigned three
// lines below, i.e. from the previous call's window - confirm intentional.
double beta = alpha*(-mMin);
// Update cached window centre (level), upper and lower bounds.
mLevel = ImWidth / 2.0d + (diff - ImWidth) * (1.0d - (brightness / 100.0d));
mMax = ImWidth + (diff - ImWidth) * (1.0d - (brightness / 100.0d));
mMin = (diff - ImWidth) * (1.0d - (brightness / 100.0d));
int i = 0;
int n = (int) diff;
// 1 x n identity ramp: cmap[0][i] = i.
Mat cmap = new Mat(1, n, CvType.CV_32S);
for (i=0; i<n; i++) {
cmap.put(0, i, i);
}
// Inversion flips the slope and mirrors the offset around 255.
if (inv) {
alpha *= -1.0d;
beta = 255.0d - beta;
}
// Apply the window/level transform while narrowing to 8-bit.
cmap.convertTo(cmap, CvType.CV_8UC1, alpha, beta);
if (colormap >= 0) {
Contrib.applyColorMap(cmap, cmap, colormap);
//applyColorMap returns a BGR image, but createBitmap expects RGB
//do a conversion to swap blue and red channels:
Imgproc.cvtColor(cmap, cmap, Imgproc.COLOR_RGB2BGR);
}
// Render the n-pixel-wide, 1-pixel-tall ramp into an Android bitmap.
Bitmap cmapBitmap = Bitmap.createBitmap(n, 1, Bitmap.Config.ARGB_8888);
Utils.matToBitmap(cmap, cmapBitmap, false);
setImageBitmap(cmapBitmap);
}
实例 3
复制
/**
 * Samples the colour under a touch point: averages the HSV values of a small
 * patch around the touch, hands the average to the blob detector, and refreshes
 * the spectrum preview. Always returns false (no further touch events needed).
 */
public boolean onTouch(View v, MotionEvent event) {
    int cols = mRgba.cols();
    int rows = mRgba.rows();
    // The camera frame is centred inside the view; map view -> image coords.
    int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
    int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
    int x = (int)event.getX() - xOffset;
    int y = (int)event.getY() - yOffset;
    Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
    boolean outsideFrame = x < 0 || y < 0 || x > cols || y > rows;
    if (outsideFrame) {
        return false;
    }
    // Build an up-to-9x9 patch around the touch, clipped to the frame bounds.
    Rect touchedRect = new Rect();
    touchedRect.x = Math.max(x - 4, 0);
    touchedRect.y = Math.max(y - 4, 0);
    touchedRect.width = Math.min(x + 4, cols) - touchedRect.x;
    touchedRect.height = Math.min(y + 4, rows) - touchedRect.y;
    Mat touchedRegionRgba = mRgba.submat(touchedRect);
    Mat touchedRegionHsv = new Mat();
    Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
    // Mean HSV over the patch: per-channel sums divided by the pixel count.
    mBlobColorHsv = Core.sumElems(touchedRegionHsv);
    int pointCount = touchedRect.width * touchedRect.height;
    for (int ch = 0; ch < mBlobColorHsv.val.length; ch++) {
        mBlobColorHsv.val[ch] /= pointCount;
    }
    mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
    Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
            ", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
    // Feed the detector and refresh the on-screen spectrum preview.
    mDetector.setHsvColor(mBlobColorHsv);
    Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
    mIsColorSelected = true;
    touchedRegionRgba.release();
    touchedRegionHsv.release();
    return false; // don't need subsequent touch events
}
实例 4
/**
 * Per-frame pipeline: HSV conversion, V-channel equalisation, colour-range
 * threshold, morphological clean-up, contour detection, and selection of the
 * largest contour passing the area/aspect-ratio/extent filters. The winner is
 * stored in {@code detectedCube} (null when nothing qualifies).
 */
public void processFrame(){
    Log.i("","blah " + this.aspectMin);
    // HSV separates colour (H, S) from brightness (V).
    Imgproc.cvtColor(this.frame, this.hsvMat, Imgproc.COLOR_BGR2HSV);
    // Equalise the V plane only - NOTE: optional, might damage colours.
    List<Mat> planes = new ArrayList<Mat>();
    Core.split(this.hsvMat, planes);
    Imgproc.equalizeHist(planes.get(2), planes.get(2));
    Core.merge(planes, hsvMat);
    // Binary mask of pixels inside the configured HSV range.
    Core.inRange(this.hsvMat, this.hsvMinRange, this.hsvMaxRange, this.hsvThreshed);
    // Morphological filtering for noise removal.
    morphOps(hsvThreshed);
    contours = new ArrayList<MatOfPoint>();
    // findContours consumes its input, so search a copy.
    this.filteredBlock = this.hsvThreshed.clone();
    // Threshed image becomes BGR so colour can later be drawn on top of it.
    Imgproc.cvtColor(this.hsvThreshed, hsvThreshed, Imgproc.COLOR_GRAY2BGR);
    Imgproc.findContours(this.filteredBlock, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0,0));
    // Track the biggest contour that satisfies every shape filter.
    double maxArea = -1;
    double contourArea;
    MatOfPoint maxContour = null;
    for (int idx = 0; idx < contours.size(); idx++){
        MatOfPoint candidate = contours.get(idx);
        contourArea = Imgproc.contourArea(candidate);
        boundingBox = Imgproc.boundingRect(candidate);
        aspectRatio = (double) boundingBox.width/boundingBox.height;
        extent = contourArea/(boundingBox.width*boundingBox.height);
        boolean passesFilters = contourArea > 200
                && aspectRatio > this.aspectMin
                && aspectRatio < this.aspectMax
                && extent > this.extentMin
                && extent < this.extentMax;
        if (passesFilters && contourArea > maxArea){
            maxArea = contourArea;
            maxContour = candidate;
        }
    }
    // Wrap the winner in a Block, or clear the detection when none qualified.
    this.detectedCube = (maxContour != null) ? new Block(maxContour, this.objectWidth) : null;
}