diff --git a/include/detect/uv_led_detect_adaptive.cpp b/include/detect/uv_led_detect_adaptive.cpp index 016b81f..fde4abc 100644 --- a/include/detect/uv_led_detect_adaptive.cpp +++ b/include/detect/uv_led_detect_adaptive.cpp @@ -223,7 +223,7 @@ std::vector<cv::Point> UVDARLedDetectAdaptive::applyAdaptiveThreshold(const cv:: cv::Mat mask = cv::Mat::zeros(binaryRoi.size(), CV_8UC1); //Get the number of contours - //std::cout << "[UVDARLedDetectAdaptive]: NUMBER OF CONTOURS: " << contours.size() << std::endl; + std::cout << "[UVDARLedDetectAdaptive]: NUMBER OF CONTOURS: " << contours.size() << std::endl; if (contours.size() >= 4){ //Return empty roiDetectedPoints @@ -237,11 +237,11 @@ std::vector<cv::Point> UVDARLedDetectAdaptive::applyAdaptiveThreshold(const cv:: double area = cv::contourArea(contour); // Filter based on area if (area < MAX_AREA) { - //std::cout << "[UVDARLedDetectAdaptive]: IN AREA: " << area << std::endl; + std::cout << "[UVDARLedDetectAdaptive]: IN AREA: " << area << std::endl; // Draw the contour on the mask cv::drawContours(mask, std::vector<std::vector<cv::Point>>{contour}, -1, cv::Scalar(255), cv::FILLED); }else{ - //std::cout << "[UVDARLedDetectAdaptive]: OUT OF AREA: " << area << std::endl; + std::cout << "[UVDARLedDetectAdaptive]: OUT OF AREA: " << area << std::endl; } } @@ -250,10 +250,10 @@ std::vector<cv::Point> UVDARLedDetectAdaptive::applyAdaptiveThreshold(const cv:: // Detect points within this ROI std::vector<cv::Point> roiDetectedPoints = detectPointsFromRoi(binaryRoi, roi); - //std::cout << "[UVDARLedDetectAdaptive]: ROI DETECTED POINTS: " << roiDetectedPoints.size() << std::endl; + std::cout << "[UVDARLedDetectAdaptive]: ROI DETECTED POINTS: " << roiDetectedPoints.size() << std::endl; - if (roiDetectedPoints.size() > 3){ - //std::cout << "[UVDARLedDetectAdaptive]: NOISY ROI: " << roiDetectedPoints.size() << std::endl; + if (roiDetectedPoints.size() > 5){ + std::cout << "[UVDARLedDetectAdaptive]: NOISY ROI: " << roiDetectedPoints.size() << std::endl; //saveRoiImage(binaryRoi, point, roiIndex_++, 
thresholdValue, -1.0); numberDetectedPoints.push_back(roiDetectedPoints.size()); @@ -811,4 +811,5 @@ ROIData UVDARLedDetectAdaptive::prepareAdaptiveDataForLogging() { return adaptiveData; } + } // namespace uvdar \ No newline at end of file diff --git a/src/detector.cpp b/src/detector.cpp index 4ab35c7..db8f825 100644 --- a/src/detector.cpp +++ b/src/detector.cpp @@ -407,7 +407,6 @@ class UVDARDetector : public nodelet::Nodelet{ * * */ - ROS_INFO_STREAM("[UVDARDetector]: Processing image with standard thresholding."); if ( ! (uvdf_->processImage( image->image, @@ -446,8 +445,6 @@ class UVDARDetector : public nodelet::Nodelet{ */ - ROS_INFO_STREAM("[UVDARDetector]: Processing image with tracking points only."); - if( ! (uvda_[image_index]->processImageAdaptive( image->image, trackingPoints, @@ -470,7 +467,15 @@ class UVDARDetector : public nodelet::Nodelet{ /* publishAdaptive //{ */ void publishAdaptive(const cv_bridge::CvImageConstPtr& image, int image_index, const std::vector<cv::Point>& adaptive_detected_points) { - ROS_INFO_STREAM("[UVDARDetector]: Publishing adaptive points."); + + ROS_INFO_STREAM("[UVDARDetector]: Publishing adaptive points. In camera: " << image_index); + + if (!adaptive_detected_points.empty()) { + //Current camera + ROS_INFO_STREAM("[UVDARDetector]: Detected adaptive points in camera " << image_index << ": " << adaptive_detected_points.size()); + } else { + ROS_INFO_STREAM("[UVDARDetector]: No detected adaptive points. 
In camera: " << image_index); + } uvdar_core::ImagePointsWithFloatStamped msg_detected; msg_detected.stamp = image->header.stamp; msg_detected.image_width = image->image.cols; @@ -499,13 +504,13 @@ class UVDARDetector : public nodelet::Nodelet{ /* publishStandard //{ */ void publishStandard(const cv_bridge::CvImageConstPtr& image, int image_index, const std::vector<cv::Point>& detected_points) { - ROS_INFO_STREAM("[UVDARDetector]: Publishing standard points."); - ROS_INFO_STREAM("[UVDARDetector]: This is the image index: " << image_index); + + ROS_INFO_STREAM("[UVDARDetector]: Detected points in camera " << image_index << ": " << detected_points.size()); if (!detected_points.empty()) { ROS_INFO_STREAM("[UVDARDetector]: Detected points: " << detected_points.size()); } else { - ROS_INFO_STREAM("[UVDARDetector]: No detected points."); + ROS_INFO_STREAM("[UVDARDetector]: No detected points. In camera: " << image_index); } /* // Print the points @@ -579,9 +584,12 @@ class UVDARDetector : public nodelet::Nodelet{ adaptive_detected_points_[image_index].clear(); //TODO: Check if its worth it, this to be able to detect new points that where not currently detected + ROS_INFO_STREAM("[UVDARDetector]: Processing image with standard thresholding. In camera: " << image_index); + processStandard(image, image_index); //ROS_INFO_STREAM("[UVDARDetector]: Processing image with tracking points only."); + ROS_INFO_STREAM("[UVDARDetector]: Processing image with adaptive thresholding. In camera: " << image_index); processAdaptive(image, image_index, trackingPointsPerCamera[image_index]); } @@ -643,6 +651,7 @@ class UVDARDetector : public nodelet::Nodelet{ } if (_adaptive_threshold_ && adaptive_detected_points_[image_index].size() > 0){ + publishAdaptive(image, image_index, adaptive_detected_points_[image_index]); } else{