Skip to content

Commit

Permalink
Changes for better tuning
Browse files Browse the repository at this point in the history
  • Loading branch information
MarlonRiv committed Nov 16, 2024
1 parent cdba7a1 commit 2222a2c
Show file tree
Hide file tree
Showing 16 changed files with 152 additions and 95 deletions.
76 changes: 50 additions & 26 deletions include/detect/uv_led_detect_adaptive.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,21 @@ struct PointComparator
namespace uvdar
{

UVDARLedDetectAdaptive::UVDARLedDetectAdaptive(int neighborhoodSize, double point_similarity_threshold, std::string adaptive_method, bool adaptive_debug, int contours_size_limit, int contour_max_size_limit, int roi_detected_points_limit)
UVDARLedDetectAdaptive::UVDARLedDetectAdaptive(int neighborhoodSize, double point_similarity_threshold, std::string adaptive_method, bool adaptive_debug,
int contours_size_limit, int contour_max_size_limit, int roi_detected_points_limit, double sigma_x,
double sigma_y, double grayscale_roi_weight, double sharped_roi_weight)
: neighborhoodSize_(neighborhoodSize),
point_similarity_threshold_(point_similarity_threshold),
adaptive_method_(adaptive_method),
adaptive_debug_(adaptive_debug),
contours_size_limit_(contours_size_limit),
contour_max_size_limit_(contour_max_size_limit),
roi_detected_points_limit_(roi_detected_points_limit)
roi_detected_points_limit_(roi_detected_points_limit),
sigmaX_(sigma_x),
sigmaY_(sigma_y),
grayscale_ROI_weight_(grayscale_roi_weight),
sharped_ROI_weight_(sharped_roi_weight)


{
}
Expand Down Expand Up @@ -82,6 +89,10 @@ namespace uvdar
return false;
}

// To include the standardPoints
/* std::vector<cv::Point> combinedPoints; */
// Add all adaptive points
/* combinedPoints.insert(combinedPoints.end(), trackingPoints.begin(), trackingPoints.end()); */

std::vector<cv::Rect> rois;
for (const auto& point : trackingPoints)
Expand Down Expand Up @@ -151,16 +162,14 @@ namespace uvdar

// Apply Gaussian blur to the ROI
cv::Mat blurred;
double sigmaX = 6.0;
double sigmaY = 6.0;
cv::GaussianBlur(grayImage, blurred, cv::Size(0, 0), sigmaX, sigmaY);
cv::GaussianBlur(grayImage, blurred, cv::Size(0, 0), sigmaX_, sigmaY_);

// Create unsharp mask by subtracting the blurred version from the original image
cv::Mat unsharpMask = grayImage - blurred;

// Apply the unsharp mask to the original image to enhance edges
cv::Mat enhancedImage;
cv::addWeighted(grayImage, 0.25, unsharpMask, 1.75, 0, enhancedImage);
cv::addWeighted(grayImage, grayscale_ROI_weight_, unsharpMask, sharped_ROI_weight_, 0, enhancedImage);

// BinaryRoi to save after applying the threshold
cv::Mat binaryRoi;
Expand Down Expand Up @@ -188,6 +197,10 @@ namespace uvdar
// Create a mask to draw the filtered contours
cv::Mat mask = cv::Mat::zeros(binaryRoi.size(), CV_8UC1);

lastProcessedROIs_.push_back(roi); // Store the ROI for visualization
// Store the binary ROI (For debugging/visualization)
lastProcessedBinaryROIs_.push_back(binaryRoi);

if (adaptive_debug_)
{
// Print the contours size
Expand All @@ -197,8 +210,16 @@ namespace uvdar
// TODO find proper value for noisy ROI
if (static_cast<int>(contours.size()) >= contours_size_limit_)
{
if (adaptive_debug_)
{
std::cout << "[UVDARLedDetectAdaptive]: NUMBER OF CONTOURS OUTSIDE OF THE LIMIT: " << contours.size() << std::endl;
}
// Return empty roiDetectedPoints
std::vector<cv::Point> empty_roiDetectedPoints = {};
thresholdValues_.push_back(thresholdValue_);
klDivergences_.push_back(0.0);
numberDetectedPoints_.push_back(0);
validRois_.push_back(0);
return empty_roiDetectedPoints;
}

Expand Down Expand Up @@ -236,9 +257,16 @@ namespace uvdar
std::cout << "[UVDARLedDetectAdaptive]: ROI NUMBER OF DETECTED POINTS: " << roiDetectedPoints.size() << std::endl;
}

// TODO find the proper value of the detected points
// 5
numberDetectedPoints_.push_back(roiDetectedPoints.size());
thresholdValues_.push_back(thresholdValue_);

if (adaptive_method_ == "Otsu" || adaptive_method_ == "otsu")
{
klDivergences_.push_back(0.0);
} else
{
klDivergences_.push_back(minKLDivergence_);
}

if (static_cast<int>(roiDetectedPoints.size()) > roi_detected_points_limit_)
{
Expand All @@ -257,32 +285,23 @@ namespace uvdar
{
klDivergences_.push_back(minKLDivergence_);
}
validRois_.push_back(0);

validRois_.push_back(0);
// Return empty roiDetectedPoints
std::vector<cv::Point> empty_roiDetectedPoints = {};
return empty_roiDetectedPoints;
// Clear the lastProcessedROIs_ and lastProcessedBinaryROIs_ vectors
lastProcessedROIs_.clear();
lastProcessedBinaryROIs_.clear();
/* lastProcessedROIs_.clear(); */
/* lastProcessedBinaryROIs_.clear(); */

return empty_roiDetectedPoints;
} else
{

// This is the reason it seems the visualization is blinking, getting some noisy rois in between and not being used
lastProcessedROIs_.push_back(roi); // Store the ROI for visualization
// Store the binary ROI (For debugging/visualization)
lastProcessedBinaryROIs_.push_back(binaryRoi);
/* // This is the reason it seems the visualization is blinking, getting some noisy rois in between and not being used */
/* lastProcessedROIs_.push_back(roi); // Store the ROI for visualization */
/* // Store the binary ROI (For debugging/visualization) */
/* lastProcessedBinaryROIs_.push_back(binaryRoi); */


numberDetectedPoints_.push_back(roiDetectedPoints.size());
thresholdValues_.push_back(thresholdValue_);
if (adaptive_method_ == "Otsu" || adaptive_method_ == "otsu")
{
klDivergences_.push_back(0.0);
} else
{
klDivergences_.push_back(minKLDivergence_);
}
validRois_.push_back(1);

return roiDetectedPoints;
Expand Down Expand Up @@ -495,6 +514,11 @@ namespace uvdar
}
if (!isOverlap)
{
if (adaptive_debug_)
{

std::cout << "[UVDARLedDetectAdaptive]: ADDING STANDARD POINT " << std::endl;
}
combinedPoints.push_back(standardPoint);
}
}
Expand Down
14 changes: 10 additions & 4 deletions include/detect/uv_led_detect_adaptive.h
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@ namespace uvdar
{
public:
UVDARLedDetectAdaptive(int neighborhoodSize = 25, double point_similarity_threshold = 5.0, std::string adaptive_method = "Otsu",
bool adaptive_debug = false, int contours_size_limit = 4, int contour_max_size_limit = 15, int roi_detected_points_limit = 5);
bool adaptive_debug = false, int contours_size_limit = 4, int contour_max_size_limit = 15, int roi_detected_points_limit = 5,
double sigma_x = 6.0, double sigma_y = 6.0, double grayscale_roi_weight = 0.25, double sharped_roi_weight = 1.75);
~UVDARLedDetectAdaptive();

bool processImageAdaptive(const cv::Mat& inputImage, const std::vector<cv::Point>& trackingPoints, std::vector<cv::Point>& detectedPoints,
Expand Down Expand Up @@ -88,9 +89,14 @@ namespace uvdar
double point_similarity_threshold_;
std::string adaptive_method_;
bool adaptive_debug_;
int contours_size_limit_;
int contour_max_size_limit_;
int roi_detected_points_limit_;
int contours_size_limit_;
int contour_max_size_limit_;
int roi_detected_points_limit_;
double sigmaX_;
double sigmaY_;
double grayscale_ROI_weight_;
double sharped_ROI_weight_;

int roiIndex_;
int MAX_SIZE = 50;
int MIN_SIZE = 5;
Expand Down
12 changes: 10 additions & 2 deletions launch/rosbag.launch
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,10 @@
<arg name="contour_size_limit" default="4"/>
<arg name="contour_max_size_limit" default="15"/>
<arg name="roi_detected_points_limit" default="5"/>
<arg name="sigma_x" default="6.0"/>
<arg name="sigma_y" default="6.0"/>
<arg name="grayscale_roi_weight" default="0.25"/>
<arg name="sharped_roi_weight" default="1.75"/>

<!-- Blink processor settings: -->
<arg name="blink_process_rate" default="10"/>
Expand Down Expand Up @@ -134,16 +138,20 @@
<!-- <param name="gui" type="bool" value="true"/> -->
<param name="gui" type="bool" value="false"/>
<!-- <param name="publish_visualization" type="bool" value="true"/> -->
<param name="publish_visualization" type="bool" value="false"/>
<param name="publish_visualization" type="bool" value="true"/>
<param name="publish" type="bool" value="$(arg publish)"/>
<param name="justReport" type="bool" value="true"/>
<param name="threshold" type="int" value="$(arg threshold)"/>
<param name="adaptive_threshold" type="bool" value="true"/>
<param name="adaptive_method" type="string" value="otsu"/>
<param name="adaptive_debug" type="bool" value="false"/>
<param name="adaptive_debug" type="bool" value="true"/>
<param name="contour_size_limit" type="int" value="$(arg contour_size_limit)"/>
<param name="contour_max_size_limit" type="int" value="$(arg contour_max_size_limit)"/>
<param name="roi_detected_points_limit" type="int" value="$(arg roi_detected_points_limit)"/>
<param name="sigma_x" type="double" value="$(arg sigma_x)"/>
<param name="sigma_y" type="double" value="$(arg sigma_y)"/>
<param name="grayscale_roi_weight" type="double" value="$(arg grayscale_roi_weight)"/>
<param name="sharped_roi_weight" type="double" value="$(arg sharped_roi_weight)"/>

<!-- <rosparam param="camera_topics"> ["uav6/uvdar_bluefox/left/image_raw"] </rosparam> -->
<rosparam param="camera_topics"> ["camera_left"] </rosparam>
Expand Down
2 changes: 1 addition & 1 deletion scripts/.tmuxinator.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ windows:
# - waitForRos; sleep 3; rosbag play --clock ~/experiments/20_04/two_tx/two_tx/rx/occlusion_1/rx.bag
# - waitForRos; sleep 3; rosbag play --clock ~/experiments/20_04/two_tx/two_tx/rx/occlusion_2/rx.bag
# - waitForRos; rosbag play --clock ~/experiments/20_04/tumult/tumult/rx/rx.bag
- waitForRos; sleep 3; rosbag play --clock ~/bag_files/datasets_08_2024/uav36_rosbags_rx/marlon/69_2024_08_20_14_12_22variant1/_2024-08-20-14-14-09.bag
- waitForRos; sleep 3; rosbag play --clock ~/bag_files/datasets_08_2024/uav36_rosbags_rx/marlon/70_2024_08_20_14_22_18variant2/_2024-08-20-14-23-37.bag
- blinkers-seen:
layout: even-vertical
panes:
Expand Down
2 changes: 1 addition & 1 deletion scripts/.~lock.threshold_analysis_summary_adaptive.csv#
Original file line number Diff line number Diff line change
@@ -1 +1 @@
,rivermar,rivermar,03.11.2024 18:39,file:///home/rivermar/.config/libreoffice/4;
,rivermar,rivermar,16.11.2024 14:29,file:///home/rivermar/.config/libreoffice/4;
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
,rivermar,rivermar,16.11.2024 14:30,file:///home/rivermar/.config/libreoffice/4;
44 changes: 25 additions & 19 deletions scripts/Processing_evaluation_distance.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -657,15 +657,15 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Optimal Threshold based on Combined Error Rate: 40\n",
"Optimal Threshold based on Max Error Rate: 40\n"
"Optimal Threshold based on Combined Error Rate: valid_4_5_15\n",
"Optimal Threshold based on Max Error Rate: valid_4_5_15\n"
]
}
],
Expand All @@ -677,23 +677,29 @@
"\n",
"# Define the file paths and thresholds\n",
"file_paths = [\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_dynamic/motiv_dynamic_th40_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_dynamic/motiv_dynamic_th60_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_dynamic/motiv_dynamic_th80_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_dynamic/motiv_dynamic_th100_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_dynamic/motiv_dynamic_th120_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_dynamic/motiv_dynamic_th160_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_dynamic/motiv_dynamic_th200_processed.csv'\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_adaptive/variant01_adaptive_otsu_invalid_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_adaptive/variant01_adaptive_otsu_valid_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_adaptive/variant01_adaptive_otsu_valid_c4_d5_mxs50_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_adaptive/variant01_adaptive_otsu_c4_d4_mxs15_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_adaptive/variant01_adaptive_otsu_c4_d5_mxs15_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_adaptive/variant01_adaptive_otsu_c5_d5_mxs15_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_adaptive/variant01_adaptive_otsu_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_adaptive/variant01_adaptive_kl_processed.csv',\n",
" '/home/rivermar/Desktop/MRS_Master_Project/paper/processed_csv_automation_adaptive/variant01_adaptive_otsu_c4_d5_mxs15_invalids_processed.csv',\n",
"\n",
"\n",
"\n",
"]\n",
"\n",
"\n",
"\n",
"\n",
"thresholds = [40, 60, 80, 100, 120, 160, 200]\n",
"keys = ['invalid', 'valid', 'valid_4_5_50', 'valid_4_4_15', 'valid_4_5_15', 'valid_5_5_15','otsu', 'kl','valid_4_5_15_invalids']\n",
"\n",
"# Initialize a dataframe to hold the summarized data\n",
"summary_data = {\n",
" 'Threshold': [],\n",
" 'Key': [],\n",
" 'Total Errors Signal 1': [],\n",
" 'Total Errors Signal 2': [],\n",
" 'Total Instances Signal 1': [],\n",
Expand All @@ -709,7 +715,7 @@
"}\n",
"\n",
"# Loop through the files and calculate the required metrics\n",
"for path, threshold in zip(file_paths, thresholds):\n",
"for path, key in zip(file_paths, keys):\n",
" # Load the data\n",
" df = pd.read_csv(path)\n",
" \n",
Expand All @@ -731,7 +737,7 @@
" fully_present_signal_2 = (df['num_errors_signal_2'] == 0).sum() / total_entries * 100\n",
"\n",
" # Append the data to the summary\n",
" summary_data['Threshold'].append(threshold)\n",
" summary_data['Key'].append(key)\n",
" summary_data['Total Errors Signal 1'].append(total_errors_signal_1)\n",
" summary_data['Total Errors Signal 2'].append(total_errors_signal_2)\n",
" summary_data['Total Instances Signal 1'].append(total_signal_1)\n",
Expand All @@ -742,7 +748,7 @@
" summary_data['Percentage Fully Present Signal 2'].append(fully_present_signal_2)\n",
"\n",
"# After looping, calculate performance differences and optimization metrics\n",
"for i in range(len(summary_data['Threshold'])):\n",
"for i in range(len(summary_data['Key'])):\n",
" error_rate_diff = summary_data['Error Rate Signal 2'][i] - summary_data['Error Rate Signal 1'][i]\n",
" relative_error_diff = (error_rate_diff / summary_data['Error Rate Signal 1'][i]) * 100 if summary_data['Error Rate Signal 1'][i] != 0 else 0\n",
" combined_error_rate = summary_data['Error Rate Signal 1'][i] + summary_data['Error Rate Signal 2'][i]\n",
Expand All @@ -757,11 +763,11 @@
"df_summary = pd.DataFrame(summary_data)\n",
"\n",
"# Find the optimal threshold based on combined error rate and max error rate\n",
"optimal_combined_threshold = df_summary.loc[df_summary['Combined Error Rate'].idxmin(), 'Threshold']\n",
"optimal_max_threshold = df_summary.loc[df_summary['Max Error Rate'].idxmin(), 'Threshold']\n",
"optimal_combined_threshold = df_summary.loc[df_summary['Combined Error Rate'].idxmin(), 'Key']\n",
"optimal_max_threshold = df_summary.loc[df_summary['Max Error Rate'].idxmin(), 'Key']\n",
"\n",
"# Save the summary to a CSV file\n",
"df_summary.to_csv('threshold_analysis_summary_dynamic.csv', index=False)\n",
"df_summary.to_csv('threshold_analysis_summary_adaptive.csv', index=False)\n",
"\n",
"# Output the optimal thresholds\n",
"print(f'Optimal Threshold based on Combined Error Rate: {optimal_combined_threshold}')\n",
Expand Down Expand Up @@ -5924,7 +5930,7 @@
},
{
"cell_type": "code",
"execution_count": 139,
"execution_count": null,
"metadata": {},
"outputs": [
{
Expand Down Expand Up @@ -5958,7 +5964,7 @@
"## Real world\n",
"\n",
"# Calculating the overall presence of each signal in both datasets\n",
"overall_presence_adaptive = data_otsu_adaptive[['signal_0','signal_1', 'signal_3']].mean()\n",
"overall_presence_adaptive = data_otsu_adaptive[['signal_0','signal_1', 'signal_3']].mean()\n",
"overall_presence_standard = data_standard[['signal_0','signal_1', 'signal_3']].mean()\n",
"overall_presence_adaptive_kl = data_kl_adaptive[['signal_0','signal_1', 'signal_3']].mean()\n",
"\n",
Expand Down
Loading

0 comments on commit 2222a2c

Please sign in to comment.