-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcalibration.py
1894 lines (1765 loc) · 96.3 KB
/
calibration.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python
# coding: utf-8
import numpy as np
import cupy as cp
import nstep_fringe as nstep
import nstep_fringe_cp as nstep_cp
import cv2
import os
import glob
import pandas as pd
from tqdm import tqdm
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
import seaborn as sns
import reconstruction as rc
from plyfile import PlyData, PlyElement
from scipy.optimize import leastsq
from scipy.spatial import distance
import shutil
EPSILON = -0.5
TAU = 5.5
#TODO: Fix calibration reconstruction
class Calibration:
"""
Calibration class is used to calibrate camera and projector setting. User can choose between phase coded , multi frequency and multi wavelength temporal unwrapping.
After calibration the camera and projector parameters are saved as npz file at the given calibration image path.
"""
def __init__(self,
proj_width,
proj_height,
cam_width,
cam_height,
mask_limit,
type_unwrap,
N_list,
pitch_list,
board_gridrows,
board_gridcolumns,
dist_betw_circle,
bobdetect_areamin,
bobdetect_convexity,
kernel_v,
kernel_h,
path,
data_type,
processing,
dark_bias_path):
"""
Parameters
----------
proj_width: int.
Width of projector.
proj_height: int.
Height of projector.
cam_width: int.
Width of camera.
cam_height: int.
Height of camera.
mask_limit: float.
Modulation limit for applying mask to captured images.
type_unwrap: string.
Type of temporal unwrapping to be applied.
'multifreq' = multi frequency unwrapping method
'multiwave' = multi wavelength unwrapping method.
N_list: list.
The number of steps in phase shifting algorithm.
If phase coded unwrapping method is used this is a single element list.
For other methods corresponding to each pitch one element in the list.
pitch_list: list.
Array of number of pixels per fringe period.
board_gridrows: int.
Number of rows in the asymmetric circle pattern.
board_gridcolumns: int.
Number of columns in the asymmetric circle pattern.
dist_betw_circle: float.
Distance between circle centers.
kernel_v: int.
Kernel for vertical filter
kernel_h: int.
Kernel for horizontal filter.
path: str.
Path to read captured calibration images and save calibration results.
data_type:str
Calibration image data can be either .tiff or .npy.
processing:str.
Type of data processing. Use 'cpu' for desktop computation and 'gpu' for gpu.
"""
self.proj_width = proj_width
self.proj_height = proj_height
self.cam_width = cam_width
self.cam_height = cam_height
self.limit = mask_limit
self.type_unwrap = type_unwrap
self.N = N_list
self.pitch = pitch_list
self.path = path
self.board_gridrows = board_gridrows
self.board_gridcolumns = board_gridcolumns
self.dist_betw_circle = dist_betw_circle
self.bobdetect_areamin = bobdetect_areamin
self.bobdetect_convexity = bobdetect_convexity
self.kernel_v = kernel_v
self.kernel_h = kernel_h
if (self.type_unwrap == 'multifreq') or (self.type_unwrap == 'multiwave'):
self.phase_st = 0
else:
print('ERROR: Invalid type_unwrap')
return
if not os.path.exists(self.path):
print('ERROR: %s does not exist' % self.path)
if (data_type != 'tiff') and (data_type != 'npy'):
print('ERROR: Invalid data type. Data type should be \'tiff\' or \'npy\'')
else:
self.data_type = data_type
if (processing != 'cpu') and (processing != 'gpu'):
print('ERROR: Invalid processing type. Processing type should be \'cpu\' or \'gpu\'')
else:
self.processing = processing
if not os.path.exists(dark_bias_path):
print('ERROR:Path for dark bias %s does not exist' % self.calib_path)
else:
self.dark_bias = np.load(dark_bias_path)
def calib(self, fx, fy, model=None):
    """
    Function to calibrate camera and projector and save npz file of calibration parameter based on user choice
    of temporal phase unwrapping.

    Parameters
    ----------
    fx: float.
        Scale factor along the horizontal axis for projector-image generation.
    fy: float.
        Scale factor along the vertical axis for projector-image generation.
    model: object or None.
        Variance model forwarded to the multifreq pipeline; when None no
        phase-variance maps are computed.

    Returns
    -------
    sigma_sqphi_lst: list or None.
        Per-pose [vertical, horizontal] phase-variance maps (None when model is None).
    unwrapv_lst: list.
        List of unwrapped phase maps obtained from horizontally varying intensity patterns.
    unwraph_lst: list.
        List of unwrapped phase maps obtained from vertically varying intensity patterns.
    white_lst: list.
        List of true images for each calibration pose.
    maskv_lst, maskh_lst: list.
        Per-pose masks for the vertical / horizontal phase maps.
    mod_lst: list.
        List of modulation intensity images for each calibration pose for intensity
        varying both horizontally and vertically.
    proj_img_lst: list.
        List of projector images.
    cam_objpts: list.
        List of world coordinates used for camera calibration for each pose.
    cam_imgpts: list.
        List of circle centers grid for each calibration pose.
    proj_imgpts: list.
        List of circle center grid coordinates for each pose of projector calibration.
    euler_angles : np.array.
        Array of roll,pitch and yaw angles between camera and projector in degrees.
    cam_mean_error: list.
        List of camera mean error per calibration pose.
    cam_delta: np.ndarray.
        Array of camera re projection error.
    proj_mean_error: list.
        List of projector mean error per calibration pose.
    proj_delta: np.ndarray.
        Array of projector re projection error.
    """
    objp = self.world_points()
    if self.type_unwrap == 'multiwave':
        # NOTE(review): this branch never defines maskv_lst, maskh_lst or
        # sigma_sqphi_lst, which are used below and in the return statement,
        # so calling calib() with type_unwrap='multiwave' raises NameError.
        # Only the 'multifreq' path is currently functional — confirm before
        # relying on multiwave (see the class-level TODO).
        unwrapv_lst, unwraph_lst, white_lst, mod_lst, wrapped_phase_lst, mask_lst = self.projcam_calib_img_multiwave()
    else:
        if self.type_unwrap != 'multifreq':
            print("phase unwrapping type is not recognized, use 'multifreq'")
        unwrapv_lst, unwraph_lst, white_lst, mod_lst, wrapped_phase_lst, maskv_lst, maskh_lst, sigma_sqphi_lst = self.projcam_calib_img_multifreq(model)
        # Re-embed each vectorized (masked) phase map into a full
        # cam_height x cam_width image so downstream indexing works per-pixel.
        unwrapv_lst = [nstep.recover_image(u, maskv_lst[i], self.cam_height, self.cam_width) for i,u in enumerate(unwrapv_lst)]
        unwraph_lst = [nstep.recover_image(u, maskh_lst[i], self.cam_height, self.cam_width) for i,u in enumerate(unwraph_lst)]
    # Projector images
    proj_img_lst = self.projector_img(unwrapv_lst, unwraph_lst, white_lst, fx, fy)
    # Camera calibration
    camr_error, cam_objpts, cam_imgpts, cam_mtx, cam_dist, cam_rvecs, cam_tvecs = self.camera_calib(objp, white_lst)
    # Projector calibration
    proj_ret, proj_imgpts, proj_mtx, proj_dist, proj_rvecs, proj_tvecs = self.proj_calib(cam_objpts,
                                                                                         cam_imgpts,
                                                                                         unwrapv_lst,
                                                                                         unwraph_lst,
                                                                                         proj_img_lst)
    # Camera calibration error analysis
    cam_mean_error, cam_delta = self.intrinsic_error_analysis(cam_objpts,
                                                              cam_imgpts,
                                                              cam_mtx,
                                                              cam_dist,
                                                              cam_rvecs,
                                                              cam_tvecs)
    # Projector calibration error analysis
    proj_mean_error, proj_delta = self.intrinsic_error_analysis(cam_objpts,
                                                                proj_imgpts,
                                                                proj_mtx,
                                                                proj_dist,
                                                                proj_rvecs,
                                                                proj_tvecs)
    # Stereo calibration: intrinsics are held fixed (computed above); only the
    # camera->projector extrinsics (R, t) are estimated.
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 40, 0.0001)
    stereocalibration_flags = cv2.CALIB_FIX_INTRINSIC+cv2.CALIB_ZERO_TANGENT_DIST+cv2.CALIB_FIX_K3+cv2.CALIB_FIX_K4+cv2.CALIB_FIX_K5+cv2.CALIB_FIX_K6
    st_retu, st_cam_mtx, st_cam_dist, st_proj_mtx, st_proj_dist, st_cam_proj_rmat, st_cam_proj_tvec, E, F = cv2.stereoCalibrate(cam_objpts,
                                                                                                                               cam_imgpts,
                                                                                                                               proj_imgpts,
                                                                                                                               cam_mtx,
                                                                                                                               cam_dist,
                                                                                                                               proj_mtx,
                                                                                                                               proj_dist,
                                                                                                                               white_lst[0].shape[::-1],
                                                                                                                               flags=stereocalibration_flags,
                                                                                                                               criteria=criteria)
    project_mat = np.hstack((st_cam_proj_rmat, st_cam_proj_tvec))
    _, _, _, _, _, _, euler_angles = cv2.decomposeProjectionMatrix(project_mat)
    # Homogeneous projection matrices: camera frame is the world frame
    # (identity extrinsics); projector carries the stereo R|t.
    proj_h_mtx = np.dot(st_proj_mtx, np.hstack((st_cam_proj_rmat, st_cam_proj_tvec)))
    cam_h_mtx = np.dot(st_cam_mtx, np.hstack((np.identity(3), np.zeros((3, 1)))))
    u = np.arange(0, self.cam_width)
    v = np.arange(0, self.cam_height)
    uc_grid, vc_grid = np.meshgrid(u, v)
    # NOTE(review): points are stacked as (v, u) and read back as
    # uc = uv[:,1], vc = uv[:,0] — the swap is internally consistent, but
    # confirm it matches what the consumers of uc_img.npy / vc_img.npy expect.
    cordinates = np.stack((vc_grid.ravel(),uc_grid.ravel()),axis=1).astype("float64")
    uv = cv2.undistortPoints(cordinates, st_cam_mtx, st_cam_dist, None, cam_mtx).reshape((self.cam_width * self.cam_height,2))
    uc = uv[:,1]
    vc = uv[:,0]
    uc = uc.reshape(self.cam_height, self.cam_width)
    vc = vc.reshape(self.cam_height, self.cam_width)
    # Undistorted per-pixel coordinate maps, saved for reconstruction.
    np.save(os.path.join(self.path,"uc_img.npy"), uc)
    np.save(os.path.join(self.path,"vc_img.npy"), vc)
    np.savez(os.path.join(self.path, '{}_mean_calibration_param.npz'.format(self.type_unwrap)),
             cam_mtx_mean=st_cam_mtx,
             cam_dist_mean=st_cam_dist,
             proj_mtx_mean=st_proj_mtx,
             proj_dist_mean=st_proj_dist,
             st_rmat_mean=st_cam_proj_rmat,
             st_tvec_mean=st_cam_proj_tvec,
             cam_h_mtx_mean=cam_h_mtx,
             proj_h_mtx_mean=proj_h_mtx)
    # Per-pose camera extrinsics saved positionally (loaded back as arr_0/arr_1).
    np.savez(os.path.join(self.path, '{}_cam_rot_tvecs.npz'.format(self.type_unwrap)), cam_rvecs, cam_tvecs)
    # Cache calibration results on the instance for later use.
    self.cam_mtx = st_cam_mtx
    self.cam_dist = st_cam_dist
    self.proj_mtx = st_proj_mtx
    self.proj_dist = st_proj_dist
    self.st_rmat = st_cam_proj_rmat
    self.st_tvec = st_cam_proj_tvec
    self.cam_h_mtx = cam_h_mtx
    self.proj_h_mtx = proj_h_mtx
    return sigma_sqphi_lst, unwrapv_lst, unwraph_lst, white_lst, maskv_lst, maskh_lst, mod_lst, proj_img_lst, cam_objpts, cam_imgpts, proj_imgpts, euler_angles, cam_mean_error, cam_delta, proj_mean_error, proj_delta
def update_list_calib(self, proj_df1, unwrapv_lst, unwraph_lst, white_lst, mod_lst, proj_img_lst, reproj_criteria):
    """
    Function to remove outlier calibration poses and recalibrate with the
    remaining poses. A pose is dropped when its projector absolute
    reprojection error exceeds reproj_criteria in either x or y.

    Parameters
    ----------
    proj_df1: pandas dataframe.
        Dataframe of projector absolute error in x and y directions of all poses
        (columns 'image', 'absdelta_x', 'absdelta_y').
    unwrapv_lst: list.
        List of unwrapped phase maps obtained from horizontally varying intensity patterns.
    unwraph_lst: list.
        List of unwrapped phase maps obtained from vertically varying intensity patterns.
    white_lst: list.
        List of true images for each calibration pose.
    mod_lst: list.
        List of modulation intensity images for each calibration pose for intensity
        varying both horizontally and vertically.
    proj_img_lst: list.
        List of projector images for each pose.
    reproj_criteria: float.
        Criteria to remove outlier poses.

    Returns
    -------
    up_unwrapv_lst, up_unwraph_lst, up_white_lst, up_mod_lst, up_proj_img_lst: list.
        The input lists with outlier poses removed.
    cam_objpts: list.
        Updated list of world coordinates used for camera calibration for each pose.
    cam_imgpts: list.
        Updated list of circle centers grid for each calibration pose.
    proj_imgpts: list.
        Updated list of circle center grid coordinates for each pose of projector calibration.
    euler_angles: np.array.
        Array of roll,pitch and yaw angles between camera and projector in degrees.
    cam_mean_error: list.
        List of camera mean error per calibration pose.
    cam_delta: np.ndarray.
        Array of camera re projection error.
    proj_mean_error: list.
        List of projector mean error per calibration pose.
    proj_delta: np.ndarray.
        Array of projector re projection error.
    """
    # Pose indices whose projector reprojection error exceeds the criteria
    # in either direction; these poses are removed before recalibration.
    up_lst = list(set(proj_df1[proj_df1['absdelta_x'] > reproj_criteria]['image'].to_list() + proj_df1[proj_df1['absdelta_y'] > reproj_criteria]['image'].to_list()))
    up_white_lst = []
    up_unwrapv_lst = []
    up_unwraph_lst = []
    up_mod_lst = []
    up_proj_img_lst = []
    for index, element in enumerate(white_lst):
        if index not in up_lst:
            up_white_lst.append(element)
            up_unwrapv_lst.append(unwrapv_lst[index])
            up_unwraph_lst.append(unwraph_lst[index])
            up_mod_lst.append(mod_lst[index])
            up_proj_img_lst.append(proj_img_lst[index])
    objp = self.world_points()
    camr_error, cam_objpts, cam_imgpts, cam_mtx, cam_dist, cam_rvecs, cam_tvecs = self.camera_calib(objp, up_white_lst)
    # Projector calibration
    proj_ret, proj_imgpts, proj_mtx, proj_dist, proj_rvecs, proj_tvecs = self.proj_calib(cam_objpts,
                                                                                         cam_imgpts,
                                                                                         up_unwrapv_lst,
                                                                                         up_unwraph_lst,
                                                                                         up_proj_img_lst)
    # Camera calibration error analysis
    cam_mean_error, cam_delta = self.intrinsic_error_analysis(cam_objpts,
                                                              cam_imgpts,
                                                              cam_mtx,
                                                              cam_dist,
                                                              cam_rvecs,
                                                              cam_tvecs)
    # Projector calibration error analysis
    proj_mean_error, proj_delta = self.intrinsic_error_analysis(cam_objpts,
                                                                proj_imgpts,
                                                                proj_mtx,
                                                                proj_dist,
                                                                proj_rvecs,
                                                                proj_tvecs)
    # Stereo calibration with fixed intrinsics (same configuration as calib()).
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 40, 0.0001)
    stereocalibration_flags = cv2.CALIB_FIX_INTRINSIC+cv2.CALIB_ZERO_TANGENT_DIST+cv2.CALIB_FIX_K3+cv2.CALIB_FIX_K4+cv2.CALIB_FIX_K5+cv2.CALIB_FIX_K6
    # NOTE(review): image size is taken from the original white_lst, not the
    # filtered one — harmless as long as all poses share one resolution.
    st_retu, st_cam_mtx, st_cam_dist, st_proj_mtx, st_proj_dist, st_cam_proj_rmat, st_cam_proj_tvec, E, F = cv2.stereoCalibrate(cam_objpts,
                                                                                                                               cam_imgpts,
                                                                                                                               proj_imgpts,
                                                                                                                               cam_mtx,
                                                                                                                               cam_dist,
                                                                                                                               proj_mtx,
                                                                                                                               proj_dist,
                                                                                                                               white_lst[0].shape[::-1],
                                                                                                                               flags=stereocalibration_flags,
                                                                                                                               criteria=criteria)
    u = np.arange(0, self.cam_width)
    v = np.arange(0, self.cam_height)
    uc_grid, vc_grid = np.meshgrid(u, v)
    cordinates = np.stack((vc_grid.ravel(),uc_grid.ravel()),axis=1).astype("float64")
    uv = cv2.undistortPoints(cordinates, st_cam_mtx, st_cam_dist, None, cam_mtx).reshape((self.cam_width * self.cam_height,2))
    uc = uv[:,1]
    vc = uv[:,0]
    uc = uc.reshape(self.cam_height, self.cam_width)
    vc = vc.reshape(self.cam_height, self.cam_width)
    np.save(os.path.join(self.path,"uc_img.npy"), uc)
    np.save(os.path.join(self.path,"vc_img.npy"), vc)
    project_mat = np.hstack((st_cam_proj_rmat, st_cam_proj_tvec))
    _, _, _, _, _, _, euler_angles = cv2.decomposeProjectionMatrix(project_mat)
    proj_h_mtx = np.dot(st_proj_mtx, np.hstack((st_cam_proj_rmat, st_cam_proj_tvec)))
    cam_h_mtx = np.dot(st_cam_mtx, np.hstack((np.identity(3), np.zeros((3, 1)))))
    np.savez(os.path.join(self.path, '{}_calibration_param.npz'.format(self.type_unwrap)),
             cam_mtx_mean=st_cam_mtx,
             cam_dist_mean=st_cam_dist,
             proj_mtx_mean=st_proj_mtx,
             proj_dist_mean=st_proj_dist,
             st_rmat_mean=st_cam_proj_rmat,
             # Bug fix: was saved as st_tvec_std, inconsistent with the
             # st_tvec_mean key written by calib(); loaders expecting the
             # *_mean schema would fail on files written by this method.
             st_tvec_mean=st_cam_proj_tvec,
             cam_h_mtx_mean=cam_h_mtx,
             proj_h_mtx_mean=proj_h_mtx)
    np.savez(os.path.join(self.path, '{}_cam_rot_tvecs.npz'.format(self.type_unwrap)), cam_rvecs, cam_tvecs)
    return up_unwrapv_lst, up_unwraph_lst, up_white_lst, up_mod_lst, up_proj_img_lst, cam_objpts, cam_imgpts, proj_imgpts, euler_angles, cam_mean_error, cam_delta, proj_mean_error, proj_delta
def calib_center_reconstruction(self, cam_imgpts, unwrap_phase, mask_lst, sigma_sqphi_lst, dark_bias_path, model_path):
    """
    Wrapper that reconstructs the x,y,z coordinates of the detected circle
    centers for every calibration pose and compares them against the world
    coordinates projected into the camera frame.

    Parameters
    ----------
    cam_imgpts: list.
        List of detected circle centers in camera images.
    unwrap_phase: np.ndarray.
        Unwrapped phase map.
    mask_lst: list.
        Per-pose masks for the phase maps.
    sigma_sqphi_lst: list.
        Per-pose phase-variance maps.
    dark_bias_path: str.
        Path to the dark-bias frame.
    model_path: str.
        Path to the variance model.

    Returns
    -------
    delta_df: pandas dataframe.
        Data frame of error for each pose all circle centers.
    abs_delta_df: pandas dataframe.
        Data frame of absolute error for each pose all circle centers.
    center_cordi_lst: list.
        List of x,y,z coordinates of detected circle centers in each calibration pose.
    center_cordisigma_lst: list.
        Coordinate variance estimates for the reconstructed centers.
    """
    # Per-pose camera extrinsics saved by calib() (positional arrays).
    pose_vecs = np.load(os.path.join(self.path, '{}_cam_rot_tvecs.npz'.format(self.type_unwrap)))
    rvec, tvec = pose_vecs["arr_0"], pose_vecs["arr_1"]
    # Reconstruct every circle center in 3D.
    center_cordi_lst, center_cordisigma_lst = self.center_xyz(cam_imgpts,
                                                              unwrap_phase,
                                                              mask_lst,
                                                              sigma_sqphi_lst,
                                                              dark_bias_path,
                                                              model_path)
    # Project the known world coordinates into each pose for comparison.
    proj_xyz_arr = self.project_validation(rvec, tvec, self.world_points())
    delta_df, abs_delta_df = self.center_err_analysis(center_cordi_lst, proj_xyz_arr)
    return delta_df, abs_delta_df, center_cordi_lst, center_cordisigma_lst
def world_points(self):
"""
Function to generate world coordinate for asymmetric circle center calibration of camera and projector.
Returns
-------
coord = type: float. Array of world coordinates.
"""
col1 = np.append(np.tile([0, 0.5], int((self.board_gridcolumns-1) / 2)), 0).reshape(self.board_gridcolumns, 1)
col2 = np.ones((self.board_gridcolumns, self.board_gridrows)) * np.arange(0, self.board_gridrows)
col_mat = col1 + col2
row_mat = (0.5 * np.arange(0, self.board_gridcolumns).reshape(self.board_gridcolumns, 1))@np.ones((1, self.board_gridrows))
zer = np.zeros((self.board_gridrows * self.board_gridcolumns))
coord = np.column_stack((row_mat.ravel(), col_mat.ravel(), zer)) * self.dist_betw_circle
return coord.astype('float32')
def multifreq_analysis(self, data_array, model):
    """
    Helper function to compute unwrapped phase maps using multi frequency unwrapping on CPU.

    Parameters
    ----------
    data_array: np.ndarray:float64.
        Array of images used in 4 level phase unwrapping; the last 2*N[-1]
        frames are the highest-frequency vertical then horizontal patterns.
    model: object or None.
        Variance-prediction model; when None no phase-variance maps are computed.

    Returns
    -------
    unwrap_v: np.ndarray.
        Unwrapped phase map for vertical fringes.
    unwrap_h: np.ndarray.
        Unwrapped phase map for horizontal fringes.
    phase_v: np.ndarray.
        Wrapped phase maps for each pitch in vertical direction.
    phase_h: np.ndarray.
        Wrapped phase maps for each pitch in horizontal direction.
    orig_img: np.ndarray.
        True image without fringes.
    modulation: np.ndarray.
        Modulation intensity image of each pitch.
    mask_v, mask_h: np.ndarray.
        Masks used to recover images from vectors.
    sigma_sqphi_v, sigma_sqphi_h: np.ndarray or None.
        Phase-variance maps (None when model is None).
    """
    modulation, orig_img, phase_map, mask = nstep.phase_cal(data_array, self.limit, self.N, True)
    # Patterns are interleaved: even indices vertical, odd indices horizontal.
    phase_v = phase_map[::2]
    phase_h = phase_map[1::2]
    # Shift the coarsest wrapped phase into a continuous [0, 2*pi) ramp.
    phase_v[0][phase_v[0] < EPSILON] = phase_v[0][phase_v[0] < EPSILON] + 2 * np.pi
    phase_h[0][phase_h[0] < EPSILON] = phase_h[0][phase_h[0] < EPSILON] + 2 * np.pi
    unwrap_v, k_arr_v, mask_v = nstep.multifreq_unwrap(self.pitch,
                                                       phase_v,
                                                       self.kernel_v,
                                                       'v',
                                                       mask,
                                                       self.cam_width,
                                                       self.cam_height)
    unwrap_h, k_arr_h, mask_h = nstep.multifreq_unwrap(self.pitch,
                                                       phase_h,
                                                       self.kernel_h,
                                                       'h',
                                                       mask,
                                                       self.cam_width,
                                                       self.cam_height)
    if model is not None:
        cov_arr_v, _ = nstep.pred_var_fn(data_array[-2*self.N[-1]:-self.N[-1]], model)
        sigma_sqphi_v = nstep.var_func(data_array[-2*self.N[-1]:-self.N[-1]],
                                       mask_v,
                                       self.N[-1],
                                       cov_arr_v)
        cov_arr_h, _ = nstep.pred_var_fn(data_array[-self.N[-1]:], model)
        # Bug fix: the horizontal variance was computed with the vertical
        # mask and vertical covariance (cov_arr_h was computed but unused).
        sigma_sqphi_h = nstep.var_func(data_array[-self.N[-1]:],
                                       mask_h,
                                       self.N[-1],
                                       cov_arr_h)
    else:
        sigma_sqphi_v = None
        # Bug fix: original read 'sigma_sqphi_h - None', which raised
        # NameError instead of assigning None.
        sigma_sqphi_h = None
    return unwrap_v, unwrap_h, phase_v, phase_h, orig_img[-1], modulation, mask_v, mask_h, sigma_sqphi_v, sigma_sqphi_h
def multifreq_analysis_cupy(self, data_array, model):
    """
    Helper function to compute unwrapped phase maps using multi frequency unwrapping on GPU.
    After computation all arrays are returned as numpy.

    Parameters
    ----------
    data_array: cp.ndarray:float64.
        Cupy array of images used in 4 level phase unwrapping.
    model: object or None.
        Variance-prediction model; when None no phase-variance maps are computed.

    Returns
    -------
    Same layout as multifreq_analysis(), converted to numpy:
    unwrap_v, unwrap_h, phase_v, phase_h, orig_img, modulation,
    mask_v, mask_h, sigma_sqphi_v, sigma_sqphi_h
    (the two variance maps are None when model is None).
    """
    modulation, orig_img, phase_map, mask = nstep_cp.phase_cal_cp(data_array, self.limit, self.N, True)
    # Patterns are interleaved: even indices vertical, odd indices horizontal.
    phase_v = phase_map[::2]
    phase_h = phase_map[1::2]
    phase_v[0][phase_v[0] < EPSILON] = phase_v[0][phase_v[0] < EPSILON] + 2 * np.pi
    phase_h[0][phase_h[0] < EPSILON] = phase_h[0][phase_h[0] < EPSILON] + 2 * np.pi
    unwrap_v, k_arr_v, mask_v = nstep_cp.multifreq_unwrap_cp(self.pitch,
                                                             phase_v,
                                                             self.kernel_v,
                                                             'v',
                                                             mask,
                                                             self.cam_width,
                                                             self.cam_height)
    unwrap_h, k_arr_h, mask_h = nstep_cp.multifreq_unwrap_cp(self.pitch,
                                                             phase_h,
                                                             self.kernel_h,
                                                             'h',
                                                             mask,
                                                             self.cam_width,
                                                             self.cam_height)
    if model is not None:
        cov_arr_v, _ = nstep_cp.pred_var_fn(data_array[-2*self.N[-1]:-self.N[-1]], model)
        sigma_sqphi_v = nstep_cp.var_func(data_array[-2*self.N[-1]:-self.N[-1]],
                                          mask_v,
                                          self.N[-1],
                                          cov_arr_v)
        cov_arr_h, _ = nstep_cp.pred_var_fn(data_array[-self.N[-1]:], model)
        # Bug fix: the horizontal variance was computed with the vertical mask.
        sigma_sqphi_h = nstep_cp.var_func(data_array[-self.N[-1]:],
                                          mask_h,
                                          self.N[-1],
                                          cov_arr_h)
    else:
        sigma_sqphi_v = None
        # Bug fix: original read 'sigma_sqphi_h - None', which raised
        # NameError instead of assigning None.
        sigma_sqphi_h = None
    # Bug fix: cp.asnumpy(None) returns a 0-d object array, not None; guard
    # the variance maps so the CPU and GPU paths return the same types.
    sigma_v_np = cp.asnumpy(sigma_sqphi_v) if sigma_sqphi_v is not None else None
    sigma_h_np = cp.asnumpy(sigma_sqphi_h) if sigma_sqphi_h is not None else None
    return cp.asnumpy(unwrap_v), cp.asnumpy(unwrap_h), cp.asnumpy(phase_v), cp.asnumpy(phase_h), cp.asnumpy(orig_img[-1]), cp.asnumpy(modulation), cp.asnumpy(mask_v), cp.asnumpy(mask_h), sigma_v_np, sigma_h_np
def projcam_calib_img_multifreq(self, model):
    """
    Function is used to generate absolute phase map and true (single channel gray) images
    (object image without fringe patterns) from fringe image for camera and projector calibration
    from raw captured images using multi frequency temporal unwrapping method.

    Parameters
    ----------
    model: object or None.
        Variance-prediction model forwarded to the per-pose analysis; when
        None no phase-variance maps are accumulated.

    Returns
    -------
    unwrapv_lst: list.
        List of unwrapped phase maps obtained from horizontally varying intensity patterns.
    unwraph_lst: list.
        List of unwrapped phase maps obtained from vertically varying intensity patterns.
    white_lst: list.
        List of true images for each calibration pose.
    mod_lst: list.
        List of modulation intensity images for each calibration pose.
    wrapped_phase_lst: dictionary.
        Lists of vertical ('wrapv') and horizontal ('wraph') wrapped phase maps.
    maskv_lst, maskh_lst: list.
        Per-pose masks for the vertical / horizontal phase maps.
    sigma_sqphi_lst: list or None.
        Per-pose [vertical, horizontal] variance maps (None when model is None).
    """
    maskv_lst = []
    maskh_lst = []
    mod_lst = []
    white_lst = []
    wrapv_lst = []
    wraph_lst = []
    unwrapv_lst = []
    unwraph_lst = []
    sigma_sqphi_lst = []
    # Pose index is encoded in the file name: capt_<idx>_... ; characters
    # [-14:-11] of the first file of each pose hold the zero-padded index.
    all_img_paths = sorted(glob.glob(os.path.join(self.path, 'capt_*')), key=os.path.getmtime)
    acquisition_index_list = [int(i[-14:-11]) for i in all_img_paths]
    for x in tqdm(acquisition_index_list,
                  desc='generating unwrapped phases map for {} poses'.format(len(acquisition_index_list))):
        if self.data_type == 'tiff':
            if os.path.exists(os.path.join(self.path, 'capt_%03d_000000.tiff' % x)):
                img_path = sorted(glob.glob(os.path.join(self.path, 'capt_%03d*.tiff' % x)), key=os.path.getmtime)
                images_arr = np.array([cv2.imread(file, 0) for file in img_path]).astype(np.float64) - self.dark_bias
            else:
                print("ERROR: path is not exist! None item appended to the result")
                images_arr = None
        elif self.data_type == 'npy':
            if os.path.exists(os.path.join(self.path, 'capt_%03d_000000.npy' % x)):
                images_arr = np.load(os.path.join(self.path, 'capt_%03d_000000.npy' % x)).astype(np.float64) - self.dark_bias
            else:
                print("ERROR: path is not exist! None item appended to the result")
                images_arr = None
        else:
            print("ERROR: data type is not supported, must be '.tiff' or '.npy'.")
            images_arr = None
        if images_arr is not None:
            if self.processing == 'cpu':
                unwrap_v, unwrap_h, phase_v, phase_h, orig_img, modulation, mask_v, mask_h, sigma_sqphi_v, sigma_sqphi_h = self.multifreq_analysis(images_arr, model)
            else:
                if self.processing != 'gpu':
                    print("WARNING: processing type is not recognized, use 'gpu'")
                images_arr = cp.asarray(images_arr)
                unwrap_v, unwrap_h, phase_v, phase_h, orig_img, modulation, mask_v, mask_h, sigma_sqphi_v, sigma_sqphi_h = self.multifreq_analysis_cupy(images_arr, model)
                cp._default_memory_pool.free_all_blocks()
        else:
            unwrap_v = None
            unwrap_h = None
            phase_v = None
            phase_h = None
            orig_img = None
            modulation = None
            mask_v = None
            mask_h = None
            # Bug fix: original set sigma_sqphi_lst = None here, which
            # clobbered the accumulator list and left sigma_sqphi_v/h
            # undefined, raising at the append below when model is given.
            sigma_sqphi_v = None
            sigma_sqphi_h = None
        maskv_lst.append(mask_v)
        maskh_lst.append(mask_h)
        mod_lst.append(modulation)
        white_lst.append(orig_img)
        wrapv_lst.append(phase_v)
        wraph_lst.append(phase_h)
        unwrapv_lst.append(unwrap_v)
        unwraph_lst.append(unwrap_h)
        if model is not None:
            sigma_sqphi_lst.append([sigma_sqphi_v, sigma_sqphi_h])
        else:
            sigma_sqphi_lst = None
    wrapped_phase_lst = {"wrapv": wrapv_lst,
                         "wraph": wraph_lst}
    return unwrapv_lst, unwraph_lst, white_lst, mod_lst, wrapped_phase_lst, maskv_lst, maskh_lst, sigma_sqphi_lst
def projcam_calib_img_multiwave(self):
    """
    Function is used to generate absolute phase map and true (single channel gray) images (object image without
    fringe patterns) from fringe image for camera and projector calibration from raw captured images using
    multiwave temporal unwrapping method.

    Returns
    -------
    unwrapv_lst: list.
        List of unwrapped phase maps obtained from horizontally varying intensity patterns.
    unwraph_lst: list.
        List of unwrapped phase maps obtained from vertically varying intensity patterns.
    white_lst: list.
        List of true images for each calibration pose.
    mod_lst: list.
        List of modulation intensity images for each calibration pose.
    wrapped_phase_lst: dictionary.
        Lists of vertical ('wrapv') and horizontal ('wraph') wrapped phase maps.
    flag_lst: list.
        Per-pose combined validity flags.
    """
    flag_lst = []
    mod_lst = []
    white_lst = []
    kv_lst = []
    kh_lst = []
    wrapv_lst = []
    wraph_lst = []
    unwrapv_lst = []
    unwraph_lst = []
    pitch_arr = self.pitch  # there is manipulation of pitch
    N_arr = self.N
    # Synthetic (beat) wavelengths from pairs/triples of the base pitches.
    eq_wav12 = (pitch_arr[-1] * pitch_arr[1]) / (pitch_arr[1]-pitch_arr[-1])
    eq_wav123 = pitch_arr[0] * eq_wav12 / (pitch_arr[0] - eq_wav12)
    pitch_arr = np.insert(pitch_arr, 0, eq_wav123)
    pitch_arr = np.insert(pitch_arr, 2, eq_wav12)
    all_img_paths = sorted(glob.glob(os.path.join(self.path, 'capt_*')), key=os.path.getmtime)
    acquisition_index_list = [int(i[-14:-11]) for i in all_img_paths]
    for x in tqdm(acquisition_index_list, desc='generating unwrapped phases map for {} images'.format(len(acquisition_index_list))):
        if os.path.exists(os.path.join(self.path, 'capt_%03d_000000.tiff' % x)):
            # Bug fix: glob pattern used 'capt_%3d*.tiff', which space-pads
            # the index ('capt_  1*') and never matches the zero-padded file
            # names checked on the line above; use %03d like everywhere else.
            img_path = sorted(glob.glob(os.path.join(self.path, 'capt_%03d*.tiff' % x)), key=os.path.getmtime)
            images_arr = np.array([cv2.imread(file, 0) for file in img_path]).astype(np.float64)
            # NOTE(review): phase_cal is called here with 2 arguments but with
            # 4 in the multifreq path — confirm nstep.phase_cal supports both.
            multi_mod_v3, multi_white_v3, multi_phase_v3, flagv3 = nstep.phase_cal(images_arr[0: N_arr[0]],
                                                                                  self.limit)
            multi_mod_h3, multi_white_h3, multi_phase_h3, flagh3 = nstep.phase_cal(images_arr[N_arr[0]:2 * N_arr[0]],
                                                                                  self.limit)
            multi_mod_v2, multi_white_v2, multi_phase_v2, flagv2 = nstep.phase_cal(images_arr[2 * N_arr[0]:2 * N_arr[0] + N_arr[1]],
                                                                                  self.limit)
            multi_mod_h2, multi_white_h2, multi_phase_h2, flagh2 = nstep.phase_cal(images_arr[2 * N_arr[0] + N_arr[1]:2 * N_arr[0] + 2 * N_arr[1]],
                                                                                  self.limit)
            multi_mod_v1, multi_white_v1, multi_phase_v1, flagv1 = nstep.phase_cal(images_arr[2 * N_arr[0] + 2 * N_arr[1]:2 * N_arr[0] + 2 * N_arr[1] + N_arr[2]],
                                                                                  self.limit)
            multi_mod_h1, multi_white_h1, multi_phase_h1, flagh1 = nstep.phase_cal(images_arr[2 * N_arr[0] + 2 * N_arr[1] + N_arr[2]:2 * N_arr[0] + 2 * N_arr[1] + 2 * N_arr[2]],
                                                                                  self.limit)
            # Bug fix: the flags are used as boolean index masks below, so
            # they are arrays; 'and' on arrays raises ValueError. '&' gives
            # element-wise intersection and is identical for plain bools.
            flagv = flagv1 & flagv2 & flagv3
            multi_phase_v3 = multi_phase_v3[flagv]
            multi_phase_h3 = multi_phase_h3[flagv]
            multi_phase_v2 = multi_phase_v2[flagv]
            multi_phase_h2 = multi_phase_h2[flagv]
            multi_phase_v1 = multi_phase_v1[flagv]
            multi_phase_h1 = multi_phase_h1[flagv]
            # Beat phases between wavelength pairs/triples.
            multi_phase_v12 = np.mod(multi_phase_v1 - multi_phase_v2, 2 * np.pi)
            multi_phase_h12 = np.mod(multi_phase_h1 - multi_phase_h2, 2 * np.pi)
            multi_phase_v123 = np.mod(multi_phase_v12 - multi_phase_v3, 2 * np.pi)
            multi_phase_h123 = np.mod(multi_phase_h12 - multi_phase_h3, 2 * np.pi)
            # Fold values above TAU back to the negative branch.
            multi_phase_v123[multi_phase_v123 > TAU] = multi_phase_v123[multi_phase_v123 > TAU] - 2 * np.pi
            multi_phase_h123[multi_phase_h123 > TAU] = multi_phase_h123[multi_phase_h123 > TAU] - 2 * np.pi
            phase_arr_v = [multi_phase_v123, multi_phase_v3, multi_phase_v12, multi_phase_v2, multi_phase_v1]
            phase_arr_h = [multi_phase_h123, multi_phase_h3, multi_phase_h12, multi_phase_h2, multi_phase_h1]
            multiwav_unwrap_v, k_arr_v = nstep.multiwave_unwrap(pitch_arr, phase_arr_v, self.kernel_v, 'v')
            multiwav_unwrap_h, k_arr_h = nstep.multiwave_unwrap(pitch_arr, phase_arr_h, self.kernel_h, 'h')
            mod_lst.append(np.array([multi_mod_v3, multi_mod_v2, multi_mod_v1, multi_mod_h3, multi_mod_h2, multi_mod_h1]))
            flag_lst.append(flagv)
            white_lst.append(multi_white_h1)
            wrapv_lst.append(phase_arr_v)
            wraph_lst.append(phase_arr_h)
            kv_lst.append(k_arr_v)
            kh_lst.append(k_arr_h)
            unwrapv_lst.append(multiwav_unwrap_v)
            unwraph_lst.append(multiwav_unwrap_h)
    wrapped_phase_lst = {"wrapv": wrapv_lst,
                         "wraph": wraph_lst}
    return unwrapv_lst, unwraph_lst, white_lst, mod_lst, wrapped_phase_lst, flag_lst
@staticmethod
def _image_resize(image_lst, fx, fy):
    """Rescale every non-None image in the list by factors fx (x) and fy (y)."""
    return [cv2.resize(img, None, fx=fx, fy=fy)
            for img in image_lst
            if img is not None]
def projector_img(self, unwrap_v_lst, unwrap_h_lst, white_lst, fx, fy):
    """
    Build a projector-view image for every calibration pose by remapping
    the camera-view intensity (white image) into projector coordinates
    using the absolute phase maps.

    Parameters
    ----------
    unwrap_v_lst: list.
        List of unwrapped absolute phase maps from horizontally varying
        patterns.
    unwrap_h_lst: list.
        List of unwrapped absolute phase maps from vertically varying
        patterns.
    white_lst: list.
        List of true object images (without patterns).
    fx: float.
        Scale factor along the horizontal axis.
    fy: float.
        Scale factor along the vertical axis.

    Returns
    -------
    proj_img: list.
        Projector image list (uint8 per pose, or None where no white
        image is available).
    """
    unwrap_v_lst = Calibration._image_resize(unwrap_v_lst, fx, fy)
    unwrap_h_lst = Calibration._image_resize(unwrap_h_lst, fx, fy)
    white_lst = Calibration._image_resize(white_lst, fx, fy)
    proj_img = []
    for idx in tqdm(range(len(unwrap_v_lst)), desc='projector images'):
        white = white_lst[idx]
        if white is None:
            proj_img.append(None)
            continue
        # Absolute phase -> integer projector pixel coordinates.
        coord_u = ((unwrap_v_lst[idx] - self.phase_st) * self.pitch[-1] / (2 * np.pi)).astype(int)
        coord_v = ((unwrap_h_lst[idx] - self.phase_st) * self.pitch[-1] / (2 * np.pi)).astype(int)
        samples = pd.DataFrame(np.column_stack((coord_u.ravel(),
                                                coord_v.ravel(),
                                                white.ravel())),
                               columns=['u', 'v', 'int'])
        # Average the intensity of all camera pixels that land on the
        # same projector pixel.
        mean_int = samples.groupby(['u', 'v'])['int'].mean().reset_index()
        grid_u, grid_v = np.meshgrid(np.arange(0, self.proj_width),
                                     np.arange(0, self.proj_height))
        full_grid = pd.DataFrame(np.column_stack((grid_u.ravel(), grid_v.ravel())),
                                 columns=['u', 'v'])
        # Left-join onto the full projector grid; unmapped pixels get 0.
        merged = pd.merge(full_grid, mean_int, how='left', on=['u', 'v'])
        merged['int'] = merged['int'].fillna(0)
        pose_img = merged['int'].to_numpy().reshape(self.proj_height, self.proj_width)
        pose_img = cv2.normalize(pose_img, None, 0, 255, cv2.NORM_MINMAX, cv2.CV_8U)
        proj_img.append(pose_img)
    return proj_img
def camera_calib(self, objp, white_lst, display=True):
    """
    Function to calibrate camera using asymmetric circle pattern.
    OpenCV blob detector is used to detect circle centers which are used
    for calibration.

    Parameters
    ----------
    objp: list.
        World object coordinate.
    white_lst: list.
        List of calibration poses used for calibrations.
    display: bool.
        If set each calibration drawing is displayed.

    Returns
    -------
    r_error: float.
        Average re projection error.
    objpoints: list.
        List of image object points for each pose.
    cam_imgpoints: list.
        List of circle center grid coordinates for each pose.
    cam_mtx: np.array.
        Camera matrix from calibration.
    cam_dist: np.array.
        Camera distortion array from calibration.
    cam_rvecs: np.array.
        Array of rotation vectors for each calibration pose.
    cam_tvecs: np.array.
        Array of translational vectors for each calibration pose.
    """
    # Termination criteria for the calibration optimizer.
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
    # Set blob detector properties: bright blobs above a minimum
    # area/convexity threshold (both configurable on the instance).
    blobParams = cv2.SimpleBlobDetector_Params()
    # color
    blobParams.filterByColor = True
    blobParams.blobColor = 255
    # Filter by Area.
    blobParams.filterByArea = True
    blobParams.minArea = self.bobdetect_areamin  # 2000
    # Convexity
    blobParams.filterByConvexity = True
    blobParams.minConvexity = self.bobdetect_convexity
    blobDetector = cv2.SimpleBlobDetector_create(blobParams)
    objpoints = []  # 3d point in real world space
    cam_imgpoints = []  # 2d points in image plane.
    cv2.startWindowThread()
    ret_lst = []
    for white in white_lst:
        if white is not None:
            # Convert float image to uint8 type image.
            white = cv2.normalize(white, None, 0, 255, cv2.NORM_MINMAX, cv2.CV_8U)
            white_color = cv2.cvtColor(white, cv2.COLOR_GRAY2RGB)  # only for drawing purpose
            keypoints = blobDetector.detect(white)  # Detect blobs.
            # Draw detected blobs as green circles. This helps cv2.findCirclesGrid().
            im_with_keypoints = cv2.drawKeypoints(white_color, keypoints, np.array([]), (0, 255, 0),
                                                  cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS,
                                                  )
            im_with_keypoints_gray = cv2.cvtColor(im_with_keypoints, cv2.COLOR_BGR2GRAY)
            ret, corners = cv2.findCirclesGrid(im_with_keypoints_gray, (self.board_gridrows, self.board_gridcolumns), None,
                                               flags=cv2.CALIB_CB_ASYMMETRIC_GRID + cv2.CALIB_CB_CLUSTERING,
                                               blobDetector=blobDetector)  # Find the circle grid
            ret_lst.append(ret)
            if ret:
                objpoints.append(objp)  # Certainly, every loop objp is the same, in 3D.
                cam_imgpoints.append(corners)
                if display:
                    # Draw and display the centers.
                    im_with_keypoints = cv2.drawChessboardCorners(white_color,
                                                                  (self.board_gridrows, self.board_gridcolumns),
                                                                  corners,
                                                                  ret)  # circles
                    cv2.imshow("Camera calibration", im_with_keypoints)  # display
                    cv2.waitKey(200)
    cv2.destroyAllWindows()
    if not all(ret_lst):
        # Both detector thresholds can need tuning, not just the area.
        print('Warning: Centers are not detected for some poses. Modify bobdetect_areamin and bobdetect_convexity parameters', ret_lst)
    # set flags to have tangential distortion = 0, k4 = 0, k5 = 0, k6 = 0
    flags = cv2.CALIB_ZERO_TANGENT_DIST + cv2.CALIB_FIX_K3 + cv2.CALIB_FIX_K4 + cv2.CALIB_FIX_K5 + cv2.CALIB_FIX_K6
    # camera calibration
    cam_ret, cam_mtx, cam_dist, cam_rvecs, cam_tvecs = cv2.calibrateCamera(objpoints,
                                                                           cam_imgpoints,
                                                                           (self.cam_width, self.cam_height),
                                                                           None,
                                                                           None,
                                                                           flags=flags,
                                                                           criteria=criteria)
    # Average re projection error
    tot_error = 0
    for i in range(len(objpoints)):
        cam_img2, _ = cv2.projectPoints(objpoints[i], cam_rvecs[i], cam_tvecs[i], cam_mtx, cam_dist)
        error = cv2.norm(cam_imgpoints[i], cam_img2, cv2.NORM_L2) / len(cam_img2)
        tot_error += error
    r_error = tot_error / len(objpoints)
    if display:
        print("Re projection error:", r_error)
    return r_error, objpoints, cam_imgpoints, cam_mtx, cam_dist, cam_rvecs, cam_tvecs
def proj_calib(self,
               cam_objpts,
               cam_imgpts,
               unwrap_v_lst,
               unwrap_h_lst,
               proj_img_lst=None):
    """
    Calibrate the projector using absolute phase maps.

    Circle centers detected for camera calibration are mapped through the
    absolute phase maps to obtain the corresponding projector image
    coordinates, which then serve as the projector's "image points".

    Parameters
    ----------
    cam_objpts: list.
        List of world coordinates used for camera calibration for each pose.
    cam_imgpts: list.
        List of circle centers grid for each calibration pose.
    unwrap_v_lst: list.
        List of absolute phase maps for horizontally varying patterns for
        each calibration pose.
    unwrap_h_lst: list.
        List of absolute phase maps for vertically varying patterns for
        each calibration pose.
    proj_img_lst: list/None.
        List of computed projector images for each calibration pose.
        If None, the calibration drawing is not displayed.

    Returns
    -------
    r_error: float.
        Average re projection error.
    proj_imgpts: list.
        List of circle center grid coordinates for each pose of projector
        calibration.
    proj_mtx: np.array.
        Projector matrix from calibration.
    proj_dist: np.array.
        Projector distortion array from calibration.
    proj_rvecs: np.array.
        Array of rotation vectors for each calibration pose.
    proj_tvecs: np.array.
        Array of translational vectors for each calibration pose.
    """
    num_points = cam_objpts[0].shape[0]
    proj_imgpts = []
    for pose, pts in enumerate(cam_imgpts):
        centers = pts.reshape(num_points, 2)
        # Phase to coordinate conversion for each pose: sample the phase
        # map at each detected center, then convert phase to pixels.
        phase_u = nstep.bilinear_interpolate(unwrap_v_lst[pose], centers[:, 0], centers[:, 1])[0]
        phase_v = nstep.bilinear_interpolate(unwrap_h_lst[pose], centers[:, 0], centers[:, 1])[0]
        u = (phase_u - self.phase_st) * self.pitch[-1] / (2 * np.pi)
        v = (phase_v - self.phase_st) * self.pitch[-1] / (2 * np.pi)
        coordi = np.column_stack((u, v)).reshape(num_points, 1, 2).astype(np.float32)
        proj_imgpts.append(coordi)
        if proj_img_lst is not None:
            canvas = cv2.cvtColor(proj_img_lst[pose], cv2.COLOR_GRAY2RGB)  # only for drawing
            drawn = cv2.drawChessboardCorners(canvas, (self.board_gridrows, self.board_gridcolumns), coordi, True)
            cv2.imshow("Projector calibration", drawn)  # display
            cv2.waitKey(200)
    cv2.destroyAllWindows()
    # Set all distortion = 0: linear pinhole model assumption.
    flags = (cv2.CALIB_ZERO_TANGENT_DIST + cv2.CALIB_FIX_K1 + cv2.CALIB_FIX_K2
             + cv2.CALIB_FIX_K3 + cv2.CALIB_FIX_K4 + cv2.CALIB_FIX_K5 + cv2.CALIB_FIX_K6)
    # Projector calibration
    proj_ret, proj_mtx, proj_dist, proj_rvecs, proj_tvecs = cv2.calibrateCamera(
        cam_objpts,
        proj_imgpts,
        (self.proj_width, self.proj_height),
        None,
        None,
        flags=flags,
        criteria=(cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 2e-16))
    # Average re projection error
    tot_error = 0
    for pose in range(len(cam_objpts)):
        reproj, _ = cv2.projectPoints(cam_objpts[pose], proj_rvecs[pose], proj_tvecs[pose], proj_mtx, proj_dist)
        tot_error += cv2.norm(proj_imgpts[pose], reproj, cv2.NORM_L2) / len(reproj)
    r_error = tot_error / len(cam_objpts)
    if proj_img_lst is not None:
        print("Re projection error:", r_error)
    return r_error, proj_imgpts, proj_mtx, proj_dist, proj_rvecs, proj_tvecs
def image_analysis(self, unwrap, title, aspect_ratio=1):
"""
Function to plot list of images for calibration diagnostic purpose. Eg: To plot list of
unwrapped phase maps of all calibration poses.