
ArUco: An Introduction to a Miniature Augmented Reality Library and Its Vision Applications (Part 2)


Reposted from: https://www.cnblogs.com/shawn0102/p/8039439.html


I. The First ArUco Vision Application

  This post introduces the demo for the first vision application. The application scenario is fairly simple and is described below:

1. Application Scenario

  Main thread: detect visual markers in the environment through the camera; once the marker with ID 100 is seen, outline it in the image, draw a coordinate frame on it, and obtain the marker's position and orientation relative to the camera coordinate frame;

  Child thread: convert the obtained marker pose into the required data type and send it to the robot (a minimal sketch of this two-thread pattern is given below).
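The two threads only share the marker pose, which is protected by a mutex. Here is a minimal sketch of that pattern; the names shared_pose and pose_lock are placeholders for illustration, while the actual demo uses the globals Rotat_Vec_Arr, Rotat_M and Trans_M shown later:

#include <pthread.h>
#include <unistd.h>
#include <cstdio>

// Shared pose written by the vision (main) thread and read by the sender thread.
static float shared_pose[3];                 // marker translation in the camera frame
static pthread_mutex_t pose_lock;

static void* sender_thread(void* /*arg*/)
{
    while (true) {
        pthread_mutex_lock(&pose_lock);
        std::printf("send pose: [%f, %f, %f]\n",
                    shared_pose[0], shared_pose[1], shared_pose[2]);
        pthread_mutex_unlock(&pose_lock);
        sleep(5);                            // update every 5 s, as in the demo
    }
    return nullptr;
}

int main()
{
    pthread_mutex_init(&pose_lock, nullptr);
    pthread_t tid;
    pthread_create(&tid, nullptr, sender_thread, nullptr);

    while (true) {                           // vision loop: detect marker, update pose
        pthread_mutex_lock(&pose_lock);
        // ... write the latest marker translation into shared_pose here ...
        pthread_mutex_unlock(&pose_lock);
    }
}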

2. Development Environment

  Ubuntu 14.04 (with OpenCV and ArUco installed)

3. Build Tool

  CMake

4. Source Code Download

  https://github.com/Zhanggx0102/Aruco_Blog_Demo.git

5. Building the Source

  After downloading, simply rebuild:

  cd Aruco_Blog_Demo-master

  rm -r build/

  mkdir build

  cd build

  cmake ..

  make 
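The repository ships with its own CMake configuration; if you need to adapt it, a minimal CMakeLists.txt along the following lines is enough to link against OpenCV, ArUco and pthreads. This file is assumed here for illustration and is not copied from the repository; the source file name main.cpp is a placeholder.

cmake_minimum_required(VERSION 2.8)
project(aruco_demo)

# Locate OpenCV and the ArUco library installed on the system.
find_package(OpenCV REQUIRED)
find_library(ARUCO_LIB aruco)

add_executable(aruco_demo main.cpp)
target_link_libraries(aruco_demo ${OpenCV_LIBS} ${ARUCO_LIB} pthread)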

II. Source Code Walkthrough

  The source code is already commented in fair detail, so this section focuses on the program structure.

The program flow chart is shown below:

[Figure: program flow chart]

The result after running the program is shown in the figure below:

[Figure: example output of the demo]

Below are the two main functions excerpted from the source.


int main(int argc, char **argv)
{
    int thread_return;
    pthread_t Message_Send_Thread_ID;

    //init thread lock
    pthread_mutex_init(&IK_Solver_Lock, NULL);
    //create new thread
    thread_return = pthread_create(&Message_Send_Thread_ID, NULL, Thread_Func_Message_Send, NULL);

    //import the camera param (CameraMatrix)
    float camera_matrix_array[9] = { 1.0078520005023535e+003, 0., 6.3950000000000000e+002,
                                     0.0, 1.0078520005023535e+003, 3.5950000000000000e+002,
                                     0.0, 0.0, 1.0 };
    cv::Mat Camera_Matrix(3,3,CV_32FC1);
    InitMat(Camera_Matrix, camera_matrix_array);
    cout << "Camera_Matrix = " << endl << "" << Camera_Matrix << endl;

    //import the camera param (Distorsion)
    float Distorsion_array[5] = {-4.9694653328469340e-002, 2.3886698343464000e-001, 0., 0., -2.1783942538569392e-001};
    cv::Mat Distorsion_M(1,5,CV_32FC1);
    InitMat(Distorsion_M, Distorsion_array);
    cout << "Distorsion_M = " << endl << "" << Distorsion_M << endl;

    CameraParameters LogiC170Param;
    //LogiC170Param.readFromXMLFile("LogitchC170_Param.yml");
    LogiC170Param.CameraMatrix = Camera_Matrix.clone();
    LogiC170Param.Distorsion = Distorsion_M.clone();
    LogiC170Param.CamSize.width = 1280;
    LogiC170Param.CamSize.height = 720;

    float MarkerSize = 0.04;
    int Marker_ID;
    MarkerDetector MDetector;
    MDetector.setThresholdParams(7, 7);
    MDetector.setThresholdParamRange(2, 0);
    CvDrawingUtils MDraw;

    //read the input image
    VideoCapture cap(0);      // open the default camera
    if (!cap.isOpened())      // check if we succeeded
        return -1;

    cv::Mat frame;
    cv::Mat Rvec;             //rotational vector
    CvMat Rvec_Matrix;        //temp matrix
    CvMat R_Matrix;           //rotational matrix
    cv::Mat Tvec;             //translation vector

    cap >> frame;             //get first frame
    //LogiC170Param.resize(frame.size());

    printf("%f, %f\n", cap.get(CV_CAP_PROP_FRAME_WIDTH), cap.get(CV_CAP_PROP_FRAME_HEIGHT));
    cap.set(CV_CAP_PROP_FRAME_WIDTH, 1280);
    cap.set(CV_CAP_PROP_FRAME_HEIGHT, 720);
    //cap.set(CV_CAP_PROP_FPS, 10);
    printf("%f, %f\n", cap.get(CV_CAP_PROP_FRAME_WIDTH), cap.get(CV_CAP_PROP_FRAME_HEIGHT));

    while (1)
    {
        //get current frame
        cap >> frame;

        //Ok, let's detect
        vector<Marker> Markers = MDetector.detect(frame, LogiC170Param, MarkerSize);
        //printf("marker count:%d \n", (int)(Markers.size()));

        //for each marker, check its ID and, if it is 100, draw its info and boundaries in the image
        for (unsigned int j = 0; j < Markers.size(); j++)
        {
            //marker ID test
            Marker_ID = Markers[j].id;
            printf("Marker ID = %d \n", Marker_ID);

            if (Marker_ID == 100)
            {
                //cout << Markers[j] << endl;
                Markers[j].draw(frame, Scalar(0,0,255), 2);
                Markers[j].calculateExtrinsics(MarkerSize, LogiC170Param, false);

                //rotational vector
                Rvec = Markers[j].Rvec;
                cout << "Rvec = " << endl << "" << Rvec << endl;

                //translation vector
                Tvec = Markers[j].Tvec;
                cout << "Tvec = " << endl << "" << Tvec << endl;

                //lock to update global variables: Rotat_Vec_Arr[3], Rotat_M[9], Trans_M[3]
                pthread_mutex_lock(&IK_Solver_Lock);

                //save rotational vector to float array
                for (int r = 0; r < Rvec.rows; r++)
                {
                    for (int c = 0; c < Rvec.cols; c++)
                    {
                        //cout << Rvec.at<float>(r,c) << endl;
                        Rotat_Vec_Arr[r] = Rvec.at<float>(r,c);
                    }
                }
                printf("Rotat_Vec_Arr[3] = [%f, %f, %f] \n", Rotat_Vec_Arr[0], Rotat_Vec_Arr[1], Rotat_Vec_Arr[2]);

                //save array data to CvMat and convert rotational vector to rotational matrix
                cvInitMatHeader(&Rvec_Matrix, 1, 3, CV_32FC1, Rotat_Vec_Arr, CV_AUTOSTEP);  //init Rvec_Matrix
                cvInitMatHeader(&R_Matrix, 3, 3, CV_32FC1, Rotat_M, CV_AUTOSTEP);           //init R_Matrix and Rotat_M
                cvRodrigues2(&Rvec_Matrix, &R_Matrix, 0);
                printf("Rotat_M = \n[%f, %f, %f, \n  %f, %f, %f, \n  %f, %f, %f] \n",
                       Rotat_M[0], Rotat_M[1], Rotat_M[2], Rotat_M[3], Rotat_M[4],
                       Rotat_M[5], Rotat_M[6], Rotat_M[7], Rotat_M[8]);

                //save translation vector to float array
                for (int r = 0; r < Tvec.rows; r++)
                {
                    for (int c = 0; c < Tvec.cols; c++)
                    {
                        Trans_M[r] = Tvec.at<float>(r,c);
                    }
                }
                printf("Trans_M[3] = [%f, %f, %f] \n", Trans_M[0], Trans_M[1], Trans_M[2]);

                //unlock
                pthread_mutex_unlock(&IK_Solver_Lock);

                //draw a 3d axis on the marker if there is 3d info
                if (LogiC170Param.isValid() && MarkerSize != -1)
                {
                    MDraw.draw3dAxis(frame, LogiC170Param, Rvec, Tvec, 0.04);
                }
            }
        }
    }
}
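The rotation-vector-to-matrix conversion above relies on the legacy OpenCV C API (CvMat, cvInitMatHeader, cvRodrigues2). As a side note, the same step can be written with the C++ API; this is only a sketch, not part of the original demo, and it assumes Rvec is of type CV_32F, as the loop above does:

// Sketch: convert the marker's rotation vector into a 3x3 rotation matrix
// using cv::Rodrigues instead of the legacy cvRodrigues2 call.
cv::Mat R;
cv::Rodrigues(Rvec, R);                      // Rvec comes from calculateExtrinsics()
for (int i = 0; i < 9; i++)
    Rotat_M[i] = R.at<float>(i / 3, i % 3);  // same row-major layout as the global array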

The second function is the message-sending thread:

void * Thread_Func_Message_Send(void *arg)
{
    printf("IK solver thread is running!\n");

    //original pose and position
    float P_original[4];
    float N_original[4];
    float O_original[4];
    float A_original[4];
    //final pose and position
    float P[3];
    float N[3];
    float O[3];
    float A[3];

    P_original[3] = 1;
    N_original[3] = 0;
    O_original[3] = 0;
    A_original[3] = 0;

    while (1)
    {
        //get the spatial pose
        pthread_mutex_lock(&IK_Solver_Lock);
        //memcpy(P_original, Trans_M, sizeof(Trans_M));
        for (int i = 0; i < 3; i++)
        {
            P_original[i] = Trans_M[i];
            N_original[i] = Rotat_M[3*i];
            O_original[i] = Rotat_M[3*i+1];
            A_original[i] = Rotat_M[3*i+2];
        }
        pthread_mutex_unlock(&IK_Solver_Lock);

        //debug printf
        printf("I send the message to robot here! \n");

        //update every 5 s
        sleep(5);
    }

    //kill the message send thread
    pthread_exit(0);
}
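In the thread above, N_original, O_original and A_original are simply the three columns of the rotation matrix, i.e. the marker's x, y and z axes expressed in the camera frame, and together with P_original they describe the full pose. A small illustrative fragment (not part of the demo) that assembles them into the usual 4x4 homogeneous transform, stored row-major:

// Sketch: marker pose in the camera frame as a 4x4 homogeneous transform (row-major).
// Columns are N (x axis), O (y axis), A (z axis) and P (translation).
float T[16] = {
    N_original[0], O_original[0], A_original[0], P_original[0],
    N_original[1], O_original[1], A_original[1], P_original[1],
    N_original[2], O_original[2], A_original[2], P_original[2],
    0.0f,          0.0f,          0.0f,          1.0f
};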

 

-- End of this post --

Comments, direct messages, email, WeChat, or any other form of technical exchange are welcome.

Author information:

Name: Shawn

Email: [email protected]

WeChat QR code: [image]

Tags: ArUco, augmented reality, vision application