ViSP
Main Page
Related Pages
Modules
Classes
Examples
All
Classes
Functions
Variables
Enumerations
Enumerator
Friends
Groups
Pages
servoAfma6FourPoints2DCamVelocityInteractionDesired.cpp
1
/****************************************************************************
2
*
3
* $Id: servoAfma6FourPoints2DCamVelocityInteractionDesired.cpp 4056 2013-01-05 13:04:42Z fspindle $
4
*
5
* This file is part of the ViSP software.
6
* Copyright (C) 2005 - 2013 by INRIA. All rights reserved.
7
*
8
* This software is free software; you can redistribute it and/or
9
* modify it under the terms of the GNU General Public License
10
* ("GPL") version 2 as published by the Free Software Foundation.
11
* See the file LICENSE.txt at the root directory of this source
12
* distribution for additional information about the GNU GPL.
13
*
14
* For using ViSP with software that can not be combined with the GNU
15
* GPL, please contact INRIA about acquiring a ViSP Professional
16
* Edition License.
17
*
18
* See http://www.irisa.fr/lagadic/visp/visp.html for more information.
19
*
20
* This software was developed at:
21
* INRIA Rennes - Bretagne Atlantique
22
* Campus Universitaire de Beaulieu
23
* 35042 Rennes Cedex
24
* France
25
* http://www.irisa.fr/lagadic
26
*
27
* If you have questions regarding the use of this file, please contact
28
* INRIA at visp@inria.fr
29
*
30
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
31
* WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
32
*
33
*
34
* Description:
35
* tests the control law
36
* eye-in-hand control
37
* velocity computed in the camera frame
38
*
39
* Authors:
40
* Eric Marchand
41
* Fabien Spindler
42
*
43
*****************************************************************************/
44
70
#include <visp/vpConfig.h>
#include <visp/vpDebug.h> // Debug trace
#include <stdlib.h>
#if (defined (VISP_HAVE_AFMA6) && defined (VISP_HAVE_DC1394_2))

#include <visp/vp1394TwoGrabber.h>
#include <visp/vpImage.h>
#include <visp/vpImagePoint.h>
#include <visp/vpDisplay.h>
#include <visp/vpDisplayX.h>
#include <visp/vpDisplayOpenCV.h>
#include <visp/vpDisplayGTK.h>

#include <visp/vpMath.h>
#include <visp/vpTranslationVector.h>
#include <visp/vpRxyzVector.h>
#include <visp/vpRotationMatrix.h>
#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpFeaturePoint.h>
#include <visp/vpPoint.h>
#include <visp/vpServo.h>
#include <visp/vpFeatureBuilder.h>
#include <visp/vpDot.h>
#include <visp/vpDot2.h> // added: the example declares and tracks vpDot2 targets
#include <visp/vpRobotAfma6.h>
#include <visp/vpServoDisplay.h>
#include <visp/vpIoTools.h>

// Exception
#include <visp/vpException.h>
#include <visp/vpMatrixException.h>

#define L 0.05 // to deal with a 10cm by 10cm square
102
103
int
104
main()
105
{
106
// Log file creation in /tmp/$USERNAME/log.dat
107
// This file contains by line:
108
// - the 6 computed camera velocities (m/s, rad/s) to achieve the task
109
// - the 6 mesured camera velocities (m/s, rad/s)
110
// - the 6 mesured joint positions (m, rad)
111
// - the 8 values of s - s*
112
std::string username;
113
// Get the user login name
114
vpIoTools::getUserName
(username);
115
116
// Create a log filename to save velocities...
117
std::string logdirname;
118
logdirname =
"/tmp/"
+ username;
119
120
// Test if the output path exist. If no try to create it
121
if
(
vpIoTools::checkDirectory
(logdirname) ==
false
) {
122
try
{
123
// Create the dirname
124
vpIoTools::makeDirectory
(logdirname);
125
}
126
catch
(...) {
127
std::cerr << std::endl
128
<<
"ERROR:"
<< std::endl;
129
std::cerr <<
" Cannot create "
<< logdirname << std::endl;
130
exit(-1);
131
}
132
}
133
std::string logfilename;
134
logfilename = logdirname +
"/log.dat"
;
135
136
// Open the log file name
137
std::ofstream flog(logfilename.c_str());
138
139
try
140
{
141
vpServo
task ;
142
143
vpImage<unsigned char>
I ;
144
int
i ;
145
146
vp1394TwoGrabber
g;
147
g.
setVideoMode
(
vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8
);
148
g.
setFramerate
(
vp1394TwoGrabber::vpFRAMERATE_60
);
149
g.
open
(I) ;
150
151
#ifdef VISP_HAVE_X11
152
vpDisplayX
display(I,100,100,
"Current image"
) ;
153
#elif defined(VISP_HAVE_OPENCV)
154
vpDisplayOpenCV
display(I,100,100,
"Current image"
) ;
155
#elif defined(VISP_HAVE_GTK)
156
vpDisplayGTK
display(I,100,100,
"Current image"
) ;
157
#endif
158
159
g.
acquire
(I) ;
160
161
vpDisplay::display
(I) ;
162
vpDisplay::flush
(I) ;
163
164
std::cout << std::endl ;
165
std::cout <<
"-------------------------------------------------------"
<< std::endl ;
166
std::cout <<
" Test program for vpServo "
<<std::endl ;
167
std::cout <<
" Eye-in-hand task control, velocity computed in the camera frame"
<< std::endl ;
168
std::cout <<
" Use of the Afma6 robot "
<< std::endl ;
169
std::cout <<
" Interaction matrix computed with the desired features "
<< std::endl ;
170
171
std::cout <<
" task : servo 4 points on a square with dimention "
<< L <<
" meters"
<< std::endl ;
172
std::cout <<
"-------------------------------------------------------"
<< std::endl ;
173
std::cout << std::endl ;
174
175
176
vpDot2
dot[4] ;
177
vpImagePoint
cog;
178
179
std::cout <<
"Click on the 4 dots clockwise starting from upper/left dot..."
180
<< std::endl;
181
for
(i=0 ; i < 4 ; i++) {
182
dot[i].
initTracking
(I) ;
183
cog = dot[i].
getCog
();
184
vpDisplay::displayCross
(I, cog, 10,
vpColor::blue
) ;
185
vpDisplay::flush
(I);
186
}
187
188
vpRobotAfma6
robot;
189
190
vpCameraParameters::vpCameraParametersProjType
191
projModel =
vpCameraParameters::perspectiveProjWithDistortion
;
192
193
// Load the end-effector to camera frame transformation obtained
194
// using a camera intrinsic model with distortion
195
robot.
init
(
vpAfma6::TOOL_CCMOP
, projModel);
196
197
vpCameraParameters
cam ;
198
// Update camera parameters
199
robot.
getCameraParameters
(cam, I);
200
201
// Sets the current position of the visual feature
202
vpFeaturePoint
p[4] ;
203
for
(i=0 ; i < 4 ; i++)
204
vpFeatureBuilder::create
(p[i], cam, dot[i]);
//retrieve x,y of the vpFeaturePoint structure
205
206
// Set the position of the square target in a frame which origin is
207
// centered in the middle of the square
208
vpPoint
point[4] ;
209
point[0].
setWorldCoordinates
(-L, -L, 0) ;
210
point[1].
setWorldCoordinates
( L, -L, 0) ;
211
point[2].
setWorldCoordinates
( L, L, 0) ;
212
point[3].
setWorldCoordinates
(-L, L, 0) ;
213
214
// Initialise a desired pose to compute s*, the desired 2D point features
215
vpHomogeneousMatrix
cMo;
216
vpTranslationVector
cto(0, 0, 0.7);
// tz = 0.7 meter
217
vpRxyzVector
cro(
vpMath::rad
(0),
vpMath::rad
(0),
vpMath::rad
(0));
// No rotations
218
vpRotationMatrix
cRo(cro);
// Build the rotation matrix
219
cMo.
buildFrom
(cto, cRo);
// Build the homogeneous matrix
220
221
// sets the desired position of the 2D visual feature
222
vpFeaturePoint
pd[4] ;
223
// Compute the desired position of the features from the desired pose
224
for
(
int
i=0; i < 4; i ++) {
225
vpColVector
cP, p ;
226
point[i].
changeFrame
(cMo, cP) ;
227
point[i].
projection
(cP, p) ;
228
229
pd[i].
set_x
(p[0]) ;
230
pd[i].
set_y
(p[1]) ;
231
pd[i].
set_Z
(cP[2]);
232
}
233
234
// Define the task
235
// - we want an eye-in-hand control law
236
// - robot is controlled in the camera frame
237
// - Interaction matrix is computed with the desired visual features
238
task.
setServo
(
vpServo::EYEINHAND_CAMERA
) ;
239
task.
setInteractionMatrixType
(
vpServo::DESIRED
,
vpServo::PSEUDO_INVERSE
);
240
241
// We want to see a point on a point
242
std::cout << std::endl ;
243
for
(i=0 ; i < 4 ; i++)
244
task.
addFeature
(p[i],pd[i]) ;
245
246
// Set the proportional gain
247
task.
setLambda
(0.4) ;
248
249
// Display task information
250
task.
print
() ;
251
252
// Initialise the velocity control of the robot
253
robot.
setRobotState
(
vpRobot::STATE_VELOCITY_CONTROL
) ;
254
255
std::cout <<
"\nHit CTRL-C to stop the loop...\n"
<< std::flush;
256
257
for
( ; ; ) {
258
// Acquire a new image from the camera
259
g.
acquire
(I) ;
260
261
// Display this image
262
vpDisplay::display
(I) ;
263
264
// For each point...
265
for
(i=0 ; i < 4 ; i++) {
266
// Achieve the tracking of the dot in the image
267
dot[i].
track
(I) ;
268
// Get the dot cog
269
cog = dot[i].
getCog
();
270
// Display a green cross at the center of gravity position in the
271
// image
272
vpDisplay::displayCross
(I,cog, 10,
vpColor::green
) ;
273
}
274
275
// Printing on stdout concerning task information
276
// task.print() ;
277
278
// Update the point feature from the dot location
279
for
(i=0 ; i < 4 ; i++)
280
vpFeatureBuilder::create
(p[i], cam, dot[i]);
281
282
vpColVector
v ;
283
// Compute the visual servoing skew vector
284
v = task.
computeControlLaw
() ;
285
286
// Display the current and desired feature points in the image display
287
vpServoDisplay::display
(task, cam, I);
288
289
// Apply the computed camera velocities to the robot
290
robot.
setVelocity
(
vpRobot::CAMERA_FRAME
, v) ;
291
292
// Save velocities applied to the robot in the log file
293
// v[0], v[1], v[2] correspond to camera translation velocities in m/s
294
// v[3], v[4], v[5] correspond to camera rotation velocities in rad/s
295
flog << v[0] <<
" "
<< v[1] <<
" "
<< v[2] <<
" "
296
<< v[3] <<
" "
<< v[4] <<
" "
<< v[5] <<
" "
;
297
298
// Get the measured joint velocities of the robot
299
vpColVector
qvel;
300
robot.
getVelocity
(
vpRobot::CAMERA_FRAME
, qvel);
301
// Save measured camera velocities of the robot in the log file:
302
// - qvel[0], qvel[1], qvel[2] correspond to measured camera translation
303
// velocities in m/s
304
// - qvel[3], qvel[4], qvel[5] correspond to measured camera rotation
305
// velocities in rad/s
306
flog << qvel[0] <<
" "
<< qvel[1] <<
" "
<< qvel[2] <<
" "
307
<< qvel[3] <<
" "
<< qvel[4] <<
" "
<< qvel[5] <<
" "
;
308
309
// Get the measured joint positions of the robot
310
vpColVector
q;
311
robot.
getPosition
(
vpRobot::ARTICULAR_FRAME
, q);
312
// Save measured joint positions of the robot in the log file
313
// - q[0], q[1], q[2] correspond to measured joint translation
314
// positions in m
315
// - q[3], q[4], q[5] correspond to measured joint rotation
316
// positions in rad
317
flog << q[0] <<
" "
<< q[1] <<
" "
<< q[2] <<
" "
318
<< q[3] <<
" "
<< q[4] <<
" "
<< q[5] <<
" "
;
319
320
// Save feature error (s-s*) for the 4 feature points. For each feature
321
// point, we have 2 errors (along x and y axis). This error is expressed
322
// in meters in the camera frame
323
flog << ( task.
getError
() ).t() << std::endl;
324
325
// Flush the display
326
vpDisplay::flush
(I) ;
327
}
328
329
flog.close() ;
// Close the log file
330
331
// Display task information
332
task.
print
() ;
333
334
// Kill the task
335
task.
kill
();
336
337
return
0;
338
}
339
catch
(...) {
340
flog.close() ;
// Close the log file
341
vpERROR_TRACE
(
" Test failed"
) ;
342
return
0;
343
}
344
}
345
346
#else
347
int
348
main()
349
{
350
vpERROR_TRACE
(
"You do not have an afma6 robot or a firewire framegrabber connected to your computer..."
);
351
352
}
353
354
#endif
example
servo-afma6
servoAfma6FourPoints2DCamVelocityInteractionDesired.cpp
Generated on Thu Nov 7 2013 03:14:00 for ViSP by
1.8.4