ViSP
Main Page
Related Pages
Modules
Classes
Examples
All
Classes
Functions
Variables
Enumerations
Enumerator
Friends
Groups
Pages
servoViper650FourPoints2DArtVelocityInteractionCurrent.cpp
1
/****************************************************************************
2
*
3
* $Id: servoViper650FourPoints2DArtVelocityInteractionCurrent.cpp 3870 2012-09-05 17:03:43Z fspindle $
4
*
5
* This file is part of the ViSP software.
6
* Copyright (C) 2005 - 2013 by INRIA. All rights reserved.
7
*
8
* This software is free software; you can redistribute it and/or
9
* modify it under the terms of the GNU General Public License
10
* ("GPL") version 2 as published by the Free Software Foundation.
11
* See the file LICENSE.txt at the root directory of this source
12
* distribution for additional information about the GNU GPL.
13
*
14
* For using ViSP with software that can not be combined with the GNU
15
* GPL, please contact INRIA about acquiring a ViSP Professional
16
* Edition License.
17
*
18
* See http://www.irisa.fr/lagadic/visp/visp.html for more information.
19
*
20
* This software was developed at:
21
* INRIA Rennes - Bretagne Atlantique
22
* Campus Universitaire de Beaulieu
23
* 35042 Rennes Cedex
24
* France
25
* http://www.irisa.fr/lagadic
26
*
27
* If you have questions regarding the use of this file, please contact
28
* INRIA at visp@inria.fr
29
*
30
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
31
* WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
32
*
33
*
34
* Description:
35
* tests the control law
36
* eye-in-hand control
37
* velocity computed in the articular frame
38
*
39
* Authors:
40
* Fabien Spindler
41
*
42
*****************************************************************************/
55
#include <visp/vpConfig.h>
56
#include <visp/vpDebug.h>
57
58
#include <stdio.h>
59
#include <iostream>
60
#include <fstream>
61
#include <sstream>
62
#include <stdlib.h>
63
#if (defined (VISP_HAVE_VIPER650) && defined (VISP_HAVE_DC1394_2))
64
65
#include <visp/vp1394TwoGrabber.h>
66
#include <visp/vpImage.h>
67
#include <visp/vpDisplay.h>
68
#include <visp/vpDisplayX.h>
69
#include <visp/vpDisplayOpenCV.h>
70
#include <visp/vpDisplayGTK.h>
71
#include <visp/vpDot2.h>
72
#include <visp/vpFeatureBuilder.h>
73
#include <visp/vpFeaturePoint.h>
74
#include <visp/vpHomogeneousMatrix.h>
75
#include <visp/vpIoTools.h>
76
#include <visp/vpMath.h>
77
#include <visp/vpPoint.h>
78
#include <visp/vpPose.h>
79
#include <visp/vpRobotViper650.h>
80
#include <visp/vpServo.h>
81
#include <visp/vpServoDisplay.h>
82
83
#define L 0.05 // to deal with a 10cm by 10cm square
84
110
/*!
  Compute the pose cMo of the camera with respect to the object frame from
  the image location of the tracked dots.

  \param point : Array of \e ndot 3D points whose world coordinates are
    already set; their perspective projection (x, y) is updated in place.
  \param dot : Array of \e ndot tracked dots providing the current pixel
    location (center of gravity) of each point in the image.
  \param ndot : Number of points/dots used in the pose computation.
  \param cam : Camera intrinsic parameters used for pixel-to-meter conversion.
  \param cMo : In/out pose of the object in the camera frame; updated with
    the pose refined by the LOWE non-linear method.
  \param cto : In/out translation part of cMo (extracted on exit).
  \param cro : In/out Rxyz rotation part of cMo (extracted on exit).
  \param init : If true, initialise LOWE from the best of the DEMENTHON and
    LAGRANGE linear poses; if false, initialise LOWE from the previous pose
    given by (cto, cro).
*/
void
compute_pose(vpPoint point[], vpDot2 dot[], int ndot,
             vpCameraParameters cam,
             vpHomogeneousMatrix &cMo,
             vpTranslationVector &cto,
             vpRxyzVector &cro, bool init)
{
  vpHomogeneousMatrix cMo_dementhon; // computed pose with Dementhon
  vpHomogeneousMatrix cMo_lagrange;  // computed pose with Lagrange
  vpRotationMatrix cRo;
  vpPose pose;
  vpImagePoint cog;
  for (int i=0; i < ndot; i ++) {

    double x=0, y=0;
    cog = dot[i].getCog();
    vpPixelMeterConversion::convertPoint(cam,
                                         cog,
                                         x, y) ; //pixel to meter conversion
    point[i].set_x(x) ; //projection perspective p
    point[i].set_y(y) ;
    pose.addPoint(point[i]) ;
  }

  if (init == true) {
    pose.computePose(vpPose::DEMENTHON, cMo_dementhon) ;
    // Compute and return the residual expressed in meter for the pose matrix
    // 'cMo'
    double residual_dementhon = pose.computeResidual(cMo_dementhon);
    pose.computePose(vpPose::LAGRANGE, cMo_lagrange) ;
    double residual_lagrange = pose.computeResidual(cMo_lagrange);

    // Select the best pose to initialize the lowe pose computation
    if (residual_lagrange < residual_dementhon)
      cMo = cMo_lagrange;
    else
      cMo = cMo_dementhon;
  }
  else { // init = false; use of the previous pose to initialise LOWE
    cRo.buildFrom(cro);
    cMo.buildFrom(cto, cRo);
  }
  // Refine the initial pose estimate with the LOWE non-linear minimisation
  pose.computePose(vpPose::LOWE, cMo) ;
  // Export the refined pose so the caller can reuse it at the next iteration
  cMo.extract(cto);
  cMo.extract(cRo);
  cro.buildFrom(cRo);
}
157
158
int
159
main()
160
{
161
// Log file creation in /tmp/$USERNAME/log.dat
162
// This file contains by line:
163
// - the 6 computed joint velocities (m/s, rad/s) to achieve the task
164
// - the 6 mesured joint velocities (m/s, rad/s)
165
// - the 6 mesured joint positions (m, rad)
166
// - the 8 values of s - s*
167
std::string username;
168
// Get the user login name
169
vpIoTools::getUserName
(username);
170
171
// Create a log filename to save velocities...
172
std::string logdirname;
173
logdirname =
"/tmp/"
+ username;
174
175
// Test if the output path exist. If no try to create it
176
if
(
vpIoTools::checkDirectory
(logdirname) ==
false
) {
177
try
{
178
// Create the dirname
179
vpIoTools::makeDirectory
(logdirname);
180
}
181
catch
(...) {
182
std::cerr << std::endl
183
<<
"ERROR:"
<< std::endl;
184
std::cerr <<
" Cannot create "
<< logdirname << std::endl;
185
return
(-1);
186
}
187
}
188
std::string logfilename;
189
logfilename = logdirname +
"/log.dat"
;
190
191
// Open the log file name
192
std::ofstream flog(logfilename.c_str());
193
194
try
{
195
vpRobotViper650
robot ;
196
// Load the end-effector to camera frame transformation obtained
197
// using a camera intrinsic model with distortion
198
vpCameraParameters::vpCameraParametersProjType
projModel =
199
vpCameraParameters::perspectiveProjWithDistortion
;
200
robot.
init
(
vpRobotViper650::TOOL_PTGREY_FLEA2_CAMERA
, projModel);
201
202
vpServo
task ;
203
204
vpImage<unsigned char>
I ;
205
int
i ;
206
207
bool
reset =
false
;
208
vp1394TwoGrabber
g(reset);
209
g.setVideoMode(
vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8
);
210
g.setFramerate(
vp1394TwoGrabber::vpFRAMERATE_60
);
211
g.open(I) ;
212
213
g.acquire(I) ;
214
215
#ifdef VISP_HAVE_X11
216
vpDisplayX
display(I,100,100,
"Current image"
) ;
217
#elif defined(VISP_HAVE_OPENCV)
218
vpDisplayOpenCV
display(I,100,100,
"Current image"
) ;
219
#elif defined(VISP_HAVE_GTK)
220
vpDisplayGTK
display(I,100,100,
"Current image"
) ;
221
#endif
222
223
vpDisplay::display
(I) ;
224
vpDisplay::flush
(I) ;
225
226
std::cout << std::endl ;
227
std::cout <<
"-------------------------------------------------------"
<< std::endl ;
228
std::cout <<
" Test program for vpServo "
<<std::endl ;
229
std::cout <<
" Eye-in-hand task control, velocity computed in the joint space"
<< std::endl ;
230
std::cout <<
" Use of the Afma6 robot "
<< std::endl ;
231
std::cout <<
" task : servo 4 points on a square with dimention "
<< L <<
" meters"
<< std::endl ;
232
std::cout <<
"-------------------------------------------------------"
<< std::endl ;
233
std::cout << std::endl ;
234
235
vpDot2
dot[4] ;
236
vpImagePoint
cog;
237
238
std::cout <<
"Click on the 4 dots clockwise starting from upper/left dot..."
239
<< std::endl;
240
241
for
(i=0 ; i < 4 ; i++) {
242
dot[i].
setGraphics
(
true
) ;
243
dot[i].
initTracking
(I) ;
244
cog = dot[i].
getCog
();
245
vpDisplay::displayCross
(I, cog, 10,
vpColor::blue
) ;
246
vpDisplay::flush
(I);
247
}
248
249
vpCameraParameters
cam ;
250
251
// Update camera parameters
252
robot.
getCameraParameters
(cam, I);
253
254
std::cout <<
"Camera parameters: \n"
<< cam << std::endl;
255
256
// Sets the current position of the visual feature
257
vpFeaturePoint
p[4] ;
258
for
(i=0 ; i < 4 ; i++)
259
vpFeatureBuilder::create
(p[i], cam, dot[i]);
//retrieve x,y of the vpFeaturePoint structure
260
261
// Set the position of the square target in a frame which origin is
262
// centered in the middle of the square
263
vpPoint
point[4] ;
264
point[0].
setWorldCoordinates
(-L, -L, 0) ;
265
point[1].
setWorldCoordinates
( L, -L, 0) ;
266
point[2].
setWorldCoordinates
( L, L, 0) ;
267
point[3].
setWorldCoordinates
(-L, L, 0) ;
268
269
// Initialise a desired pose to compute s*, the desired 2D point features
270
vpHomogeneousMatrix
cMo;
271
vpTranslationVector
cto(0, 0, 0.5);
// tz = 0.5 meter
272
vpRxyzVector
cro(
vpMath::rad
(0),
vpMath::rad
(10),
vpMath::rad
(20));
273
vpRotationMatrix
cRo(cro);
// Build the rotation matrix
274
cMo.
buildFrom
(cto, cRo);
// Build the homogeneous matrix
275
276
// Sets the desired position of the 2D visual feature
277
vpFeaturePoint
pd[4] ;
278
// Compute the desired position of the features from the desired pose
279
for
(
int
i=0; i < 4; i ++) {
280
vpColVector
cP, p ;
281
point[i].
changeFrame
(cMo, cP) ;
282
point[i].
projection
(cP, p) ;
283
284
pd[i].
set_x
(p[0]) ;
285
pd[i].
set_y
(p[1]) ;
286
pd[i].
set_Z
(cP[2]);
287
}
288
289
// We want to see a point on a point
290
for
(i=0 ; i < 4 ; i++)
291
task.
addFeature
(p[i],pd[i]) ;
292
293
// Set the proportional gain
294
task.
setLambda
(0.3) ;
295
296
// Display task information
297
task.
print
() ;
298
299
// Define the task
300
// - we want an eye-in-hand control law
301
// - articular velocity are computed
302
task.
setServo
(
vpServo::EYEINHAND_L_cVe_eJe
) ;
303
task.
setInteractionMatrixType
(
vpServo::CURRENT
,
vpServo::PSEUDO_INVERSE
) ;
304
task.
print
() ;
305
306
vpVelocityTwistMatrix
cVe ;
307
robot.
get_cVe
(cVe) ;
308
task.
set_cVe
(cVe) ;
309
task.
print
() ;
310
311
// Set the Jacobian (expressed in the end-effector frame)
312
vpMatrix
eJe ;
313
robot.
get_eJe
(eJe) ;
314
task.
set_eJe
(eJe) ;
315
task.
print
() ;
316
317
// Initialise the velocity control of the robot
318
robot.
setRobotState
(
vpRobot::STATE_VELOCITY_CONTROL
) ;
319
320
std::cout <<
"\nHit CTRL-C to stop the loop...\n"
<< std::flush;
321
for
( ; ; ) {
322
// Acquire a new image from the camera
323
g.acquire(I) ;
324
325
// Display this image
326
vpDisplay::display
(I) ;
327
328
try
{
329
// For each point...
330
for
(i=0 ; i < 4 ; i++) {
331
// Achieve the tracking of the dot in the image
332
dot[i].
track
(I) ;
333
// Display a green cross at the center of gravity position in the
334
// image
335
cog = dot[i].
getCog
();
336
vpDisplay::displayCross
(I, cog, 10,
vpColor::green
) ;
337
}
338
}
339
catch
(...) {
340
flog.close() ;
// Close the log file
341
vpTRACE
(
"Error detected while tracking visual features"
) ;
342
robot.
stopMotion
() ;
343
return
(1) ;
344
}
345
346
// During the servo, we compute the pose using LOWE method. For the
347
// initial pose used in the non linear minimisation we use the pose
348
// computed at the previous iteration.
349
compute_pose(point, dot, 4, cam, cMo, cto, cro,
false
);
350
351
for
(i=0 ; i < 4 ; i++) {
352
// Update the point feature from the dot location
353
vpFeatureBuilder::create
(p[i], cam, dot[i]);
354
// Set the feature Z coordinate from the pose
355
vpColVector
cP;
356
point[i].
changeFrame
(cMo, cP) ;
357
358
p[i].set_Z(cP[2]);
359
}
360
361
// Get the jacobian of the robot
362
robot.
get_eJe
(eJe) ;
363
// Update this jacobian in the task structure. It will be used to compute
364
// the velocity skew (as an articular velocity)
365
// qdot = -lambda * L^+ * cVe * eJe * (s-s*)
366
task.
set_eJe
(eJe) ;
367
368
vpColVector
v ;
369
// Compute the visual servoing skew vector
370
v = task.
computeControlLaw
() ;
371
372
// Display the current and desired feature points in the image display
373
vpServoDisplay::display
(task,cam,I) ;
374
375
// Apply the computed joint velocities to the robot
376
robot.
setVelocity
(
vpRobot::ARTICULAR_FRAME
, v) ;
377
378
// Save velocities applied to the robot in the log file
379
// v[0], v[1], v[2] correspond to joint translation velocities in m/s
380
// v[3], v[4], v[5] correspond to joint rotation velocities in rad/s
381
flog << v[0] <<
" "
<< v[1] <<
" "
<< v[2] <<
" "
382
<< v[3] <<
" "
<< v[4] <<
" "
<< v[5] <<
" "
;
383
384
// Get the measured joint velocities of the robot
385
vpColVector
qvel;
386
robot.
getVelocity
(
vpRobot::ARTICULAR_FRAME
, qvel);
387
// Save measured joint velocities of the robot in the log file:
388
// - qvel[0], qvel[1], qvel[2] correspond to measured joint translation
389
// velocities in m/s
390
// - qvel[3], qvel[4], qvel[5] correspond to measured joint rotation
391
// velocities in rad/s
392
flog << qvel[0] <<
" "
<< qvel[1] <<
" "
<< qvel[2] <<
" "
393
<< qvel[3] <<
" "
<< qvel[4] <<
" "
<< qvel[5] <<
" "
;
394
395
// Get the measured joint positions of the robot
396
vpColVector
q;
397
robot.
getPosition
(
vpRobot::ARTICULAR_FRAME
, q);
398
// Save measured joint positions of the robot in the log file
399
// - q[0], q[1], q[2] correspond to measured joint translation
400
// positions in m
401
// - q[3], q[4], q[5] correspond to measured joint rotation
402
// positions in rad
403
flog << q[0] <<
" "
<< q[1] <<
" "
<< q[2] <<
" "
404
<< q[3] <<
" "
<< q[4] <<
" "
<< q[5] <<
" "
;
405
406
// Save feature error (s-s*) for the 4 feature points. For each feature
407
// point, we have 2 errors (along x and y axis). This error is expressed
408
// in meters in the camera frame
409
flog << ( task.
getError
() ).t() << std::endl;
410
411
// Flush the display
412
vpDisplay::flush
(I) ;
413
414
// vpTRACE("\t\t || s - s* || = %f ", ( task.getError() ).sumSquare()) ;
415
}
416
417
vpTRACE
(
"Display task information "
) ;
418
task.
print
() ;
419
task.
kill
();
420
flog.close() ;
// Close the log file
421
return
0;
422
}
423
catch
(...)
424
{
425
flog.close() ;
// Close the log file
426
vpERROR_TRACE
(
" Test failed"
) ;
427
return
0;
428
}
429
}
430
431
#else
432
int
433
main()
434
{
435
vpERROR_TRACE
(
"You do not have an afma6 robot or a firewire framegrabber connected to your computer..."
);
436
437
}
438
439
#endif
example
servo-viper650
servoViper650FourPoints2DArtVelocityInteractionCurrent.cpp
Generated on Fri Apr 26 2013 19:54:32 for ViSP by
1.8.1.2